-rw-r--r--.gitattributes6
-rw-r--r--.mailmap65
-rw-r--r--META-INF/MANIFEST.MF7
-rw-r--r--README.rst13
-rw-r--r--bincompat-backward.whitelist.conf163
-rw-r--r--bincompat-forward.whitelist.conf359
-rw-r--r--build.detach.xml186
-rw-r--r--build.examples.xml29
-rw-r--r--build.number4
-rw-r--r--build.number.maven2
-rw-r--r--build.xml653
-rw-r--r--docs/LICENSE2
-rw-r--r--docs/examples/actors/pingpong.scala2
-rw-r--r--docs/examples/jolib/Ref.scala2
-rw-r--r--docs/examples/jolib/parallelOr.scala4
-rw-r--r--docs/examples/parsing/ArithmeticParser.scala2
-rw-r--r--docs/examples/swing/ColorChooserDemo.scala61
-rw-r--r--docs/examples/swing/PopupDemo.scala33
-rw-r--r--lib/fjbg.jar.desired.sha11
-rw-r--r--lib/msil.jar.desired.sha11
-rw-r--r--lib/scala-compiler-src.jar.desired.sha12
-rw-r--r--lib/scala-compiler.jar.desired.sha12
-rw-r--r--lib/scala-library-src.jar.desired.sha12
-rw-r--r--lib/scala-library.jar.desired.sha12
-rw-r--r--lib/scala-reflect-src.jar.desired.sha12
-rw-r--r--lib/scala-reflect.jar.desired.sha12
-rw-r--r--project/Build.scala40
-rw-r--r--project/Layers.scala14
-rw-r--r--project/Packaging.scala4
-rw-r--r--project/Partest.scala3
-rw-r--r--project/ShaResolve.scala1
-rw-r--r--project/Testing.scala10
-rw-r--r--project/project/Build.scala2
-rw-r--r--src/actors/scala/actors/AbstractActor.scala1
-rw-r--r--src/actors/scala/actors/Actor.scala2
-rw-r--r--src/actors/scala/actors/ActorRef.scala3
-rw-r--r--src/actors/scala/actors/CanReply.scala1
-rw-r--r--src/actors/scala/actors/Channel.scala2
-rw-r--r--src/actors/scala/actors/DaemonActor.scala1
-rw-r--r--src/actors/scala/actors/Debug.scala1
-rw-r--r--src/actors/scala/actors/Future.scala4
-rw-r--r--src/actors/scala/actors/IScheduler.scala1
-rw-r--r--src/actors/scala/actors/InputChannel.scala1
-rw-r--r--src/actors/scala/actors/InternalActor.scala2
-rw-r--r--src/actors/scala/actors/InternalReplyReactor.scala1
-rw-r--r--src/actors/scala/actors/OutputChannel.scala1
-rw-r--r--src/actors/scala/actors/Reactor.scala1
-rw-r--r--src/actors/scala/actors/ReplyReactor.scala2
-rw-r--r--src/actors/scala/actors/Scheduler.scala1
-rw-r--r--src/actors/scala/actors/SchedulerAdapter.scala1
-rw-r--r--src/actors/scala/actors/UncaughtException.scala1
-rw-r--r--src/actors/scala/actors/package.scala1
-rw-r--r--src/actors/scala/actors/remote/JavaSerializer.scala1
-rw-r--r--src/actors/scala/actors/remote/RemoteActor.scala2
-rw-r--r--src/actors/scala/actors/remote/Serializer.scala1
-rw-r--r--src/actors/scala/actors/remote/Service.scala1
-rw-r--r--src/actors/scala/actors/remote/TcpService.scala2
-rw-r--r--src/actors/scala/actors/scheduler/ActorGC.scala1
-rw-r--r--src/actors/scala/actors/scheduler/DaemonScheduler.scala1
-rw-r--r--src/actors/scala/actors/scheduler/ExecutorScheduler.scala2
-rw-r--r--src/actors/scala/actors/scheduler/ForkJoinScheduler.scala1
-rw-r--r--src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala1
-rw-r--r--src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala1
-rw-r--r--src/asm/scala/tools/asm/AnnotationVisitor.java66
-rw-r--r--src/asm/scala/tools/asm/AnnotationWriter.java58
-rw-r--r--src/asm/scala/tools/asm/Attribute.java193
-rw-r--r--src/asm/scala/tools/asm/ByteVector.java49
-rw-r--r--src/asm/scala/tools/asm/ClassReader.java2980
-rw-r--r--src/asm/scala/tools/asm/ClassVisitor.java233
-rw-r--r--src/asm/scala/tools/asm/ClassWriter.java499
-rw-r--r--src/asm/scala/tools/asm/Context.java110
-rw-r--r--src/asm/scala/tools/asm/FieldVisitor.java34
-rw-r--r--src/asm/scala/tools/asm/FieldWriter.java72
-rw-r--r--src/asm/scala/tools/asm/Frame.java1024
-rw-r--r--src/asm/scala/tools/asm/Handle.java48
-rw-r--r--src/asm/scala/tools/asm/Handler.java9
-rw-r--r--src/asm/scala/tools/asm/Item.java162
-rw-r--r--src/asm/scala/tools/asm/Label.java135
-rw-r--r--src/asm/scala/tools/asm/MethodVisitor.java516
-rw-r--r--src/asm/scala/tools/asm/MethodWriter.java1165
-rw-r--r--src/asm/scala/tools/asm/Type.java254
-rw-r--r--src/asm/scala/tools/asm/signature/SignatureReader.java181
-rw-r--r--src/asm/scala/tools/asm/signature/SignatureVisitor.java51
-rw-r--r--src/asm/scala/tools/asm/signature/SignatureWriter.java2
-rw-r--r--src/asm/scala/tools/asm/tree/AbstractInsnNode.java30
-rw-r--r--src/asm/scala/tools/asm/tree/AnnotationNode.java55
-rw-r--r--src/asm/scala/tools/asm/tree/ClassNode.java102
-rw-r--r--src/asm/scala/tools/asm/tree/FieldInsnNode.java34
-rw-r--r--src/asm/scala/tools/asm/tree/FieldNode.java104
-rw-r--r--src/asm/scala/tools/asm/tree/FrameNode.java121
-rw-r--r--src/asm/scala/tools/asm/tree/IincInsnNode.java8
-rw-r--r--src/asm/scala/tools/asm/tree/InnerClassNode.java42
-rw-r--r--src/asm/scala/tools/asm/tree/InsnList.java120
-rw-r--r--src/asm/scala/tools/asm/tree/InsnNode.java33
-rw-r--r--src/asm/scala/tools/asm/tree/IntInsnNode.java13
-rw-r--r--src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java22
-rw-r--r--src/asm/scala/tools/asm/tree/JumpInsnNode.java25
-rw-r--r--src/asm/scala/tools/asm/tree/LabelNode.java2
-rw-r--r--src/asm/scala/tools/asm/tree/LdcInsnNode.java11
-rw-r--r--src/asm/scala/tools/asm/tree/LineNumberNode.java8
-rw-r--r--src/asm/scala/tools/asm/tree/LocalVariableNode.java45
-rw-r--r--src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java21
-rw-r--r--src/asm/scala/tools/asm/tree/MethodInsnNode.java38
-rw-r--r--src/asm/scala/tools/asm/tree/MethodNode.java236
-rw-r--r--src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java10
-rw-r--r--src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java30
-rw-r--r--src/asm/scala/tools/asm/tree/TryCatchBlockNode.java32
-rw-r--r--src/asm/scala/tools/asm/tree/TypeInsnNode.java19
-rw-r--r--src/asm/scala/tools/asm/tree/VarInsnNode.java21
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Analyzer.java160
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java11
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java483
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/BasicValue.java9
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java598
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Frame.java854
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Interpreter.java110
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java119
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java148
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/SourceValue.java8
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Subroutine.java9
-rw-r--r--src/asm/scala/tools/asm/util/ASMifiable.java13
-rw-r--r--src/asm/scala/tools/asm/util/ASMifier.java485
-rw-r--r--src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java28
-rw-r--r--src/asm/scala/tools/asm/util/CheckClassAdapter.java587
-rw-r--r--src/asm/scala/tools/asm/util/CheckFieldAdapter.java23
-rw-r--r--src/asm/scala/tools/asm/util/CheckMethodAdapter.java937
-rw-r--r--src/asm/scala/tools/asm/util/CheckSignatureAdapter.java57
-rw-r--r--src/asm/scala/tools/asm/util/Printer.java316
-rw-r--r--src/asm/scala/tools/asm/util/SignatureChecker.java47
-rw-r--r--src/asm/scala/tools/asm/util/Textifiable.java8
-rw-r--r--src/asm/scala/tools/asm/util/Textifier.java446
-rw-r--r--src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java23
-rw-r--r--src/asm/scala/tools/asm/util/TraceClassVisitor.java159
-rw-r--r--src/asm/scala/tools/asm/util/TraceFieldVisitor.java10
-rw-r--r--src/asm/scala/tools/asm/util/TraceMethodVisitor.java89
-rw-r--r--src/asm/scala/tools/asm/util/TraceSignatureVisitor.java59
-rw-r--r--src/build/genprod.scala4
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Context.scala1
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Enclosures.scala24
-rw-r--r--src/compiler/scala/reflect/macros/runtime/ExprUtils.scala1
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Names.scala13
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Synthetics.scala83
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Typers.scala6
-rw-r--r--src/compiler/scala/reflect/reify/Errors.scala6
-rw-r--r--src/compiler/scala/reflect/reify/Phases.scala5
-rw-r--r--src/compiler/scala/reflect/reify/Reifier.scala8
-rw-r--r--src/compiler/scala/reflect/reify/States.scala1
-rw-r--r--src/compiler/scala/reflect/reify/Taggers.scala3
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala3
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenNames.scala5
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenPositions.scala3
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenSymbols.scala9
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenTrees.scala24
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenTypes.scala5
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenUtils.scala63
-rw-r--r--src/compiler/scala/reflect/reify/package.scala11
-rw-r--r--src/compiler/scala/reflect/reify/phases/Calculate.scala3
-rw-r--r--src/compiler/scala/reflect/reify/phases/Metalevels.scala15
-rw-r--r--src/compiler/scala/reflect/reify/phases/Reify.scala6
-rw-r--r--src/compiler/scala/reflect/reify/phases/Reshape.scala36
-rw-r--r--src/compiler/scala/reflect/reify/utils/Extractors.scala4
-rw-r--r--src/compiler/scala/reflect/reify/utils/NodePrinters.scala22
-rw-r--r--src/compiler/scala/reflect/reify/utils/SymbolTables.scala16
-rw-r--r--src/compiler/scala/tools/ant/Pack200Task.scala6
-rw-r--r--src/compiler/scala/tools/ant/Same.scala4
-rw-r--r--src/compiler/scala/tools/ant/ScalaTool.scala14
-rw-r--r--src/compiler/scala/tools/ant/Scalac.scala21
-rw-r--r--src/compiler/scala/tools/ant/antlib.xml2
-rw-r--r--src/compiler/scala/tools/ant/sabbus/ScalacFork.scala7
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Settings.scala2
-rw-r--r--src/compiler/scala/tools/ant/sabbus/TaskArgs.scala2
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Use.scala4
-rw-r--r--src/compiler/scala/tools/ant/templates/tool-unix.tmpl3
-rw-r--r--src/compiler/scala/tools/cmd/CommandLine.scala2
-rw-r--r--src/compiler/scala/tools/cmd/FromString.scala11
-rw-r--r--src/compiler/scala/tools/cmd/Reference.scala3
-rw-r--r--src/compiler/scala/tools/cmd/gen/AnyVals.scala2
-rw-r--r--src/compiler/scala/tools/cmd/gen/CodegenSpec.scala2
-rw-r--r--src/compiler/scala/tools/nsc/CompilationUnits.scala51
-rw-r--r--src/compiler/scala/tools/nsc/CompileClient.scala1
-rw-r--r--src/compiler/scala/tools/nsc/CompileServer.scala19
-rw-r--r--src/compiler/scala/tools/nsc/CompileSocket.scala8
-rw-r--r--src/compiler/scala/tools/nsc/CompilerCommand.scala14
-rw-r--r--src/compiler/scala/tools/nsc/CompilerRun.scala21
-rw-r--r--src/compiler/scala/tools/nsc/ConsoleWriter.scala4
-rw-r--r--src/compiler/scala/tools/nsc/Driver.scala8
-rw-r--r--src/compiler/scala/tools/nsc/EvalLoop.scala2
-rw-r--r--src/compiler/scala/tools/nsc/GenericRunnerCommand.scala2
-rw-r--r--src/compiler/scala/tools/nsc/GenericRunnerSettings.scala3
-rw-r--r--src/compiler/scala/tools/nsc/Global.scala491
-rw-r--r--src/compiler/scala/tools/nsc/Main.scala80
-rw-r--r--src/compiler/scala/tools/nsc/MainBench.scala16
-rw-r--r--src/compiler/scala/tools/nsc/MainTokenMetric.scala8
-rw-r--r--src/compiler/scala/tools/nsc/ObjectRunner.scala6
-rw-r--r--src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala6
-rw-r--r--src/compiler/scala/tools/nsc/PhaseAssembly.scala16
-rw-r--r--src/compiler/scala/tools/nsc/Phases.scala3
-rw-r--r--src/compiler/scala/tools/nsc/Properties.scala4
-rw-r--r--src/compiler/scala/tools/nsc/ScriptRunner.scala14
-rw-r--r--src/compiler/scala/tools/nsc/SubComponent.scala4
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/DocComments.scala20
-rw-r--r--src/compiler/scala/tools/nsc/ast/NodePrinters.scala20
-rw-r--r--src/compiler/scala/tools/nsc/ast/Positions.scala9
-rw-r--r--src/compiler/scala/tools/nsc/ast/Printers.scala89
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala27
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeDSL.scala91
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeGen.scala128
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeInfo.scala66
-rw-r--r--src/compiler/scala/tools/nsc/ast/Trees.scala56
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala68
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala286
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Scanners.scala259
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala7
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala12
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Tokens.scala38
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala76
-rw-r--r--src/compiler/scala/tools/nsc/backend/JavaPlatform.scala13
-rw-r--r--src/compiler/scala/tools/nsc/backend/MSILPlatform.scala69
-rw-r--r--src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala9
-rw-r--r--src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala5
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala96
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala23
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/GenICode.scala717
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala66
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ICodes.scala8
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala203
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Members.scala92
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala128
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Primitives.scala27
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Printers.scala64
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Repository.scala11
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala49
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala12
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala84
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala19
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala8
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala84
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala32
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala600
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala62
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala1921
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala4
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala142
-rw-r--r--src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala2358
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala19
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala34
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala11
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/Inliners.scala54
-rw-r--r--src/compiler/scala/tools/nsc/dependencies/Changes.scala18
-rw-r--r--src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala254
-rw-r--r--src/compiler/scala/tools/nsc/dependencies/Files.scala177
-rw-r--r--src/compiler/scala/tools/nsc/interactive/BuildManager.scala93
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RangePositions.scala285
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala355
-rw-r--r--src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala103
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala50
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala125
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/package.scala49
-rw-r--r--src/compiler/scala/tools/nsc/io/Fileish.scala33
-rw-r--r--src/compiler/scala/tools/nsc/io/Jar.scala36
-rw-r--r--src/compiler/scala/tools/nsc/io/Lexer.scala12
-rw-r--r--src/compiler/scala/tools/nsc/io/MsilFile.scala18
-rw-r--r--src/compiler/scala/tools/nsc/io/Pickler.scala93
-rw-r--r--src/compiler/scala/tools/nsc/io/Replayer.scala2
-rw-r--r--src/compiler/scala/tools/nsc/io/Socket.scala5
-rw-r--r--src/compiler/scala/tools/nsc/io/SourceReader.scala7
-rw-r--r--src/compiler/scala/tools/nsc/io/package.scala17
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaParsers.scala183
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaScanners.scala286
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaTokens.scala6
-rw-r--r--src/compiler/scala/tools/nsc/matching/MatchSupport.scala138
-rw-r--r--src/compiler/scala/tools/nsc/matching/Matrix.scala259
-rw-r--r--src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala193
-rw-r--r--src/compiler/scala/tools/nsc/matching/ParallelMatching.scala870
-rw-r--r--src/compiler/scala/tools/nsc/matching/PatternBindings.scala137
-rw-r--r--src/compiler/scala/tools/nsc/matching/Patterns.scala499
-rw-r--r--src/compiler/scala/tools/nsc/package.scala12
-rw-r--r--src/compiler/scala/tools/nsc/plugins/Plugin.scala129
-rw-r--r--src/compiler/scala/tools/nsc/plugins/PluginComponent.scala8
-rw-r--r--src/compiler/scala/tools/nsc/plugins/PluginDescription.scala51
-rw-r--r--src/compiler/scala/tools/nsc/plugins/Plugins.scala19
-rw-r--r--src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala12
-rw-r--r--src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala9
-rw-r--r--src/compiler/scala/tools/nsc/reporters/Reporter.scala9
-rw-r--r--src/compiler/scala/tools/nsc/scratchpad/Mixer.scala3
-rw-r--r--src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala2
-rw-r--r--src/compiler/scala/tools/nsc/settings/AbsSettings.scala13
-rw-r--r--src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala77
-rw-r--r--src/compiler/scala/tools/nsc/settings/AestheticSettings.scala39
-rw-r--r--src/compiler/scala/tools/nsc/settings/FscSettings.scala4
-rw-r--r--src/compiler/scala/tools/nsc/settings/MutableSettings.scala68
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaSettings.scala26
-rw-r--r--src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala9
-rw-r--r--src/compiler/scala/tools/nsc/settings/Warnings.scala19
-rw-r--r--src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala3
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala57
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala8
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala7
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala127
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala157
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala174
-rw-r--r--src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala137
-rw-r--r--src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala850
-rw-r--r--src/compiler/scala/tools/nsc/transform/AddInterfaces.scala14
-rw-r--r--src/compiler/scala/tools/nsc/transform/CleanUp.scala44
-rw-r--r--src/compiler/scala/tools/nsc/transform/Constructors.scala22
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala216
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala119
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala24
-rw-r--r--src/compiler/scala/tools/nsc/transform/Flatten.scala35
-rw-r--r--src/compiler/scala/tools/nsc/transform/InfoTransform.scala4
-rw-r--r--src/compiler/scala/tools/nsc/transform/InlineErasure.scala10
-rw-r--r--src/compiler/scala/tools/nsc/transform/LambdaLift.scala22
-rw-r--r--src/compiler/scala/tools/nsc/transform/LazyVals.scala20
-rw-r--r--src/compiler/scala/tools/nsc/transform/Mixin.scala149
-rw-r--r--src/compiler/scala/tools/nsc/transform/OverridingPairs.scala20
-rw-r--r--src/compiler/scala/tools/nsc/transform/PostErasure.scala60
-rw-r--r--src/compiler/scala/tools/nsc/transform/SampleTransform.scala5
-rw-r--r--src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala352
-rw-r--r--src/compiler/scala/tools/nsc/transform/TailCalls.scala12
-rw-r--r--src/compiler/scala/tools/nsc/transform/TypingTransformers.scala6
-rw-r--r--src/compiler/scala/tools/nsc/transform/UnCurry.scala243
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/Logic.scala14
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala8
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala13
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/Solving.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Analyzer.scala18
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala16
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Checkable.scala38
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala5
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala92
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Contexts.scala504
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala15
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Duplicators.scala86
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala17
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala68
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala617
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Macros.scala359
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala141
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala206
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala35
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala366
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala120
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala50
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala21
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Tags.scala12
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala29
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala150
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala (renamed from src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala)57
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala1896
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Unapplies.scala14
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Variances.scala94
-rw-r--r--src/compiler/scala/tools/nsc/util/AbstractFileClassLoader.scala (renamed from src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala)49
-rw-r--r--src/compiler/scala/tools/nsc/util/ClassPath.scala65
-rw-r--r--src/compiler/scala/tools/nsc/util/CommandLineParser.scala5
-rwxr-xr-xsrc/compiler/scala/tools/nsc/util/DocStrings.scala2
-rw-r--r--src/compiler/scala/tools/nsc/util/Exceptional.scala2
-rw-r--r--src/compiler/scala/tools/nsc/util/FreshNameCreator.scala5
-rw-r--r--src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala63
-rw-r--r--src/compiler/scala/tools/nsc/util/MsilClassPath.scala169
-rw-r--r--src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala48
-rw-r--r--src/compiler/scala/tools/nsc/util/ShowPickled.scala11
-rw-r--r--src/compiler/scala/tools/nsc/util/SimpleTracer.scala3
-rw-r--r--src/compiler/scala/tools/nsc/util/WorkScheduler.scala6
-rw-r--r--src/compiler/scala/tools/nsc/util/package.scala32
-rw-r--r--src/compiler/scala/tools/reflect/FastTrack.scala48
-rw-r--r--src/compiler/scala/tools/reflect/FrontEnd.scala2
-rw-r--r--src/compiler/scala/tools/reflect/MacroImplementations.scala11
-rw-r--r--src/compiler/scala/tools/reflect/ReflectMain.scala3
-rw-r--r--src/compiler/scala/tools/reflect/StdTags.scala5
-rw-r--r--src/compiler/scala/tools/reflect/ToolBox.scala2
-rw-r--r--src/compiler/scala/tools/reflect/ToolBoxFactory.scala79
-rw-r--r--src/compiler/scala/tools/reflect/package.scala5
-rw-r--r--src/compiler/scala/tools/util/Javap.scala157
-rw-r--r--src/compiler/scala/tools/util/PathResolver.scala30
-rw-r--r--src/compiler/scala/tools/util/SocketServer.scala4
-rw-r--r--src/continuations/library/scala/util/continuations/ControlContext.scala4
-rw-r--r--src/continuations/library/scala/util/continuations/package.scala8
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala51
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala7
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala16
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala5
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala21
-rw-r--r--src/detach/library/scala/remoting/Channel.scala190
-rw-r--r--src/detach/library/scala/remoting/Debug.scala27
-rw-r--r--src/detach/library/scala/remoting/ServerChannel.scala68
-rw-r--r--src/detach/library/scala/remoting/detach.scala49
-rw-r--r--src/detach/library/scala/runtime/RemoteRef.scala182
-rw-r--r--src/detach/library/scala/runtime/remoting/Debug.scala85
-rw-r--r--src/detach/library/scala/runtime/remoting/RegistryDelegate.scala192
-rw-r--r--src/detach/library/scala/runtime/remoting/RemoteBooleanRef.scala51
-rw-r--r--src/detach/library/scala/runtime/remoting/RemoteByteRef.scala51
-rw-r--r--src/detach/library/scala/runtime/remoting/RemoteCharRef.scala51
-rw-r--r--src/detach/library/scala/runtime/remoting/RemoteDoubleRef.scala50
-rw-r--r--src/detach/library/scala/runtime/remoting/RemoteFloatRef.scala50
-rw-r--r--src/detach/library/scala/runtime/remoting/RemoteGC.scala66
-rw-r--r--src/detach/library/scala/runtime/remoting/RemoteIntRef.scala51
-rw-r--r--src/detach/library/scala/runtime/remoting/RemoteLongRef.scala51
-rw-r--r--src/detach/library/scala/runtime/remoting/RemoteObjectRef.scala51
-rw-r--r--src/detach/library/scala/runtime/remoting/RemoteShortRef.scala50
-rw-r--r--src/detach/plugin/scala/tools/detach/Detach.scala1190
-rw-r--r--src/detach/plugin/scala/tools/detach/DetachPlugin.scala41
-rw-r--r--src/detach/plugin/scalac-plugin.xml4
-rw-r--r--src/eclipse/README.md4
-rw-r--r--src/eclipse/fjbg/.classpath7
-rw-r--r--src/eclipse/interactive/.classpath10
-rw-r--r--src/eclipse/interactive/.project35
-rw-r--r--src/eclipse/partest/.classpath2
-rw-r--r--src/eclipse/repl/.classpath11
-rw-r--r--src/eclipse/repl/.project35
-rw-r--r--src/eclipse/scala-compiler/.classpath2
-rw-r--r--src/eclipse/scaladoc/.classpath13
-rw-r--r--src/eclipse/scaladoc/.project (renamed from src/eclipse/fjbg/.project)15
-rw-r--r--src/eclipse/scalap/.classpath1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java195
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java35
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java62
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java84
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java101
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JClass.java420
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JCode.java1308
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java125
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java377
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java771
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JConstantValueAttribute.java69
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JEnclosingMethodAttribute.java83
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JExceptionsAttribute.java90
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java667
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JField.java62
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java138
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JInnerClassesAttribute.java201
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JLabel.java30
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java121
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java42
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JLocalVariableTableAttribute.java167
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JMember.java109
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JMethod.java199
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java87
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java65
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java1267
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java77
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java19
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java69
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JStackMapTableAttribute.java282
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JType.java316
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/Main.java131
-rw-r--r--src/fjbg/ch/epfl/lamp/util/ByteArray.java145
-rw-r--r--src/intellij/compiler.iml.SAMPLE2
-rw-r--r--src/intellij/continuations-library.iml.SAMPLE (renamed from src/intellij/msil.iml.SAMPLE)5
-rw-r--r--src/intellij/continuations-plugin.iml.SAMPLE25
-rw-r--r--src/intellij/fjbg.iml.SAMPLE12
-rw-r--r--src/intellij/interactive.iml.SAMPLE25
-rw-r--r--src/intellij/scala-lang.ipr.SAMPLE7
-rw-r--r--src/intellij/scaladoc.iml.SAMPLE24
-rw-r--r--src/intellij/test.iml.SAMPLE2
-rw-r--r--src/interactive/scala/tools/nsc/interactive/CompilerControl.scala (renamed from src/compiler/scala/tools/nsc/interactive/CompilerControl.scala)26
-rw-r--r--src/interactive/scala/tools/nsc/interactive/ContextTrees.scala (renamed from src/compiler/scala/tools/nsc/interactive/ContextTrees.scala)1
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Global.scala (renamed from src/compiler/scala/tools/nsc/interactive/Global.scala)178
-rw-r--r--src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala (renamed from src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala)0
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Main.scala34
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Picklers.scala (renamed from src/compiler/scala/tools/nsc/interactive/Picklers.scala)8
-rw-r--r--src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala (renamed from src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala)0
-rw-r--r--src/interactive/scala/tools/nsc/interactive/REPL.scala (renamed from src/compiler/scala/tools/nsc/interactive/REPL.scala)18
-rw-r--r--src/interactive/scala/tools/nsc/interactive/RangePositions.scala14
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Response.scala (renamed from src/compiler/scala/tools/nsc/interactive/Response.scala)0
-rw-r--r--src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala (renamed from src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala)0
-rw-r--r--src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala (renamed from src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala)4
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala)10
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala)7
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/Tester.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/Tester.scala)2
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala)0
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala)6
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala)12
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala)0
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala)3
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala)0
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala)4
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala)2
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala)0
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala)0
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java2
-rw-r--r--src/library-aux/scala/Any.scala2
-rw-r--r--src/library-aux/scala/AnyRef.scala2
-rw-r--r--src/library-aux/scala/Nothing.scala2
-rw-r--r--src/library-aux/scala/Null.scala2
-rw-r--r--src/library/scala/Application.scala79
-rw-r--r--src/library/scala/Array.scala14
-rw-r--r--src/library/scala/Boolean.scala4
-rw-r--r--src/library/scala/Byte.scala2
-rw-r--r--src/library/scala/Char.scala2
-rw-r--r--src/library/scala/Console.scala344
-rw-r--r--src/library/scala/Double.scala4
-rw-r--r--src/library/scala/Enumeration.scala19
-rw-r--r--src/library/scala/Float.scala2
-rw-r--r--src/library/scala/Function0.scala2
-rw-r--r--src/library/scala/Function1.scala2
-rw-r--r--src/library/scala/Function10.scala2
-rw-r--r--src/library/scala/Function11.scala2
-rw-r--r--src/library/scala/Function12.scala2
-rw-r--r--src/library/scala/Function13.scala2
-rw-r--r--src/library/scala/Function14.scala2
-rw-r--r--src/library/scala/Function15.scala2
-rw-r--r--src/library/scala/Function16.scala2
-rw-r--r--src/library/scala/Function17.scala2
-rw-r--r--src/library/scala/Function18.scala2
-rw-r--r--src/library/scala/Function19.scala2
-rw-r--r--src/library/scala/Function2.scala2
-rw-r--r--src/library/scala/Function20.scala2
-rw-r--r--src/library/scala/Function21.scala2
-rw-r--r--src/library/scala/Function22.scala2
-rw-r--r--src/library/scala/Function3.scala2
-rw-r--r--src/library/scala/Function4.scala2
-rw-r--r--src/library/scala/Function5.scala2
-rw-r--r--src/library/scala/Function6.scala2
-rw-r--r--src/library/scala/Function7.scala2
-rw-r--r--src/library/scala/Function8.scala2
-rw-r--r--src/library/scala/Function9.scala2
-rw-r--r--src/library/scala/Int.scala2
-rw-r--r--src/library/scala/Long.scala2
-rw-r--r--src/library/scala/LowPriorityImplicits.scala2
-rw-r--r--src/library/scala/Option.scala11
-rw-r--r--src/library/scala/Predef.scala186
-rw-r--r--src/library/scala/Product.scala2
-rw-r--r--src/library/scala/Product1.scala2
-rw-r--r--src/library/scala/Product10.scala2
-rw-r--r--src/library/scala/Product11.scala2
-rw-r--r--src/library/scala/Product12.scala2
-rw-r--r--src/library/scala/Product13.scala2
-rw-r--r--src/library/scala/Product14.scala2
-rw-r--r--src/library/scala/Product15.scala2
-rw-r--r--src/library/scala/Product16.scala2
-rw-r--r--src/library/scala/Product17.scala2
-rw-r--r--src/library/scala/Product18.scala2
-rw-r--r--src/library/scala/Product19.scala2
-rw-r--r--src/library/scala/Product2.scala2
-rw-r--r--src/library/scala/Product20.scala2
-rw-r--r--src/library/scala/Product21.scala2
-rw-r--r--src/library/scala/Product22.scala2
-rw-r--r--src/library/scala/Product3.scala2
-rw-r--r--src/library/scala/Product4.scala2
-rw-r--r--src/library/scala/Product5.scala2
-rw-r--r--src/library/scala/Product6.scala2
-rw-r--r--src/library/scala/Product7.scala2
-rw-r--r--src/library/scala/Product8.scala2
-rw-r--r--src/library/scala/Product9.scala2
-rw-r--r--src/library/scala/SerialVersionUID.scala2
-rw-r--r--src/library/scala/Short.scala2
-rw-r--r--src/library/scala/Specializable.scala2
-rw-r--r--src/library/scala/Tuple1.scala2
-rw-r--r--src/library/scala/Tuple10.scala2
-rw-r--r--src/library/scala/Tuple11.scala2
-rw-r--r--src/library/scala/Tuple12.scala2
-rw-r--r--src/library/scala/Tuple13.scala2
-rw-r--r--src/library/scala/Tuple14.scala2
-rw-r--r--src/library/scala/Tuple15.scala2
-rw-r--r--src/library/scala/Tuple16.scala2
-rw-r--r--src/library/scala/Tuple17.scala2
-rw-r--r--src/library/scala/Tuple18.scala2
-rw-r--r--src/library/scala/Tuple19.scala2
-rw-r--r--src/library/scala/Tuple2.scala2
-rw-r--r--src/library/scala/Tuple20.scala2
-rw-r--r--src/library/scala/Tuple21.scala2
-rw-r--r--src/library/scala/Tuple22.scala2
-rw-r--r--src/library/scala/Tuple3.scala2
-rw-r--r--src/library/scala/Tuple4.scala2
-rw-r--r--src/library/scala/Tuple5.scala2
-rw-r--r--src/library/scala/Tuple6.scala2
-rw-r--r--src/library/scala/Tuple7.scala2
-rw-r--r--src/library/scala/Tuple8.scala2
-rw-r--r--src/library/scala/Tuple9.scala2
-rw-r--r--src/library/scala/UninitializedFieldError.scala6
-rw-r--r--src/library/scala/Unit.scala5
-rw-r--r--src/library/scala/annotation/serializable.scala15
-rw-r--r--src/library/scala/annotation/target/package.scala29
-rw-r--r--src/library/scala/beans/ScalaBeanInfo.scala6
-rw-r--r--src/library/scala/collection/BitSetLike.scala9
-rw-r--r--src/library/scala/collection/DefaultMap.scala10
-rw-r--r--src/library/scala/collection/GenIterableLike.scala2
-rw-r--r--src/library/scala/collection/GenIterableView.scala7
-rw-r--r--src/library/scala/collection/GenIterableViewLike.scala7
-rw-r--r--src/library/scala/collection/GenSeqView.scala7
-rw-r--r--src/library/scala/collection/GenTraversableLike.scala2
-rw-r--r--src/library/scala/collection/GenTraversableOnce.scala18
-rw-r--r--src/library/scala/collection/GenTraversableView.scala7
-rw-r--r--src/library/scala/collection/GenTraversableViewLike.scala2
-rw-r--r--src/library/scala/collection/IndexedSeq.scala7
-rw-r--r--src/library/scala/collection/IndexedSeqLike.scala6
-rwxr-xr-xsrc/library/scala/collection/IndexedSeqOptimized.scala20
-rw-r--r--src/library/scala/collection/Iterable.scala1
-rw-r--r--src/library/scala/collection/IterableLike.scala48
-rw-r--r--src/library/scala/collection/IterableProxy.scala2
-rw-r--r--src/library/scala/collection/IterableViewLike.scala1
-rw-r--r--src/library/scala/collection/Iterator.scala21
-rw-r--r--src/library/scala/collection/JavaConversions.scala38
-rwxr-xr-xsrc/library/scala/collection/JavaConverters.scala37
-rw-r--r--src/library/scala/collection/LinearSeqLike.scala8
-rwxr-xr-xsrc/library/scala/collection/LinearSeqOptimized.scala16
-rw-r--r--src/library/scala/collection/MapLike.scala4
-rw-r--r--src/library/scala/collection/MapProxyLike.scala2
-rw-r--r--src/library/scala/collection/Parallelizable.scala2
-rw-r--r--src/library/scala/collection/Searching.scala116
-rw-r--r--src/library/scala/collection/SeqLike.scala60
-rw-r--r--src/library/scala/collection/SeqProxyLike.scala2
-rw-r--r--src/library/scala/collection/SeqViewLike.scala1
-rw-r--r--src/library/scala/collection/Sequentializable.scala.disabled10
-rw-r--r--src/library/scala/collection/SetLike.scala8
-rw-r--r--src/library/scala/collection/SetProxyLike.scala3
-rw-r--r--src/library/scala/collection/SortedMap.scala4
-rw-r--r--src/library/scala/collection/SortedMapLike.scala31
-rw-r--r--src/library/scala/collection/SortedSetLike.scala10
-rw-r--r--src/library/scala/collection/Traversable.scala4
-rw-r--r--src/library/scala/collection/TraversableLike.scala25
-rw-r--r--src/library/scala/collection/TraversableOnce.scala29
-rw-r--r--src/library/scala/collection/TraversableView.scala1
-rw-r--r--src/library/scala/collection/TraversableViewLike.scala5
-rw-r--r--src/library/scala/collection/concurrent/TrieMap.scala16
-rw-r--r--src/library/scala/collection/convert/DecorateAsJava.scala22
-rw-r--r--src/library/scala/collection/convert/DecorateAsScala.scala19
-rw-r--r--src/library/scala/collection/convert/Decorators.scala2
-rw-r--r--src/library/scala/collection/convert/WrapAsJava.scala21
-rw-r--r--src/library/scala/collection/convert/WrapAsScala.scala44
-rw-r--r--src/library/scala/collection/convert/Wrappers.scala61
-rw-r--r--src/library/scala/collection/generic/GenMapFactory.scala2
-rw-r--r--src/library/scala/collection/generic/GenTraversableFactory.scala19
-rw-r--r--src/library/scala/collection/generic/GenericClassTagCompanion.scala6
-rw-r--r--src/library/scala/collection/generic/GenericCompanion.scala6
-rw-r--r--src/library/scala/collection/generic/GenericOrderedCompanion.scala6
-rw-r--r--src/library/scala/collection/generic/GenericTraversableTemplate.scala10
-rw-r--r--src/library/scala/collection/generic/Growable.scala17
-rw-r--r--src/library/scala/collection/generic/IndexedSeqFactory.scala (renamed from src/library/scala/ScalaObject.scala)17
-rw-r--r--src/library/scala/collection/generic/IsSeqLike.scala57
-rw-r--r--src/library/scala/collection/generic/IterableForwarder.scala5
-rw-r--r--src/library/scala/collection/generic/SeqForwarder.scala2
-rw-r--r--src/library/scala/collection/generic/Signalling.scala6
-rw-r--r--src/library/scala/collection/generic/Sorted.scala32
-rw-r--r--src/library/scala/collection/generic/SortedMapFactory.scala2
-rw-r--r--src/library/scala/collection/generic/SortedSetFactory.scala4
-rw-r--r--src/library/scala/collection/immutable/BitSet.scala9
-rwxr-xr-xsrc/library/scala/collection/immutable/DefaultMap.scala8
-rw-r--r--src/library/scala/collection/immutable/GenIterable.scala.disabled37
-rw-r--r--src/library/scala/collection/immutable/GenMap.scala.disabled36
-rw-r--r--src/library/scala/collection/immutable/GenSeq.scala.disabled49
-rw-r--r--src/library/scala/collection/immutable/GenSet.scala.disabled43
-rw-r--r--src/library/scala/collection/immutable/GenTraversable.scala.disabled41
-rw-r--r--src/library/scala/collection/immutable/HashMap.scala8
-rw-r--r--src/library/scala/collection/immutable/HashSet.scala4
-rw-r--r--src/library/scala/collection/immutable/IndexedSeq.scala5
-rw-r--r--src/library/scala/collection/immutable/IntMap.scala9
-rw-r--r--src/library/scala/collection/immutable/List.scala281
-rw-r--r--src/library/scala/collection/immutable/ListSet.scala10
-rw-r--r--src/library/scala/collection/immutable/LongMap.scala22
-rw-r--r--src/library/scala/collection/immutable/MapLike.scala2
-rw-r--r--src/library/scala/collection/immutable/NumericRange.scala32
-rw-r--r--src/library/scala/collection/immutable/PagedSeq.scala4
-rw-r--r--src/library/scala/collection/immutable/Range.scala25
-rw-r--r--src/library/scala/collection/immutable/RedBlack.scala293
-rw-r--r--src/library/scala/collection/immutable/RedBlackTree.scala146
-rw-r--r--src/library/scala/collection/immutable/SortedMap.scala10
-rw-r--r--src/library/scala/collection/immutable/Stream.scala47
-rw-r--r--src/library/scala/collection/immutable/StringLike.scala14
-rw-r--r--src/library/scala/collection/immutable/StringOps.scala2
-rw-r--r--src/library/scala/collection/immutable/TreeMap.scala13
-rw-r--r--src/library/scala/collection/immutable/TreeSet.scala10
-rw-r--r--src/library/scala/collection/immutable/TrieIterator.scala7
-rw-r--r--src/library/scala/collection/immutable/Vector.scala58
-rw-r--r--src/library/scala/collection/immutable/package.scala93
-rw-r--r--src/library/scala/collection/mutable/AVLTree.scala17
-rw-r--r--src/library/scala/collection/mutable/ArrayBuilder.scala3
-rw-r--r--src/library/scala/collection/mutable/ArrayLike.scala3
-rw-r--r--src/library/scala/collection/mutable/ArrayOps.scala16
-rw-r--r--src/library/scala/collection/mutable/ArraySeq.scala2
-rw-r--r--src/library/scala/collection/mutable/ArrayStack.scala4
-rw-r--r--src/library/scala/collection/mutable/BufferLike.scala4
-rw-r--r--src/library/scala/collection/mutable/BufferProxy.scala5
-rw-r--r--src/library/scala/collection/mutable/Builder.scala2
-rw-r--r--src/library/scala/collection/mutable/ConcurrentMap.scala90
-rw-r--r--src/library/scala/collection/mutable/DoubleLinkedList.scala2
-rw-r--r--src/library/scala/collection/mutable/FlatHashTable.scala123
-rw-r--r--src/library/scala/collection/mutable/GenIterable.scala.disabled37
-rw-r--r--src/library/scala/collection/mutable/GenMap.scala.disabled40
-rw-r--r--src/library/scala/collection/mutable/GenSeq.scala.disabled44
-rw-r--r--src/library/scala/collection/mutable/GenSet.scala.disabled46
-rw-r--r--src/library/scala/collection/mutable/GenTraversable.scala.disabled38
-rw-r--r--src/library/scala/collection/mutable/HashMap.scala10
-rw-r--r--src/library/scala/collection/mutable/HashSet.scala20
-rw-r--r--src/library/scala/collection/mutable/HashTable.scala20
-rw-r--r--src/library/scala/collection/mutable/History.scala8
-rw-r--r--src/library/scala/collection/mutable/IndexedSeqLike.scala3
-rwxr-xr-xsrc/library/scala/collection/mutable/IndexedSeqOptimized.scala3
-rw-r--r--src/library/scala/collection/mutable/IndexedSeqView.scala2
-rw-r--r--src/library/scala/collection/mutable/LinkedHashMap.scala6
-rw-r--r--src/library/scala/collection/mutable/LinkedHashSet.scala2
-rw-r--r--src/library/scala/collection/mutable/LinkedListLike.scala5
-rw-r--r--src/library/scala/collection/mutable/ListBuffer.scala52
-rw-r--r--src/library/scala/collection/mutable/MapLike.scala4
-rw-r--r--src/library/scala/collection/mutable/MutableList.scala5
-rw-r--r--src/library/scala/collection/mutable/ObservableBuffer.scala2
-rw-r--r--src/library/scala/collection/mutable/ObservableMap.scala2
-rw-r--r--src/library/scala/collection/mutable/ObservableSet.scala2
-rw-r--r--src/library/scala/collection/mutable/OpenHashMap.scala40
-rw-r--r--src/library/scala/collection/mutable/PriorityQueue.scala12
-rw-r--r--src/library/scala/collection/mutable/PriorityQueueProxy.scala12
-rw-r--r--src/library/scala/collection/mutable/Publisher.scala2
-rw-r--r--src/library/scala/collection/mutable/Queue.scala9
-rw-r--r--src/library/scala/collection/mutable/QueueProxy.scala4
-rw-r--r--src/library/scala/collection/mutable/RevertibleHistory.scala4
-rw-r--r--src/library/scala/collection/mutable/SeqLike.scala1
-rw-r--r--src/library/scala/collection/mutable/SetBuilder.scala3
-rw-r--r--src/library/scala/collection/mutable/SetLike.scala4
-rw-r--r--src/library/scala/collection/mutable/StackProxy.scala4
-rw-r--r--src/library/scala/collection/mutable/SynchronizedBuffer.scala2
-rw-r--r--src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala12
-rw-r--r--src/library/scala/collection/mutable/SynchronizedQueue.scala6
-rw-r--r--src/library/scala/collection/mutable/SynchronizedSet.scala4
-rw-r--r--src/library/scala/collection/mutable/SynchronizedStack.scala4
-rw-r--r--src/library/scala/collection/mutable/TreeSet.scala110
-rw-r--r--src/library/scala/collection/mutable/UnrolledBuffer.scala4
-rw-r--r--src/library/scala/collection/mutable/WrappedArrayBuilder.scala1
-rw-r--r--src/library/scala/collection/parallel/Combiner.scala2
-rw-r--r--src/library/scala/collection/parallel/ParIterable.scala1
-rw-r--r--src/library/scala/collection/parallel/ParIterableLike.scala117
-rw-r--r--src/library/scala/collection/parallel/ParIterableViewLike.scala7
-rw-r--r--src/library/scala/collection/parallel/ParMapLike.scala4
-rw-r--r--src/library/scala/collection/parallel/ParSeq.scala3
-rw-r--r--src/library/scala/collection/parallel/ParSeqLike.scala16
-rw-r--r--src/library/scala/collection/parallel/ParSeqView.scala3
-rw-r--r--src/library/scala/collection/parallel/ParSeqViewLike.scala8
-rw-r--r--src/library/scala/collection/parallel/ParSet.scala6
-rw-r--r--src/library/scala/collection/parallel/ParSetLike.scala8
-rw-r--r--src/library/scala/collection/parallel/RemainsIterator.scala90
-rw-r--r--src/library/scala/collection/parallel/Splitter.scala2
-rw-r--r--src/library/scala/collection/parallel/Tasks.scala83
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashMap.scala2
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashSet.scala2
-rw-r--r--src/library/scala/collection/parallel/immutable/ParIterable.scala2
-rw-r--r--src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled128
-rw-r--r--src/library/scala/collection/parallel/immutable/ParRange.scala5
-rw-r--r--src/library/scala/collection/parallel/immutable/ParSeq.scala3
-rw-r--r--src/library/scala/collection/parallel/immutable/ParSet.scala1
-rw-r--r--src/library/scala/collection/parallel/mutable/ParArray.scala33
-rw-r--r--src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala12
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashMap.scala28
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashSet.scala77
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashTable.scala2
-rw-r--r--src/library/scala/collection/parallel/mutable/ParIterable.scala2
-rw-r--r--src/library/scala/collection/parallel/mutable/ParMapLike.scala3
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSeq.scala6
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSet.scala5
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSetLike.scala6
-rw-r--r--src/library/scala/collection/parallel/mutable/ParTrieMap.scala2
-rw-r--r--src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala2
-rw-r--r--src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala5
-rw-r--r--src/library/scala/collection/parallel/package.scala2
-rw-r--r--src/library/scala/collection/script/Message.scala2
-rw-r--r--src/library/scala/concurrent/BatchingExecutor.scala117
-rw-r--r--src/library/scala/concurrent/Future.scala112
-rw-r--r--src/library/scala/concurrent/FutureTaskRunner.scala4
-rw-r--r--src/library/scala/concurrent/JavaConversions.scala32
-rw-r--r--src/library/scala/concurrent/TaskRunners.scala36
-rw-r--r--src/library/scala/concurrent/ThreadRunner.scala60
-rw-r--r--src/library/scala/concurrent/duration/Duration.scala4
-rw-r--r--src/library/scala/concurrent/impl/ExecutionContextImpl.scala29
-rw-r--r--src/library/scala/concurrent/impl/Future.scala2
-rw-r--r--src/library/scala/concurrent/impl/Promise.scala6
-rw-r--r--src/library/scala/concurrent/ops.scala73
-rw-r--r--src/library/scala/io/AnsiColor.scala53
-rw-r--r--src/library/scala/io/BufferedSource.scala2
-rw-r--r--src/library/scala/io/BytePickle.scala318
-rw-r--r--src/library/scala/io/Codec.scala55
-rw-r--r--src/library/scala/io/Position.scala8
-rw-r--r--src/library/scala/io/ReadStdin.scala228
-rw-r--r--src/library/scala/io/Source.scala12
-rw-r--r--src/library/scala/io/UTF8Codec.scala32
-rw-r--r--src/library/scala/math/BigDecimal.scala8
-rw-r--r--src/library/scala/math/BigInt.scala13
-rw-r--r--src/library/scala/math/Ordering.scala6
-rw-r--r--src/library/scala/math/ScalaNumericConversions.scala16
-rw-r--r--src/library/scala/package.scala13
-rw-r--r--src/library/scala/parallel/Future.scala39
-rw-r--r--src/library/scala/parallel/package.scala.disabled178
-rw-r--r--src/library/scala/ref/SoftReference.scala3
-rwxr-xr-xsrc/library/scala/reflect/NameTransformer.scala4
-rw-r--r--src/library/scala/runtime/AbstractFunction0.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction1.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction10.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction11.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction12.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction13.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction14.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction15.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction16.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction17.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction18.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction19.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction2.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction20.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction21.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction22.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction3.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction4.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction5.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction6.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction7.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction8.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction9.scala2
-rw-r--r--src/library/scala/runtime/Null$.scala5
-rw-r--r--src/library/scala/runtime/RichException.scala1
-rw-r--r--src/library/scala/runtime/ScalaNumberProxy.scala4
-rw-r--r--src/library/scala/runtime/ScalaRunTime.scala5
-rw-r--r--src/library/scala/runtime/SeqCharSequence.scala3
-rw-r--r--src/library/scala/runtime/StringAdd.scala1
-rw-r--r--src/library/scala/runtime/StringFormat.scala1
-rw-r--r--src/library/scala/runtime/Tuple2Zipped.scala26
-rw-r--r--src/library/scala/runtime/Tuple3Zipped.scala26
-rw-r--r--src/library/scala/sys/SystemProperties.scala1
-rw-r--r--src/library/scala/sys/process/BasicIO.scala2
-rw-r--r--src/library/scala/sys/process/ProcessBuilder.scala46
-rw-r--r--src/library/scala/sys/process/ProcessBuilderImpl.scala30
-rw-r--r--src/library/scala/sys/process/ProcessImpl.scala12
-rw-r--r--src/library/scala/sys/process/package.scala2
-rw-r--r--src/library/scala/testing/Benchmark.scala114
-rw-r--r--src/library/scala/testing/Show.scala75
-rw-r--r--src/library/scala/text/Document.scala2
-rw-r--r--src/library/scala/throws.scala2
-rw-r--r--src/library/scala/util/Either.scala6
-rw-r--r--src/library/scala/util/Marshal.scala50
-rw-r--r--src/library/scala/util/MurmurHash.scala2
-rw-r--r--src/library/scala/util/Random.scala4
-rw-r--r--src/library/scala/util/Try.scala2
-rw-r--r--src/library/scala/util/control/NonFatal.scala4
-rw-r--r--src/library/scala/util/grammar/HedgeRHS.scala26
-rw-r--r--src/library/scala/util/grammar/TreeRHS.scala22
-rw-r--r--src/library/scala/util/matching/Regex.scala110
-rw-r--r--src/library/scala/util/parsing/combinator/PackratParsers.scala1
-rw-r--r--src/library/scala/util/parsing/combinator/Parsers.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/lexical/Scanners.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/lexical/StdLexical.scala2
-rw-r--r--src/library/scala/util/parsing/combinator/testing/Tester.scala1
-rw-r--r--src/library/scala/util/parsing/input/OffsetPosition.scala2
-rw-r--r--src/library/scala/util/parsing/input/Position.scala4
-rw-r--r--src/library/scala/util/parsing/json/JSON.scala3
-rw-r--r--src/library/scala/util/parsing/json/Lexer.scala1
-rw-r--r--src/library/scala/util/parsing/json/Parser.scala1
-rw-r--r--src/library/scala/xml/Attribute.scala2
-rwxr-xr-xsrc/library/scala/xml/Elem.scala2
-rw-r--r--src/library/scala/xml/Equality.scala4
-rw-r--r--src/library/scala/xml/Group.scala2
-rw-r--r--src/library/scala/xml/NamespaceBinding.scala22
-rwxr-xr-xsrc/library/scala/xml/Node.scala12
-rw-r--r--src/library/scala/xml/NodeSeq.scala5
-rwxr-xr-xsrc/library/scala/xml/PrettyPrinter.scala5
-rwxr-xr-xsrc/library/scala/xml/Utility.scala22
-rwxr-xr-xsrc/library/scala/xml/XML.scala2
-rw-r--r--src/library/scala/xml/dtd/ContentModel.scala3
-rw-r--r--src/library/scala/xml/dtd/ContentModelParser.scala74
-rw-r--r--src/library/scala/xml/dtd/Decl.scala2
-rw-r--r--src/library/scala/xml/dtd/DocType.scala12
-rw-r--r--src/library/scala/xml/dtd/ElementValidator.scala16
-rw-r--r--src/library/scala/xml/dtd/ExternalID.scala14
-rw-r--r--src/library/scala/xml/dtd/Scanner.scala30
-rw-r--r--src/library/scala/xml/dtd/ValidationException.scala2
-rw-r--r--src/library/scala/xml/dtd/impl/Base.scala (renamed from src/library/scala/util/regexp/Base.scala)4
-rw-r--r--src/library/scala/xml/dtd/impl/BaseBerrySethi.scala (renamed from src/library/scala/util/automata/BaseBerrySethi.scala)5
-rw-r--r--src/library/scala/xml/dtd/impl/DetWordAutom.scala (renamed from src/library/scala/util/automata/DetWordAutom.scala)4
-rw-r--r--src/library/scala/xml/dtd/impl/Inclusion.scala (renamed from src/library/scala/util/automata/Inclusion.scala)4
-rw-r--r--src/library/scala/xml/dtd/impl/NondetWordAutom.scala (renamed from src/library/scala/util/automata/NondetWordAutom.scala)8
-rw-r--r--src/library/scala/xml/dtd/impl/PointedHedgeExp.scala (renamed from src/library/scala/util/regexp/PointedHedgeExp.scala)4
-rw-r--r--src/library/scala/xml/dtd/impl/SubsetConstruction.scala (renamed from src/library/scala/util/automata/SubsetConstruction.scala)6
-rw-r--r--src/library/scala/xml/dtd/impl/SyntaxError.scala (renamed from src/library/scala/util/regexp/SyntaxError.scala)4
-rw-r--r--src/library/scala/xml/dtd/impl/WordBerrySethi.scala (renamed from src/library/scala/util/automata/WordBerrySethi.scala)9
-rw-r--r--src/library/scala/xml/dtd/impl/WordExp.scala (renamed from src/library/scala/util/regexp/WordExp.scala)4
-rwxr-xr-xsrc/library/scala/xml/factory/Binder.scala2
-rw-r--r--src/library/scala/xml/factory/LoggedNodeFactory.scala10
-rw-r--r--src/library/scala/xml/factory/XMLLoader.scala4
-rw-r--r--src/library/scala/xml/include/sax/EncodingHeuristics.scala2
-rw-r--r--src/library/scala/xml/include/sax/Main.scala82
-rw-r--r--src/library/scala/xml/include/sax/XIncludeFilter.scala18
-rw-r--r--src/library/scala/xml/include/sax/XIncluder.scala32
-rw-r--r--src/library/scala/xml/parsing/FactoryAdapter.scala10
-rwxr-xr-xsrc/library/scala/xml/parsing/MarkupParser.scala220
-rw-r--r--src/library/scala/xml/parsing/MarkupParserCommon.scala31
-rw-r--r--src/library/scala/xml/parsing/ValidatingMarkupHandler.scala6
-rw-r--r--src/library/scala/xml/parsing/XhtmlParser.scala2
-rw-r--r--src/library/scala/xml/persistent/CachedFileStorage.scala12
-rw-r--r--src/library/scala/xml/persistent/SetStorage.scala6
-rwxr-xr-xsrc/library/scala/xml/pull/XMLEventReader.scala6
-rw-r--r--src/library/scala/xml/transform/BasicTransformer.scala2
-rw-r--r--src/library/scala/xml/transform/RewriteRule.scala4
-rw-r--r--src/manual/scala/man1/scalac.scala24
-rw-r--r--src/manual/scala/tools/docutil/resources/index.html4
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/Assembly.java253
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/AssemblyName.java96
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/Attribute.java654
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/BindingFlags.java169
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/CallingConventions.java75
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/ConstructedType.java48
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/ConstructorInfo.java54
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/CustomAttributeProvider.java82
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/CustomModifier.java45
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/EventAttributes.java32
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java58
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/FieldAttributes.java119
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java141
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/GenericParamAndConstraints.java40
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/HasCustomModifiers.java9
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/ICustomAttributeProvider.java57
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/MemberInfo.java47
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/MemberTypes.java81
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/MethodAttributes.java158
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/MethodBase.java198
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/MethodImplAttributes.java116
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/MethodInfo.java69
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/Module.java155
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PEAssembly.java69
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PEFile.java941
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PEModule.java456
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PEType.java419
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/ParameterAttributes.java72
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/ParameterInfo.java76
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PrimitiveType.java62
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PropertyAttributes.java45
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PropertyInfo.java104
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/Type.java1142
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/TypeAttributes.java190
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/Version.java71
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala125
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala64
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala60
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala18
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala539
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala861
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala148
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/LocalBuilder.scala44
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala70
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala136
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala137
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala1948
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala1205
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala44
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala93
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala261
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/Visitable.scala24
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/Visitor.scala58
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/tests/CustomAttributesTest.java31
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/tests/JavaTypeTest.java18
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/tests/MembersTest.java100
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/tests/TableDump.java311
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/tests/Test.java92
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/util/PECustomMod.java23
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/util/PESection.java57
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/util/PEStream.java199
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/util/Signature.java129
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/util/Table.java1859
-rw-r--r--src/partest/README1
-rw-r--r--src/partest/scala/tools/partest/CompilerTest.scala5
-rw-r--r--src/partest/scala/tools/partest/DirectTest.scala5
-rw-r--r--src/partest/scala/tools/partest/JavapTest.scala26
-rw-r--r--src/partest/scala/tools/partest/PartestDefaults.scala2
-rw-r--r--src/partest/scala/tools/partest/PartestTask.scala10
-rw-r--r--src/partest/scala/tools/partest/SecurityTest.scala13
-rw-r--r--src/partest/scala/tools/partest/TestUtil.scala10
-rw-r--r--src/partest/scala/tools/partest/instrumented/Instrumentation.scala1
-rw-r--r--src/partest/scala/tools/partest/nest/CompileManager.scala48
-rw-r--r--src/partest/scala/tools/partest/nest/ConsoleFileManager.scala25
-rw-r--r--src/partest/scala/tools/partest/nest/ConsoleRunner.scala15
-rw-r--r--src/partest/scala/tools/partest/nest/DirectRunner.scala27
-rw-r--r--src/partest/scala/tools/partest/nest/FileManager.scala21
-rw-r--r--src/partest/scala/tools/partest/nest/NestUI.scala11
-rw-r--r--src/partest/scala/tools/partest/nest/PathSettings.scala1
-rw-r--r--src/partest/scala/tools/partest/nest/ReflectiveRunner.scala30
-rw-r--r--src/partest/scala/tools/partest/nest/RunnerManager.scala160
-rw-r--r--src/partest/scala/tools/partest/nest/RunnerUtils.scala29
-rw-r--r--src/partest/scala/tools/partest/nest/TestFile.scala1
-rw-r--r--src/partest/scala/tools/partest/package.scala10
-rw-r--r--src/partest/scala/tools/partest/utils/PrintMgr.scala52
-rw-r--r--src/reflect/scala/reflect/api/BuildUtils.scala2
-rw-r--r--src/reflect/scala/reflect/api/ImplicitTags.scala108
-rw-r--r--src/reflect/scala/reflect/api/Mirrors.scala10
-rw-r--r--src/reflect/scala/reflect/api/Names.scala34
-rw-r--r--src/reflect/scala/reflect/api/Printers.scala28
-rw-r--r--src/reflect/scala/reflect/api/StandardNames.scala5
-rw-r--r--src/reflect/scala/reflect/api/Symbols.scala2
-rw-r--r--src/reflect/scala/reflect/api/Trees.scala50
-rw-r--r--src/reflect/scala/reflect/api/Types.scala105
-rw-r--r--src/reflect/scala/reflect/internal/AnnotationCheckers.scala8
-rw-r--r--src/reflect/scala/reflect/internal/AnnotationInfos.scala18
-rw-r--r--src/reflect/scala/reflect/internal/BaseTypeSeqs.scala18
-rw-r--r--src/reflect/scala/reflect/internal/BuildUtils.scala4
-rw-r--r--src/reflect/scala/reflect/internal/CapturedVariables.scala2
-rw-r--r--src/reflect/scala/reflect/internal/ClassfileConstants.scala11
-rw-r--r--src/reflect/scala/reflect/internal/Constants.scala2
-rw-r--r--src/reflect/scala/reflect/internal/Definitions.scala280
-rw-r--r--src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala1
-rw-r--r--src/reflect/scala/reflect/internal/Flags.scala25
-rw-r--r--src/reflect/scala/reflect/internal/HasFlags.scala5
-rw-r--r--src/reflect/scala/reflect/internal/Importers.scala11
-rw-r--r--src/reflect/scala/reflect/internal/InfoTransformers.scala2
-rw-r--r--src/reflect/scala/reflect/internal/Kinds.scala10
-rw-r--r--src/reflect/scala/reflect/internal/Mirrors.scala26
-rw-r--r--src/reflect/scala/reflect/internal/Mode.scala (renamed from src/compiler/scala/tools/nsc/typechecker/Modes.scala)107
-rw-r--r--src/reflect/scala/reflect/internal/Names.scala103
-rw-r--r--src/reflect/scala/reflect/internal/Positions.scala275
-rw-r--r--src/reflect/scala/reflect/internal/Printers.scala51
-rw-r--r--src/reflect/scala/reflect/internal/PrivateWithin.scala23
-rw-r--r--src/reflect/scala/reflect/internal/Required.scala7
-rw-r--r--src/reflect/scala/reflect/internal/Scopes.scala125
-rw-r--r--src/reflect/scala/reflect/internal/StdAttachments.scala13
-rw-r--r--src/reflect/scala/reflect/internal/StdNames.scala243
-rw-r--r--src/reflect/scala/reflect/internal/SymbolTable.scala73
-rw-r--r--src/reflect/scala/reflect/internal/Symbols.scala652
-rw-r--r--src/reflect/scala/reflect/internal/TreeGen.scala47
-rw-r--r--src/reflect/scala/reflect/internal/TreeInfo.scala158
-rw-r--r--src/reflect/scala/reflect/internal/Trees.scala126
-rw-r--r--src/reflect/scala/reflect/internal/TypeDebugging.scala3
-rw-r--r--src/reflect/scala/reflect/internal/Types.scala3781
-rw-r--r--src/reflect/scala/reflect/internal/Variance.scala90
-rw-r--r--src/reflect/scala/reflect/internal/Variances.scala196
-rw-r--r--src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala24
-rw-r--r--src/reflect/scala/reflect/internal/pickling/PickleFormat.scala5
-rw-r--r--src/reflect/scala/reflect/internal/pickling/UnPickler.scala38
-rw-r--r--src/reflect/scala/reflect/internal/settings/MutableSettings.scala4
-rw-r--r--src/reflect/scala/reflect/internal/tpe/CommonOwners.scala50
-rw-r--r--src/reflect/scala/reflect/internal/tpe/GlbLubs.scala592
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeComparers.scala617
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala282
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeMaps.scala1144
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala29
-rw-r--r--src/reflect/scala/reflect/internal/transform/Erasure.scala23
-rw-r--r--src/reflect/scala/reflect/internal/transform/UnCurry.scala10
-rw-r--r--src/reflect/scala/reflect/internal/util/Collections.scala32
-rw-r--r--src/reflect/scala/reflect/internal/util/HashSet.scala2
-rw-r--r--src/reflect/scala/reflect/internal/util/Origins.scala2
-rw-r--r--src/reflect/scala/reflect/internal/util/Position.scala45
-rw-r--r--src/reflect/scala/reflect/internal/util/RangePosition.scala49
-rw-r--r--src/reflect/scala/reflect/internal/util/Set.scala2
-rw-r--r--src/reflect/scala/reflect/internal/util/SourceFile.scala6
-rw-r--r--src/reflect/scala/reflect/internal/util/Statistics.scala1
-rw-r--r--src/reflect/scala/reflect/internal/util/StringOps.scala40
-rw-r--r--src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala2
-rw-r--r--src/reflect/scala/reflect/internal/util/TableDef.scala10
-rw-r--r--src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala40
-rw-r--r--src/reflect/scala/reflect/internal/util/WeakHashSet.scala3
-rw-r--r--src/reflect/scala/reflect/internal/util/package.scala31
-rw-r--r--src/reflect/scala/reflect/io/AbstractFile.scala84
-rw-r--r--src/reflect/scala/reflect/io/Directory.scala15
-rw-r--r--src/reflect/scala/reflect/io/File.scala86
-rw-r--r--src/reflect/scala/reflect/io/NoAbstractFile.scala1
-rw-r--r--src/reflect/scala/reflect/io/Path.scala58
-rw-r--r--src/reflect/scala/reflect/io/PlainFile.scala22
-rw-r--r--src/reflect/scala/reflect/io/Streamable.scala19
-rw-r--r--src/reflect/scala/reflect/io/VirtualDirectory.scala17
-rw-r--r--src/reflect/scala/reflect/io/VirtualFile.scala29
-rw-r--r--src/reflect/scala/reflect/io/ZipArchive.scala87
-rw-r--r--src/reflect/scala/reflect/macros/Attachments.scala2
-rw-r--r--src/reflect/scala/reflect/macros/Context.scala5
-rw-r--r--src/reflect/scala/reflect/macros/Enclosures.scala65
-rw-r--r--src/reflect/scala/reflect/macros/Names.scala14
-rw-r--r--src/reflect/scala/reflect/macros/Parsers.scala2
-rw-r--r--src/reflect/scala/reflect/macros/Reifiers.scala26
-rw-r--r--src/reflect/scala/reflect/macros/Synthetics.scala106
-rw-r--r--src/reflect/scala/reflect/macros/TreeBuilder.scala19
-rw-r--r--src/reflect/scala/reflect/macros/Typers.scala2
-rw-r--r--src/reflect/scala/reflect/macros/Universe.scala4
-rw-r--r--src/reflect/scala/reflect/runtime/JavaMirrors.scala122
-rw-r--r--src/reflect/scala/reflect/runtime/JavaUniverse.scala12
-rw-r--r--src/reflect/scala/reflect/runtime/ReflectionUtils.scala2
-rw-r--r--src/reflect/scala/reflect/runtime/Settings.scala4
-rw-r--r--src/reflect/scala/reflect/runtime/SymbolLoaders.scala12
-rw-r--r--src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala6
-rw-r--r--src/reflect/scala/reflect/runtime/package.scala2
-rw-r--r--src/repl/scala/tools/nsc/Interpreter.scala (renamed from src/compiler/scala/tools/nsc/Interpreter.scala)0
-rw-r--r--src/repl/scala/tools/nsc/InterpreterLoop.scala (renamed from src/compiler/scala/tools/nsc/InterpreterLoop.scala)0
-rw-r--r--src/repl/scala/tools/nsc/MainGenericRunner.scala (renamed from src/compiler/scala/tools/nsc/MainGenericRunner.scala)5
-rw-r--r--src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala7
-rw-r--r--src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala (renamed from src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala)0
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ByteCode.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ByteCode.scala)31
-rw-r--r--src/repl/scala/tools/nsc/interpreter/CommandLine.scala (renamed from src/compiler/scala/tools/nsc/interpreter/CommandLine.scala)1
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Completion.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Completion.scala)2
-rw-r--r--src/repl/scala/tools/nsc/interpreter/CompletionAware.scala (renamed from src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala)30
-rw-r--r--src/repl/scala/tools/nsc/interpreter/CompletionOutput.scala (renamed from src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala)3
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala)7
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Delimited.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Delimited.scala)3
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ExprTyper.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala)26
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Formatting.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Formatting.scala)0
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ILoop.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ILoop.scala)381
-rw-r--r--src/repl/scala/tools/nsc/interpreter/IMain.scala (renamed from src/compiler/scala/tools/nsc/interpreter/IMain.scala)705
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ISettings.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ISettings.scala)11
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Imports.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Imports.scala)80
-rw-r--r--src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala (renamed from src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala)10
-rw-r--r--src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala (renamed from src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala)32
-rw-r--r--src/repl/scala/tools/nsc/interpreter/JLineReader.scala (renamed from src/compiler/scala/tools/nsc/interpreter/JLineReader.scala)12
-rw-r--r--src/repl/scala/tools/nsc/interpreter/JavapClass.scala693
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Logger.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Logger.scala)4
-rw-r--r--src/repl/scala/tools/nsc/interpreter/LoopCommands.scala (renamed from src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala)27
-rw-r--r--src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala (renamed from src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala)101
-rw-r--r--src/repl/scala/tools/nsc/interpreter/NamedParam.scala (renamed from src/compiler/scala/tools/nsc/interpreter/NamedParam.scala)5
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Naming.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Naming.scala)13
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Parsed.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Parsed.scala)9
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Pasted.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Pasted.scala)0
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Phased.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Phased.scala)23
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Power.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Power.scala)136
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplConfig.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala)12
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplDir.scala48
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala)11
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplProps.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ReplProps.scala)4
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplReporter.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala)0
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplStrings.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala)3
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplVals.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ReplVals.scala)1
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Results.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Results.scala)0
-rw-r--r--src/repl/scala/tools/nsc/interpreter/RichClass.scala (renamed from src/compiler/scala/tools/nsc/interpreter/RichClass.scala)5
-rw-r--r--src/repl/scala/tools/nsc/interpreter/SimpleReader.scala (renamed from src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala)5
-rw-r--r--src/repl/scala/tools/nsc/interpreter/StdReplTags.scala15
-rw-r--r--src/repl/scala/tools/nsc/interpreter/package.scala157
-rw-r--r--src/repl/scala/tools/nsc/interpreter/session/FileBackedHistory.scala (renamed from src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala)0
-rw-r--r--src/repl/scala/tools/nsc/interpreter/session/History.scala (renamed from src/compiler/scala/tools/nsc/interpreter/session/History.scala)6
-rw-r--r--src/repl/scala/tools/nsc/interpreter/session/JLineHistory.scala (renamed from src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala)0
-rw-r--r--src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala (renamed from src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala)6
-rw-r--r--src/repl/scala/tools/nsc/interpreter/session/package.scala (renamed from src/compiler/scala/tools/nsc/interpreter/session/package.scala)0
-rw-r--r--src/scalacheck/org/scalacheck/Commands.scala5
-rw-r--r--src/scaladoc/scala/tools/ant/Scaladoc.scala (renamed from src/compiler/scala/tools/ant/Scaladoc.scala)4
-rw-r--r--src/scaladoc/scala/tools/nsc/ScalaDoc.scala (renamed from src/compiler/scala/tools/nsc/ScalaDoc.scala)11
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/DocFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/DocFactory.scala)20
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/DocParser.scala (renamed from src/compiler/scala/tools/nsc/doc/DocParser.scala)9
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/Index.scala (renamed from src/compiler/scala/tools/nsc/doc/Index.scala)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala234
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala49
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/Settings.scala (renamed from src/compiler/scala/tools/nsc/doc/Settings.scala)10
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala (renamed from src/compiler/scala/tools/nsc/doc/Uncompilable.scala)2
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/Universe.scala (renamed from src/compiler/scala/tools/nsc/doc/Universe.scala)0
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala (renamed from src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala)57
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala (renamed from src/compiler/scala/tools/nsc/doc/base/LinkTo.scala)0
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala (renamed from src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala)2
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala (renamed from src/compiler/scala/tools/nsc/doc/base/comment/Body.scala)2
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala (renamed from src/compiler/scala/tools/nsc/doc/base/comment/Comment.scala)3
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala (renamed from src/compiler/scala/tools/nsc/doc/doclet/Generator.scala)2
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/doclet/Indexer.scala (renamed from src/compiler/scala/tools/nsc/doc/doclet/Indexer.scala)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/doclet/Universer.scala (renamed from src/compiler/scala/tools/nsc/doc/doclet/Universer.scala)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala (renamed from src/compiler/scala/tools/nsc/doc/html/Doclet.scala)2
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala)4
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala (renamed from src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala)4
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/Page.scala (renamed from src/compiler/scala/tools/nsc/doc/html/Page.scala)10
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala (renamed from src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala)32
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/Index.scala)2
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala)1
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/Source.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/Source.scala)3
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/Template.scala)39
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala)17
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala)7
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-down.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-down.png)bin6232 -> 6232 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-right.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-right.png)bin6220 -> 6220 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/class.png)bin3357 -> 3357 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/class_big.png)bin7516 -> 7516 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_diagram.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png)bin3910 -> 3910 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png)bin9006 -> 9006 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif)bin1206 -> 1206 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif)bin167 -> 167 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif)bin1544 -> 1544 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif)bin1341 -> 1341 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png)bin1692 -> 1692 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd)bin30823 -> 30823 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif)bin1462 -> 1462 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png)bin1803 -> 1803 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd)bin31295 -> 31295 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filterbg.gif)bin1324 -> 1324 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif)bin1104 -> 1104 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png)bin965 -> 965 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif)bin1366 -> 1366 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif)bin1115 -> 1115 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js)0
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png)bin1198 -> 1198 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png)bin2441 -> 2441 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png)bin3356 -> 3356 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png)bin7653 -> 7653 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_diagram.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png)bin3903 -> 3903 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png)bin9158 -> 9158 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png)bin9200 -> 9200 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png)bin9158 -> 9158 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif)bin1145 -> 1145 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif)bin1118 -> 1118 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif)bin1145 -> 1145 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png)bin3335 -> 3335 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png)bin7312 -> 7312 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif)bin1201 -> 1201 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/raphael-min.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js)0
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/html/resource/lib/ref-index.css (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png)bin3186 -> 3186 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.psd (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd)bin28904 -> 28904 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/scheduler.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png)bin1150 -> 1150 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png)bin646 -> 646 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png)bin1380 -> 1380 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png)bin1864 -> 1864 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2-right.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png)bin1434 -> 1434 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png)bin1965 -> 1965 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif)bin1214 -> 1214 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif)bin1209 -> 1209 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png)bin3374 -> 3374 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png)bin7410 -> 7410 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png)bin3882 -> 3882 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png)bin8967 -> 8967 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png)bin1445 -> 1445 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png)bin4236 -> 4236 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_diagram.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png)bin1841 -> 1841 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_tags.ai (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png)bin4969 -> 4969 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/typebg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif)bin1206 -> 1206 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/unselected.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png)bin1879 -> 1879 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif)bin1206 -> 1206 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/versions.txt (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala)2
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/Entity.scala (renamed from src/compiler/scala/tools/nsc/doc/model/Entity.scala)30
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala)2
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala (renamed from src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala)191
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala (renamed from src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala)118
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala (renamed from src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala)11
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala (renamed from src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala)0
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala)4
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala (renamed from src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala (renamed from src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala (renamed from src/compiler/scala/tools/nsc/doc/model/Visibility.scala)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala (renamed from src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala)11
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala (renamed from src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala)10
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala)3
-rw-r--r--src/scaladoc/scala/tools/partest/ScaladocModelTest.scala (renamed from src/partest/scala/tools/partest/ScaladocModelTest.scala)12
-rw-r--r--src/scalap/decoder.properties2
-rw-r--r--src/scalap/scala/tools/scalap/Arguments.scala2
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/Rule.scala2
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala52
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala9
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala1
-rw-r--r--src/swing/scala/swing/Button.scala3
-rw-r--r--src/swing/scala/swing/ButtonGroup.scala4
-rw-r--r--src/swing/scala/swing/ColorChooser.scala45
-rw-r--r--src/swing/scala/swing/ComboBox.scala2
-rw-r--r--src/swing/scala/swing/EditorPane.scala3
-rw-r--r--src/swing/scala/swing/Font.scala.disabled70
-rw-r--r--src/swing/scala/swing/FormattedTextField.scala4
-rw-r--r--src/swing/scala/swing/ListView.scala2
-rw-r--r--src/swing/scala/swing/MainFrame.scala4
-rw-r--r--src/swing/scala/swing/PasswordField.scala4
-rw-r--r--src/swing/scala/swing/PopupMenu.scala65
-rw-r--r--src/swing/scala/swing/ProgressBar.scala4
-rw-r--r--src/swing/scala/swing/Reactions.scala2
-rw-r--r--src/swing/scala/swing/SplitPane.scala3
-rw-r--r--src/swing/scala/swing/SwingWorker.scala23
-rw-r--r--src/swing/scala/swing/TabbedPane.scala3
-rw-r--r--src/swing/scala/swing/TextArea.scala4
-rw-r--r--src/swing/scala/swing/TextComponent.scala3
-rw-r--r--src/swing/scala/swing/ToggleButton.scala3
-rw-r--r--src/swing/scala/swing/Window.scala3
-rw-r--r--src/swing/scala/swing/event/ColorChanged.scala (renamed from src/swing/scala/swing/SwingActor.scala)5
-rw-r--r--src/swing/scala/swing/event/PopupMenuEvent.scala (renamed from src/library/scala/SpecializableCompanion.scala)16
-rw-r--r--test/attic/files/cli/test1/Main.check.j9vm54
-rw-r--r--test/attic/files/cli/test1/Main.check.java6
-rw-r--r--test/attic/files/cli/test1/Main.check.java56
-rw-r--r--test/attic/files/cli/test1/Main.check.java5_api19
-rw-r--r--test/attic/files/cli/test1/Main.check.java5_j94
-rw-r--r--test/attic/files/cli/test1/Main.check.javac19
-rw-r--r--test/attic/files/cli/test1/Main.check.javac524
-rw-r--r--test/attic/files/cli/test1/Main.check.javac629
-rw-r--r--test/attic/files/cli/test1/Main.check.jikes3
-rw-r--r--test/attic/files/cli/test1/Main.check.jikes53
-rw-r--r--test/attic/files/cli/test1/Main.check.scala24
-rw-r--r--test/attic/files/cli/test1/Main.check.scala_api33
-rw-r--r--test/attic/files/cli/test1/Main.check.scala_j915
-rw-r--r--test/attic/files/cli/test1/Main.check.scalac63
-rw-r--r--test/attic/files/cli/test1/Main.check.scalaint45
-rw-r--r--test/attic/files/cli/test1/Main.java8
-rw-r--r--test/attic/files/cli/test1/Main.scala8
-rw-r--r--test/attic/files/cli/test2/Main.check.j9vm54
-rw-r--r--test/attic/files/cli/test2/Main.check.java6
-rw-r--r--test/attic/files/cli/test2/Main.check.java56
-rw-r--r--test/attic/files/cli/test2/Main.check.java5_api24
-rw-r--r--test/attic/files/cli/test2/Main.check.java5_j936
-rw-r--r--test/attic/files/cli/test2/Main.check.javac27
-rw-r--r--test/attic/files/cli/test2/Main.check.javac528
-rw-r--r--test/attic/files/cli/test2/Main.check.javac633
-rw-r--r--test/attic/files/cli/test2/Main.check.jikes9
-rw-r--r--test/attic/files/cli/test2/Main.check.jikes59
-rw-r--r--test/attic/files/cli/test2/Main.check.scala24
-rw-r--r--test/attic/files/cli/test2/Main.check.scala_api37
-rw-r--r--test/attic/files/cli/test2/Main.check.scala_j915
-rw-r--r--test/attic/files/cli/test2/Main.check.scalac63
-rw-r--r--test/attic/files/cli/test2/Main.check.scalaint45
-rw-r--r--test/attic/files/cli/test2/Main.java8
-rw-r--r--test/attic/files/cli/test2/Main.scala8
-rw-r--r--test/attic/files/cli/test3/Main.check.j9vm55
-rw-r--r--test/attic/files/cli/test3/Main.check.java10
-rw-r--r--test/attic/files/cli/test3/Main.check.java510
-rw-r--r--test/attic/files/cli/test3/Main.check.java5_api29
-rw-r--r--test/attic/files/cli/test3/Main.check.java5_j936
-rw-r--r--test/attic/files/cli/test3/Main.check.javac33
-rw-r--r--test/attic/files/cli/test3/Main.check.javac531
-rw-r--r--test/attic/files/cli/test3/Main.check.javac636
-rw-r--r--test/attic/files/cli/test3/Main.check.jikes14
-rw-r--r--test/attic/files/cli/test3/Main.check.jikes514
-rw-r--r--test/attic/files/cli/test3/Main.check.scala28
-rw-r--r--test/attic/files/cli/test3/Main.check.scala_api41
-rw-r--r--test/attic/files/cli/test3/Main.check.scala_j919
-rw-r--r--test/attic/files/cli/test3/Main.check.scalac63
-rw-r--r--test/attic/files/cli/test3/Main.check.scalaint48
-rw-r--r--test/attic/files/cli/test3/Main.java10
-rw-r--r--test/attic/files/cli/test3/Main.scala10
-rw-r--r--test/disabled/buildmanager/overloaded_1/A.scala (renamed from test/files/disabled/A.scala)0
-rw-r--r--test/disabled/buildmanager/overloaded_1/overloaded_1.check (renamed from test/files/disabled/overloaded_1.check)0
-rw-r--r--test/disabled/buildmanager/overloaded_1/overloaded_1.test (renamed from test/files/disabled/overloaded_1.test)0
-rw-r--r--test/disabled/buildmanager/t4245/A.scala (renamed from test/files/disabled/t4245/A.scala)0
-rw-r--r--test/disabled/buildmanager/t4245/t4245.check (renamed from test/files/disabled/t4245/t4245.check)0
-rw-r--r--test/disabled/buildmanager/t4245/t4245.test (renamed from test/files/disabled/t4245/t4245.test)0
-rw-r--r--test/disabled/pos/spec-List.scala2
-rw-r--r--test/disabled/presentation/akka.flags4
-rw-r--r--test/disabled/presentation/akka/src/akka/dispatch/Dispatchers.scala16
-rw-r--r--test/disabled/presentation/simple-tests.check2
-rw-r--r--test/disabled/presentation/simple-tests.opts4
-rw-r--r--test/disabled/run/t4146.scala (renamed from test/files/run/t4146.scala)2
-rw-r--r--test/files/ant/imported.xml5
-rw-r--r--test/files/bench/equality/eqeq.eqlog84
-rw-r--r--test/files/buildmanager/annotated/A.scala1
-rw-r--r--test/files/buildmanager/annotated/annotated.check6
-rw-r--r--test/files/buildmanager/annotated/annotated.test2
-rw-r--r--test/files/buildmanager/freshnames/A.scala16
-rw-r--r--test/files/buildmanager/freshnames/B.scala4
-rw-r--r--test/files/buildmanager/freshnames/freshnames.check6
-rw-r--r--test/files/buildmanager/freshnames/freshnames.test2
-rw-r--r--test/files/buildmanager/infer/A.scala16
-rw-r--r--test/files/buildmanager/infer/infer.check6
-rw-r--r--test/files/buildmanager/infer/infer.test2
-rw-r--r--test/files/buildmanager/namesdefaults/defparam-use.scala5
-rw-r--r--test/files/buildmanager/namesdefaults/defparam.scala7
-rw-r--r--test/files/buildmanager/namesdefaults/namesdefaults.check9
-rw-r--r--test/files/buildmanager/namesdefaults/namesdefaults.test3
-rw-r--r--test/files/buildmanager/simpletest/A.scala3
-rw-r--r--test/files/buildmanager/simpletest/B.scala3
-rw-r--r--test/files/buildmanager/simpletest/simpletest.changes/A1.scala1
-rw-r--r--test/files/buildmanager/simpletest/simpletest.check11
-rw-r--r--test/files/buildmanager/simpletest/simpletest.test3
-rw-r--r--test/files/buildmanager/t2280/A.scala1
-rw-r--r--test/files/buildmanager/t2280/B.java2
-rw-r--r--test/files/buildmanager/t2280/t2280.check6
-rw-r--r--test/files/buildmanager/t2280/t2280.test2
-rw-r--r--test/files/buildmanager/t2556_1/A.scala3
-rw-r--r--test/files/buildmanager/t2556_1/B.scala3
-rw-r--r--test/files/buildmanager/t2556_1/t2556_1.changes/A2.scala4
-rw-r--r--test/files/buildmanager/t2556_1/t2556_1.check12
-rw-r--r--test/files/buildmanager/t2556_1/t2556_1.test3
-rw-r--r--test/files/buildmanager/t2556_2/A.scala4
-rw-r--r--test/files/buildmanager/t2556_2/B.scala2
-rw-r--r--test/files/buildmanager/t2556_2/C.scala4
-rw-r--r--test/files/buildmanager/t2556_2/t2556_2.changes/A2.scala4
-rw-r--r--test/files/buildmanager/t2556_2/t2556_2.check13
-rw-r--r--test/files/buildmanager/t2556_2/t2556_2.test3
-rw-r--r--test/files/buildmanager/t2556_3/A.scala5
-rw-r--r--test/files/buildmanager/t2556_3/B.scala5
-rw-r--r--test/files/buildmanager/t2556_3/C.scala2
-rw-r--r--test/files/buildmanager/t2556_3/t2556_3.changes/A2.scala5
-rw-r--r--test/files/buildmanager/t2556_3/t2556_3.check18
-rw-r--r--test/files/buildmanager/t2556_3/t2556_3.test3
-rw-r--r--test/files/buildmanager/t2557/A.scala4
-rw-r--r--test/files/buildmanager/t2557/B.scala4
-rw-r--r--test/files/buildmanager/t2557/C.scala3
-rw-r--r--test/files/buildmanager/t2557/D.scala1
-rw-r--r--test/files/buildmanager/t2557/E.scala1
-rw-r--r--test/files/buildmanager/t2557/F.scala4
-rw-r--r--test/files/buildmanager/t2557/t2557.changes/D2.scala2
-rw-r--r--test/files/buildmanager/t2557/t2557.check10
-rw-r--r--test/files/buildmanager/t2557/t2557.test3
-rw-r--r--test/files/buildmanager/t2559/A.scala5
-rw-r--r--test/files/buildmanager/t2559/D.scala4
-rw-r--r--test/files/buildmanager/t2559/t2559.changes/A2.scala5
-rw-r--r--test/files/buildmanager/t2559/t2559.check9
-rw-r--r--test/files/buildmanager/t2559/t2559.test3
-rw-r--r--test/files/buildmanager/t2562/A.scala7
-rw-r--r--test/files/buildmanager/t2562/B.scala8
-rw-r--r--test/files/buildmanager/t2562/t2562.changes/A2.scala8
-rw-r--r--test/files/buildmanager/t2562/t2562.check12
-rw-r--r--test/files/buildmanager/t2562/t2562.test3
-rw-r--r--test/files/buildmanager/t2649/A.scala3
-rw-r--r--test/files/buildmanager/t2649/B.scala4
-rw-r--r--test/files/buildmanager/t2649/t2649.changes/A2.scala4
-rw-r--r--test/files/buildmanager/t2649/t2649.check9
-rw-r--r--test/files/buildmanager/t2649/t2649.test3
-rw-r--r--test/files/buildmanager/t2650_1/A.scala4
-rw-r--r--test/files/buildmanager/t2650_1/B.scala3
-rw-r--r--test/files/buildmanager/t2650_1/t2650_1.changes/A2.scala3
-rw-r--r--test/files/buildmanager/t2650_1/t2650_1.check12
-rw-r--r--test/files/buildmanager/t2650_1/t2650_1.test3
-rw-r--r--test/files/buildmanager/t2650_2/A.scala3
-rw-r--r--test/files/buildmanager/t2650_2/B.scala4
-rw-r--r--test/files/buildmanager/t2650_2/t2650_2.changes/A2.scala4
-rw-r--r--test/files/buildmanager/t2650_2/t2650_2.check14
-rw-r--r--test/files/buildmanager/t2650_2/t2650_2.test3
-rw-r--r--test/files/buildmanager/t2650_3/A.scala4
-rw-r--r--test/files/buildmanager/t2650_3/B.scala3
-rw-r--r--test/files/buildmanager/t2650_3/t2650_3.changes/A2.scala4
-rw-r--r--test/files/buildmanager/t2650_3/t2650_3.check14
-rw-r--r--test/files/buildmanager/t2650_3/t2650_3.test3
-rw-r--r--test/files/buildmanager/t2650_4/A.scala5
-rw-r--r--test/files/buildmanager/t2650_4/B.scala3
-rw-r--r--test/files/buildmanager/t2650_4/t2650_4.changes/A2.scala5
-rw-r--r--test/files/buildmanager/t2650_4/t2650_4.check14
-rw-r--r--test/files/buildmanager/t2650_4/t2650_4.test3
-rw-r--r--test/files/buildmanager/t2651_2/A.scala1
-rw-r--r--test/files/buildmanager/t2651_2/t2651_2.changes/A2.scala1
-rw-r--r--test/files/buildmanager/t2651_2/t2651_2.check6
-rw-r--r--test/files/buildmanager/t2651_2/t2651_2.test3
-rw-r--r--test/files/buildmanager/t2651_3/A.scala3
-rw-r--r--test/files/buildmanager/t2651_3/t2651_3.changes/A2.scala3
-rw-r--r--test/files/buildmanager/t2651_3/t2651_3.check6
-rw-r--r--test/files/buildmanager/t2651_3/t2651_3.test3
-rw-r--r--test/files/buildmanager/t2651_4/A.scala5
-rw-r--r--test/files/buildmanager/t2651_4/B.scala3
-rw-r--r--test/files/buildmanager/t2651_4/t2651_4.changes/A2.scala5
-rw-r--r--test/files/buildmanager/t2651_4/t2651_4.check13
-rw-r--r--test/files/buildmanager/t2651_4/t2651_4.test3
-rw-r--r--test/files/buildmanager/t2653/A.scala2
-rw-r--r--test/files/buildmanager/t2653/B.scala3
-rw-r--r--test/files/buildmanager/t2653/t2653.changes/A2.scala2
-rw-r--r--test/files/buildmanager/t2653/t2653.check15
-rw-r--r--test/files/buildmanager/t2653/t2653.test3
-rw-r--r--test/files/buildmanager/t2654/A.scala2
-rw-r--r--test/files/buildmanager/t2654/B.scala1
-rw-r--r--test/files/buildmanager/t2654/t2654.changes/A2.scala4
-rw-r--r--test/files/buildmanager/t2654/t2654.check6
-rw-r--r--test/files/buildmanager/t2654/t2654.test3
-rw-r--r--test/files/buildmanager/t2655/A.scala4
-rw-r--r--test/files/buildmanager/t2655/B.scala3
-rw-r--r--test/files/buildmanager/t2655/t2655.changes/A2.scala4
-rw-r--r--test/files/buildmanager/t2655/t2655.check13
-rw-r--r--test/files/buildmanager/t2655/t2655.test3
-rw-r--r--test/files/buildmanager/t2657/A.scala3
-rw-r--r--test/files/buildmanager/t2657/B.scala4
-rw-r--r--test/files/buildmanager/t2657/t2657.changes/A2.scala3
-rw-r--r--test/files/buildmanager/t2657/t2657.check14
-rw-r--r--test/files/buildmanager/t2657/t2657.test3
-rw-r--r--test/files/buildmanager/t2789/A.scala5
-rw-r--r--test/files/buildmanager/t2789/B.scala3
-rw-r--r--test/files/buildmanager/t2789/t2789.changes/A2.scala5
-rw-r--r--test/files/buildmanager/t2789/t2789.check11
-rw-r--r--test/files/buildmanager/t2789/t2789.test3
-rw-r--r--test/files/buildmanager/t2790/A.scala5
-rw-r--r--test/files/buildmanager/t2790/B.scala4
-rw-r--r--test/files/buildmanager/t2790/t2790.changes/A2.scala4
-rw-r--r--test/files/buildmanager/t2790/t2790.check13
-rw-r--r--test/files/buildmanager/t2790/t2790.test3
-rw-r--r--test/files/buildmanager/t2792/A1.scala3
-rw-r--r--test/files/buildmanager/t2792/A2.scala4
-rw-r--r--test/files/buildmanager/t2792/A3.scala3
-rw-r--r--test/files/buildmanager/t2792/t2792.changes/A1_1.scala3
-rw-r--r--test/files/buildmanager/t2792/t2792.check14
-rw-r--r--test/files/buildmanager/t2792/t2792.test3
-rw-r--r--test/files/buildmanager/t3045/A.java7
-rw-r--r--test/files/buildmanager/t3045/t3045.check3
-rw-r--r--test/files/buildmanager/t3045/t3045.test1
-rw-r--r--test/files/buildmanager/t3054/bar/Bar.java7
-rw-r--r--test/files/buildmanager/t3054/foo/Foo.scala5
-rw-r--r--test/files/buildmanager/t3054/t3054.check3
-rw-r--r--test/files/buildmanager/t3054/t3054.test1
-rw-r--r--test/files/buildmanager/t3059/A.scala4
-rw-r--r--test/files/buildmanager/t3059/B.scala4
-rw-r--r--test/files/buildmanager/t3059/t3059.check6
-rw-r--r--test/files/buildmanager/t3059/t3059.test2
-rw-r--r--test/files/buildmanager/t3133/A.java7
-rw-r--r--test/files/buildmanager/t3133/t3133.check3
-rw-r--r--test/files/buildmanager/t3133/t3133.test1
-rw-r--r--test/files/buildmanager/t3140/A.scala8
-rw-r--r--test/files/buildmanager/t3140/t3140.check6
-rw-r--r--test/files/buildmanager/t3140/t3140.test2
-rw-r--r--test/files/buildmanager/t4215/A.scala5
-rw-r--r--test/files/buildmanager/t4215/t4215.check6
-rw-r--r--test/files/buildmanager/t4215/t4215.test2
-rw-r--r--test/files/continuations-run/implicit-infer-annotations.check5
-rw-r--r--test/files/continuations-run/implicit-infer-annotations.scala59
-rw-r--r--test/files/detach-neg/det_bar.check4
-rw-r--r--test/files/detach-neg/det_bar.scala13
-rw-r--r--test/files/detach-run/actor-run.check5
-rw-r--r--test/files/detach-run/actor/Client.scala54
-rw-r--r--test/files/detach-run/actor/Server.scala27
-rw-r--r--test/files/detach-run/actor/ServerConsole.scala75
-rw-r--r--test/files/detach-run/actor/actor.flags1
-rw-r--r--test/files/detach-run/actor/actor.scala157
-rw-r--r--test/files/detach-run/actor/java.policy25
-rw-r--r--test/files/detach-run/basic-run.check5
-rw-r--r--test/files/detach-run/basic/Client.scala48
-rw-r--r--test/files/detach-run/basic/Server.scala22
-rw-r--r--test/files/detach-run/basic/ServerConsole.scala83
-rw-r--r--test/files/detach-run/basic/basic.flags1
-rw-r--r--test/files/detach-run/basic/basic.scala169
-rw-r--r--test/files/detach-run/basic/java.policy26
-rw-r--r--test/files/instrumented/InstrumentationTest.check2
-rw-r--r--test/files/jvm/duration-tck.scala5
-rw-r--r--test/files/jvm/future-spec/FutureTests.scala2
-rw-r--r--test/files/jvm/interpreter.check6
-rw-r--r--test/files/jvm/interpreter.scala2
-rw-r--r--test/files/jvm/manifests-new.scala34
-rw-r--r--test/files/jvm/manifests-old.scala34
-rw-r--r--test/files/jvm/named-args-in-order.check3
-rw-r--r--test/files/jvm/named-args-in-order/SameBytecode.scala9
-rw-r--r--test/files/jvm/named-args-in-order/Test.scala10
-rw-r--r--test/files/jvm/non-fatal-tests.scala6
-rw-r--r--test/files/jvm/nooptimise/Foo_1.flags1
-rw-r--r--test/files/jvm/nooptimise/Foo_1.scala8
-rw-r--r--test/files/jvm/nooptimise/Test.scala23
-rw-r--r--test/files/jvm/serialization.scala15
-rw-r--r--test/files/jvm/t1143-2/t1143-2.scala26
-rw-r--r--test/files/jvm/t1143.scala12
-rw-r--r--test/files/jvm/t1342/SI.scala2
-rw-r--r--test/files/jvm/t1600.scala3
-rw-r--r--test/files/jvm/t2163/t2163.java (renamed from test/files/jvm/ticket2163/ticket2163.java)4
-rw-r--r--test/files/jvm/t2163/t2163.scala5
-rw-r--r--test/files/jvm/t2470.cmds3
-rw-r--r--test/files/jvm/t2570/Test.scala2
-rw-r--r--test/files/jvm/t3003.cmds2
-rw-r--r--test/files/jvm/t3415/HelloWorld.scala2
-rw-r--r--test/files/jvm/t4283/AbstractFoo.java (renamed from test/files/jvm/ticket4283/AbstractFoo.java)0
-rw-r--r--test/files/jvm/t4283/ScalaBipp.scala (renamed from test/files/jvm/ticket4283/ScalaBipp.scala)0
-rw-r--r--test/files/jvm/t4283/Test.scala (renamed from test/files/jvm/ticket4283/Test.scala)0
-rw-r--r--test/files/jvm/t7006/Foo_1.flags1
-rw-r--r--test/files/jvm/t7006/Foo_1.scala10
-rw-r--r--test/files/jvm/t7006/Test.scala19
-rw-r--r--test/files/jvm/t7181/Foo_1.scala26
-rw-r--r--test/files/jvm/t7181/Test.scala24
-rw-r--r--test/files/jvm/ticket2163/ticket2163.scala5
-rw-r--r--test/files/jvm/typerep.scala26
-rw-r--r--test/files/jvm/unreachable/Foo_1.flags1
-rw-r--r--test/files/jvm/unreachable/Foo_1.scala110
-rw-r--r--test/files/jvm/unreachable/Test.scala23
-rw-r--r--test/files/jvm/value-class-boxing.check7
-rw-r--r--test/files/jvm/value-class-boxing/Analyzed_1.scala17
-rw-r--r--test/files/jvm/value-class-boxing/test.scala15
-rw-r--r--test/files/jvm/xmlattr.scala7
-rw-r--r--test/files/lib/jsoup-1.3.1.jar.desired.sha11
-rw-r--r--test/files/neg/abstract-inaccessible.check10
-rw-r--r--test/files/neg/ambiguous-float-dots.check23
-rw-r--r--test/files/neg/annot-nonconst.check2
-rw-r--r--test/files/neg/anyval-anyref-parent.check2
-rw-r--r--test/files/neg/array-not-seq.check13
-rw-r--r--test/files/neg/array-not-seq.flags1
-rw-r--r--test/files/neg/array-not-seq.scala26
-rw-r--r--test/files/neg/case-collision.check10
-rw-r--r--test/files/neg/catch-all.check10
-rw-r--r--test/files/neg/check-dead.check12
-rw-r--r--test/files/neg/checksensible.check70
-rw-r--r--test/files/neg/classmanifests_new_deprecations.check24
-rw-r--r--test/files/neg/cycle-bounds.check4
-rw-r--r--test/files/neg/cycle-bounds.flags1
-rw-r--r--test/files/neg/cycle-bounds.scala5
-rw-r--r--test/files/neg/cyclics-import.check11
-rw-r--r--test/files/neg/dbldef.check4
-rw-r--r--test/files/neg/eta-expand-star-deprecation.check4
-rw-r--r--test/files/neg/eta-expand-star-deprecation.flags1
-rw-r--r--test/files/neg/eta-expand-star-deprecation.scala8
-rw-r--r--test/files/neg/exhausting.check16
-rw-r--r--test/files/neg/gadts1.check7
-rw-r--r--test/files/neg/import-precedence.check19
-rw-r--r--test/files/neg/import-precedence.scala68
-rw-r--r--test/files/neg/javaConversions-2.10-ambiguity.check6
-rw-r--r--test/files/neg/logImplicits.check4
-rw-r--r--test/files/neg/lubs.check9
-rw-r--r--test/files/neg/macro-basic-mamdmi.check3
-rw-r--r--test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala6
-rw-r--r--test/files/neg/macro-deprecate-idents.check38
-rw-r--r--test/files/neg/macro-invalidimpl-f/Impls_1.scala2
-rw-r--r--test/files/neg/macro-invalidimpl-g/Impls_1.scala2
-rw-r--r--test/files/neg/macro-invalidsig-implicit-params/Impls_Macros_1.scala6
-rw-r--r--test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala2
-rw-r--r--test/files/neg/macro-override-macro-overrides-abstract-method-a.check2
-rw-r--r--test/files/neg/macro-override-macro-overrides-abstract-method-b.check2
-rw-r--r--test/files/neg/macro-override-method-overrides-macro.check2
-rw-r--r--test/files/neg/macro-override-method-overrides-macro/Impls_1.scala2
-rw-r--r--test/files/neg/macro-without-xmacros-a/Impls_1.scala6
-rw-r--r--test/files/neg/macro-without-xmacros-b/Impls_1.scala6
-rw-r--r--test/files/neg/main1.check14
-rw-r--r--test/files/neg/migration28.check4
-rw-r--r--test/files/neg/names-defaults-neg-warn.check8
-rw-r--r--test/files/neg/names-defaults-neg.check2
-rw-r--r--test/files/neg/nested-annotation.check10
-rw-r--r--test/files/neg/nested-annotation.scala9
-rw-r--r--test/files/neg/newpat_unreachable.check18
-rw-r--r--test/files/neg/nonlocal-warning.check9
-rw-r--r--test/files/neg/nonlocal-warning.flags (renamed from test/pending/pos/t4649.flags)0
-rw-r--r--test/files/neg/nonlocal-warning.scala18
-rw-r--r--test/files/neg/nullary-override.check4
-rw-r--r--test/files/neg/overloaded-implicit.check8
-rw-r--r--test/files/neg/overloaded-implicit.flags2
-rw-r--r--test/files/neg/package-ob-case.check4
-rw-r--r--test/files/neg/pat_unreachable.check19
-rw-r--r--test/files/neg/pat_unreachable.flags2
-rw-r--r--test/files/neg/patmat-type-check.check17
-rw-r--r--test/files/neg/patmatexhaust.check24
-rw-r--r--test/files/neg/permanent-blindness.check10
-rw-r--r--test/files/neg/predef-masking.scala2
-rw-r--r--test/files/neg/protected-constructors.check5
-rw-r--r--test/files/neg/sealed-final-neg.check4
-rw-r--r--test/files/neg/sealed-final-neg.flags1
-rw-r--r--test/files/neg/sealed-final-neg.scala41
-rw-r--r--test/files/neg/sealed-java-enums.check4
-rw-r--r--test/files/neg/serialversionuid-not-const.check10
-rw-r--r--test/files/neg/serialversionuid-not-const.scala16
-rw-r--r--test/files/neg/stmt-expr-discard.check8
-rw-r--r--test/files/neg/switch.check8
-rw-r--r--test/files/neg/t1010.scala4
-rw-r--r--test/files/neg/t1224.check2
-rw-r--r--test/files/neg/t1224.flags1
-rw-r--r--test/files/neg/t2148.check2
-rw-r--r--test/files/neg/t2442.check8
-rw-r--r--test/files/neg/t2641.check12
-rw-r--r--test/files/neg/t2796.check4
-rw-r--r--test/files/neg/t3098.check4
-rw-r--r--test/files/neg/t3160ambiguous.check7
-rw-r--r--test/files/neg/t3160ambiguous.scala15
-rw-r--r--test/files/neg/t3224.check28
-rwxr-xr-xtest/files/neg/t3224.scala48
-rw-r--r--test/files/neg/t3234.check6
-rw-r--r--test/files/neg/t3234.flags2
-rw-r--r--test/files/neg/t3683a.check4
-rw-r--r--test/files/neg/t3692-new.check11
-rw-r--r--test/files/neg/t3692-new.flags2
-rw-r--r--test/files/neg/t3692-old.check14
-rw-r--r--test/files/neg/t3692-old.flags1
-rw-r--r--test/files/neg/t3692-old.scala19
-rw-r--r--test/files/neg/t409.check4
-rw-r--r--test/files/neg/t414.scala2
-rw-r--r--test/files/neg/t421.check2
-rw-r--r--test/files/neg/t421.scala2
-rw-r--r--test/files/neg/t4271.scala4
-rw-r--r--test/files/neg/t4302.check4
-rw-r--r--test/files/neg/t4440.check12
-rw-r--r--test/files/neg/t4460a.check4
-rw-r--r--test/files/neg/t4460a.scala7
-rw-r--r--test/files/neg/t4460b.check4
-rw-r--r--test/files/neg/t4460b.scala9
-rw-r--r--test/files/neg/t4460c.check7
-rw-r--r--test/files/neg/t4460c.scala7
-rw-r--r--test/files/neg/t4537.check4
-rw-r--r--test/files/neg/t4537/c.scala8
-rw-r--r--test/files/neg/t4691_exhaust_extractor.check10
-rw-r--r--test/files/neg/t4749.check16
-rw-r--r--test/files/neg/t4762.check8
-rw-r--r--test/files/neg/t4851.check20
-rw-r--r--test/files/neg/t5182.check7
-rw-r--r--test/files/neg/t5182.flags1
-rw-r--r--test/files/neg/t5182.scala5
-rw-r--r--test/files/neg/t5353.check4
-rw-r--r--test/files/neg/t5353.scala3
-rw-r--r--test/files/neg/t5426.check12
-rw-r--r--test/files/neg/t5440.check4
-rw-r--r--test/files/neg/t5529.check5
-rw-r--r--test/files/neg/t5663-badwarneq.check18
-rw-r--r--test/files/neg/t5696.check2
-rw-r--r--test/files/neg/t5753.check3
-rw-r--r--test/files/neg/t5762.check12
-rw-r--r--test/files/neg/t5830.check8
-rw-r--r--test/files/neg/t5954.check18
-rw-r--r--test/files/neg/t5954.flags1
-rw-r--r--test/files/neg/t5954.scala46
-rw-r--r--test/files/neg/t5956.check21
-rw-r--r--test/files/neg/t5956.scala4
-rw-r--r--test/files/neg/t6011.check10
-rw-r--r--test/files/neg/t6048.check12
-rw-r--r--test/files/neg/t6083.check10
-rw-r--r--test/files/neg/t6083.scala7
-rw-r--r--test/files/neg/t6162-inheritance.check16
-rw-r--r--test/files/neg/t6162-overriding.check8
-rw-r--r--test/files/neg/t6264.check4
-rw-r--r--test/files/neg/t6276.check16
-rw-r--r--test/files/neg/t6355.check7
-rw-r--r--test/files/neg/t6355.scala19
-rw-r--r--test/files/neg/t6375.check27
-rw-r--r--test/files/neg/t6375.flags1
-rw-r--r--test/files/neg/t6375.scala67
-rw-r--r--test/files/neg/t6406-regextract.check7
-rw-r--r--test/files/neg/t6406-regextract.flags1
-rw-r--r--test/files/neg/t6406-regextract.scala5
-rwxr-xr-xtest/files/neg/t6446-additional.check31
-rw-r--r--test/files/neg/t6446-additional/ploogin_1.scala31
-rw-r--r--test/files/neg/t6446-additional/sample_2.flags1
-rw-r--r--test/files/neg/t6446-additional/sample_2.scala6
-rw-r--r--test/files/neg/t6446-additional/scalac-plugin.xml4
-rwxr-xr-xtest/files/neg/t6446-list.check1
-rw-r--r--test/files/neg/t6446-list/ploogin_1.scala31
-rw-r--r--test/files/neg/t6446-list/sample_2.flags1
-rw-r--r--test/files/neg/t6446-list/sample_2.scala6
-rw-r--r--test/files/neg/t6446-list/scalac-plugin.xml4
-rwxr-xr-xtest/files/neg/t6446-missing.check31
-rw-r--r--test/files/neg/t6446-missing/sample_2.flags1
-rw-r--r--test/files/neg/t6446-missing/sample_2.scala6
-rw-r--r--test/files/neg/t6446-missing/scalac-plugin.xml4
-rw-r--r--test/files/neg/t6446-show-phases.check30
-rw-r--r--test/files/neg/t6446-show-phases.flags1
-rw-r--r--test/files/neg/t6446-show-phases.scala3
-rw-r--r--test/files/neg/t6566a.check4
-rw-r--r--test/files/neg/t6566a.scala17
-rw-r--r--test/files/neg/t6566b.check4
-rw-r--r--test/files/neg/t6566b.scala19
-rw-r--r--test/files/neg/t6567.check8
-rw-r--r--test/files/neg/t6667.check3
-rw-r--r--test/files/neg/t6667.flags1
-rw-r--r--test/files/neg/t6667b.check3
-rw-r--r--test/files/neg/t6667b.flags1
-rw-r--r--test/files/neg/t667.check4
-rw-r--r--test/files/neg/t6675-old-patmat.check4
-rw-r--r--test/files/neg/t6675-old-patmat.flags1
-rw-r--r--test/files/neg/t6675-old-patmat.scala13
-rw-r--r--test/files/neg/t6675.check4
-rw-r--r--test/files/neg/t6902.check10
-rw-r--r--test/files/neg/t6963a.check4
-rw-r--r--test/files/neg/t6963b.check13
-rw-r--r--test/files/neg/t6963b.flags1
-rw-r--r--test/files/neg/t6963b.scala20
-rw-r--r--test/files/neg/t7171.check9
-rw-r--r--test/files/neg/t7171b.check12
-rw-r--r--test/files/neg/t877.check4
-rw-r--r--test/files/neg/unchecked-abstract.check20
-rw-r--r--test/files/neg/unchecked-impossible.check8
-rw-r--r--test/files/neg/unchecked-knowable.check8
-rw-r--r--test/files/neg/unchecked-refinement.check12
-rw-r--r--test/files/neg/unchecked-suppress.check10
-rw-r--r--test/files/neg/unchecked.check16
-rw-r--r--test/files/neg/unchecked2.check32
-rw-r--r--test/files/neg/unchecked3.check31
-rw-r--r--test/files/neg/unit-returns-value.check14
-rw-r--r--test/files/neg/unit-returns-value.scala23
-rw-r--r--test/files/neg/unreachablechar.check7
-rw-r--r--test/files/neg/unreachablechar.flags2
-rw-r--r--test/files/neg/variances-refinement.check22
-rw-r--r--test/files/neg/variances-refinement.scala40
-rw-r--r--test/files/neg/variances.check5
-rw-r--r--test/files/neg/variances2.check229
-rw-r--r--test/files/neg/variances2.scala303
-rw-r--r--test/files/neg/virtpatmat_reach_null.check4
-rw-r--r--test/files/neg/virtpatmat_reach_sealed_unsealed.check12
-rw-r--r--test/files/neg/virtpatmat_unreach_select.check4
-rw-r--r--test/files/neg/warn-inferred-any.check12
-rw-r--r--test/files/neg/warn-inferred-any.flags1
-rw-r--r--test/files/neg/warn-inferred-any.scala19
-rw-r--r--test/files/neg/warn-unused-imports.check44
-rw-r--r--test/files/neg/warn-unused-imports.flags1
-rw-r--r--test/files/neg/warn-unused-imports.scala125
-rw-r--r--test/files/neg/warn-unused-privates.check63
-rw-r--r--test/files/neg/warn-unused-privates.flags1
-rw-r--r--test/files/neg/warn-unused-privates.scala105
-rw-r--r--test/files/pos/CustomGlobal.scala2
-rw-r--r--test/files/pos/List1.scala6
-rw-r--r--test/files/pos/annotated-treecopy/Impls_Macros_1.scala2
-rw-r--r--test/files/pos/annotations.scala2
-rw-r--r--test/files/pos/annotations2.scala31
-rw-r--r--test/files/pos/attachments-typed-another-ident.check (renamed from test/pending/run/t5418.check)0
-rw-r--r--test/files/pos/attachments-typed-another-ident.flags (renamed from test/files/neg/t5692a.flags)0
-rw-r--r--test/files/pos/attachments-typed-another-ident/Impls_1.scala17
-rw-r--r--test/files/pos/attachments-typed-another-ident/Macros_Test_2.scala5
-rw-r--r--test/files/pos/attachments-typed-ident/Impls_1.scala2
-rw-r--r--test/files/pos/attributes.scala2
-rw-r--r--test/files/pos/chang/Test.scala2
-rw-r--r--test/files/pos/cycle-jsoup.flags1
-rw-r--r--test/files/pos/cycle-jsoup.scala5
-rw-r--r--test/files/pos/cycle.flags1
-rw-r--r--test/files/pos/cycle/J_1.java16
-rw-r--r--test/files/pos/cycle/X_2.scala3
-rw-r--r--test/files/pos/depmet_implicit_chaining_zw.scala6
-rw-r--r--test/files/pos/depmet_implicit_norm_ret.scala20
-rw-r--r--test/files/pos/depmet_implicit_oopsla_session_simpler.scala2
-rw-r--r--test/files/pos/exhaust_2.scala (renamed from test/pending/pos/exhaust_2.scala)0
-rw-r--r--test/files/pos/implicits-new.scala8
-rw-r--r--test/files/pos/implicits-old.scala40
-rw-r--r--test/files/pos/infer2-pos.scala2
-rw-r--r--test/files/pos/javaConversions-2.10-ambiguity.scala (renamed from test/files/neg/javaConversions-2.10-ambiguity.scala)2
-rw-r--r--test/files/pos/javaConversions-2.10-regression.scala6
-rw-r--r--test/files/pos/kinds.scala13
-rw-r--r--test/files/pos/liftcode_polymorphic.scala2
-rw-r--r--test/files/pos/no-widen-locals.scala (renamed from test/pending/pos/no-widen-locals.scala)0
-rw-r--r--test/files/pos/relax_implicit_divergence.scala6
-rw-r--r--test/files/pos/sealed-final.flags1
-rw-r--r--test/files/pos/sealed-final.scala14
-rw-r--r--test/files/pos/simple-exceptions.scala2
-rw-r--r--test/files/pos/spec-Function1.scala2
-rw-r--r--test/files/pos/spec-annotations.scala2
-rw-r--r--test/files/pos/spec-arrays.scala41
-rw-r--r--test/files/pos/spec-asseenfrom.scala6
-rw-r--r--test/files/pos/spec-cyclic.scala10
-rw-r--r--test/files/pos/spec-funs.scala9
-rw-r--r--test/files/pos/spec-sealed.scala8
-rw-r--r--test/files/pos/spec-sparsearray-new.scala16
-rw-r--r--test/files/pos/spec-sparsearray-old.scala14
-rw-r--r--test/files/pos/spec-traits.scala12
-rw-r--r--test/files/pos/super.cmds2
-rw-r--r--test/files/pos/t0031.scala6
-rw-r--r--test/files/pos/t0227.scala4
-rw-r--r--test/files/pos/t0301.scala2
-rw-r--r--test/files/pos/t0422.scala3
-rw-r--r--test/files/pos/t0851.scala14
-rw-r--r--test/files/pos/t0872.scala8
-rw-r--r--test/files/pos/t1029.cmds2
-rw-r--r--test/files/pos/t1107a.scala (renamed from test/files/pos/t1107.scala)0
-rw-r--r--test/files/pos/t1203a.scala (renamed from test/files/pos/t1203.scala)0
-rw-r--r--test/files/pos/t1230/S.scala2
-rw-r--r--test/files/pos/t1231/S.scala2
-rw-r--r--test/files/pos/t1385.scala4
-rw-r--r--test/files/pos/t1439.flags2
-rw-r--r--test/files/pos/t1751/A1_2.scala (renamed from test/pending/pos/t1751/A1_2.scala)0
-rw-r--r--test/files/pos/t1751/A2_1.scala (renamed from test/pending/pos/t1751/A2_1.scala)0
-rw-r--r--test/files/pos/t1751/SuiteClasses.java (renamed from test/pending/pos/t1751/SuiteClasses.java)0
-rw-r--r--test/files/pos/t1782/Ann.java (renamed from test/pending/pos/t1782/Ann.java)0
-rw-r--r--test/files/pos/t1782/Days.java (renamed from test/pending/pos/t1782/Days.java)0
-rw-r--r--test/files/pos/t1782/ImplementedBy.java (renamed from test/pending/pos/t1782/ImplementedBy.java)0
-rw-r--r--test/files/pos/t1782/Test_1.scala (renamed from test/pending/pos/t1782/Test_1.scala)0
-rw-r--r--test/files/pos/t1942.cmds2
-rw-r--r--test/files/pos/t2331.scala4
-rw-r--r--test/files/pos/t2421.scala14
-rw-r--r--test/files/pos/t2421b_pos.scala2
-rwxr-xr-xtest/files/pos/t2429.scala10
-rw-r--r--test/files/pos/t2464.cmds3
-rwxr-xr-xtest/files/pos/t2484.scala4
-rw-r--r--test/files/pos/t2698.scala3
-rw-r--r--test/files/pos/t2726.cmds2
-rw-r--r--test/files/pos/t2797.scala4
-rw-r--r--test/files/pos/t294/Ann.java (renamed from test/pending/pos/t294/Ann.java)0
-rw-r--r--test/files/pos/t294/Ann2.java (renamed from test/pending/pos/t294/Ann2.java)0
-rw-r--r--test/files/pos/t294/Test_1.scala (renamed from test/pending/pos/t294/Test_1.scala)0
-rw-r--r--test/files/pos/t294/Test_2.scala (renamed from test/pending/pos/t294/Test_2.scala)0
-rw-r--r--test/files/pos/t3152.scala10
-rw-r--r--test/files/pos/t3160.scala6
-rw-r--r--test/files/pos/t3252.scala6
-rw-r--r--test/files/pos/t3349/Test.scala4
-rw-r--r--test/files/pos/t3363-new.scala4
-rw-r--r--test/files/pos/t3363-old.scala2
-rw-r--r--test/files/pos/t342.scala8
-rw-r--r--test/files/pos/t344.scala4
-rw-r--r--test/files/pos/t3440.scala10
-rw-r--r--test/files/pos/t3477.scala4
-rw-r--r--test/files/pos/t3731.scala4
-rw-r--r--test/files/pos/t3864/tuples_1.scala36
-rw-r--r--test/files/pos/t3883.scala8
-rw-r--r--test/files/pos/t3927.scala4
-rw-r--r--test/files/pos/t422.scala17
-rw-r--r--test/files/pos/t4649.flags1
-rw-r--r--test/files/pos/t4649.scala (renamed from test/pending/pos/t4649.scala)0
-rw-r--r--test/files/pos/t4744.flags1
-rw-r--r--test/files/pos/t4744/Bar.scala1
-rw-r--r--test/files/pos/t4744/Foo.java1
-rw-r--r--test/files/pos/t4786.scala (renamed from test/pending/pos/t4786.scala)0
-rw-r--r--test/files/pos/t4859.scala (renamed from test/pending/pos/t4859.scala)2
-rw-r--r--test/files/pos/t5223.scala2
-rw-r--r--test/files/pos/t5399a.scala (renamed from test/pending/pos/t5399a.scala)0
-rw-r--r--test/files/pos/t5606.scala (renamed from test/pending/pos/t5606.scala)0
-rw-r--r--test/files/pos/t5639/Bar.scala (renamed from test/pending/pos/t5639/Bar.scala)0
-rw-r--r--test/files/pos/t5639/Foo.scala (renamed from test/pending/pos/t5639/Foo.scala)0
-rw-r--r--test/files/pos/t5644/BoxesRunTime.java2
-rw-r--r--test/files/pos/t5692a.check (renamed from test/files/neg/t5692a.check)0
-rw-r--r--test/files/pos/t5692a.flags (renamed from test/files/neg/t5692b.flags)0
-rw-r--r--test/files/pos/t5692a/Macros_1.scala (renamed from test/files/neg/t5692a/Macros_1.scala)0
-rw-r--r--test/files/pos/t5692a/Test_2.scala (renamed from test/files/neg/t5692a/Test_2.scala)0
-rw-r--r--test/files/pos/t5692b.check (renamed from test/files/neg/t5692b.check)0
-rw-r--r--test/files/pos/t5692b.flags1
-rw-r--r--test/files/pos/t5692b/Macros_1.scala (renamed from test/files/neg/t5692b/Macros_1.scala)0
-rw-r--r--test/files/pos/t5692b/Test_2.scala (renamed from test/files/neg/t5692b/Test_2.scala)0
-rw-r--r--test/files/pos/t5809.scala5
-rw-r--r--test/files/pos/t5858.scala3
-rw-r--r--test/files/pos/t5877.scala4
-rw-r--r--test/files/pos/t5877b.scala2
-rw-r--r--test/files/pos/t6301.scala9
-rw-r--r--test/files/pos/t6355pos.scala16
-rw-r--r--test/files/pos/t640.scala4
-rw-r--r--test/files/pos/t6447.scala18
-rw-r--r--test/files/pos/t6664.scala4
-rw-r--r--test/files/pos/t6664b.scala5
-rw-r--r--test/files/pos/t6745.scala4
-rw-r--r--test/files/pos/t6966.scala17
-rw-r--r--test/files/pos/t715.cmds2
-rw-r--r--test/files/pos/t715/meredith_1.scala58
-rw-r--r--test/files/pos/t715/runner_2.scala2
-rw-r--r--test/files/pos/t911.scala8
-rw-r--r--test/files/pos/tcpoly_boundedmonad.scala18
-rw-r--r--test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala8
-rw-r--r--test/files/pos/tcpoly_infer_implicit_tuple_wrapper.scala4
-rw-r--r--test/files/pos/tcpoly_overloaded.scala18
-rw-r--r--test/files/pos/tcpoly_subst.scala2
-rw-r--r--test/files/pos/tcpoly_variance_pos.scala4
-rw-r--r--test/files/pos/tcpoly_wildcards.scala2
-rw-r--r--test/files/pos/ticket2251.scala14
-rw-r--r--test/files/pos/typealias_dubious.scala14
-rw-r--r--test/files/pos/variances-flip.scala7
-rw-r--r--test/files/pos/variances-local.scala7
-rw-r--r--test/files/pos/virtpatmat_binding_opt.scala4
-rw-r--r--test/files/presentation/callcc-interpreter.check6
-rwxr-xr-xtest/files/presentation/doc/doc.scala20
-rw-r--r--test/files/presentation/ide-bug-1000349.check6
-rw-r--r--test/files/presentation/ide-bug-1000475.check18
-rw-r--r--test/files/presentation/ide-bug-1000531.check9
-rw-r--r--test/files/presentation/implicit-member.check6
-rw-r--r--test/files/presentation/ping-pong.check12
-rw-r--r--test/files/presentation/t5708.check6
-rw-r--r--test/files/presentation/visibility.check30
-rw-r--r--test/files/run/Course-2002-07.scala24
-rw-r--r--test/files/run/Course-2002-08.scala4
-rw-r--r--test/files/run/Course-2002-09.scala12
-rw-r--r--test/files/run/Course-2002-13.scala4
-rw-r--r--test/files/run/all-overridden.check1
-rw-r--r--test/files/run/all-overridden.scala11
-rw-r--r--test/files/run/analyzerPlugins.check48
-rw-r--r--test/files/run/analyzerPlugins.scala6
-rw-r--r--test/files/run/array-addition.check4
-rw-r--r--test/files/run/array-addition.scala11
-rw-r--r--test/files/run/array-charSeq.scala1
-rw-r--r--test/files/run/arrays.scala2
-rw-r--r--test/files/run/bitsets.scala4
-rw-r--r--test/files/run/caseclasses.scala2
-rw-r--r--test/files/run/collection-stacks.check14
-rw-r--r--test/files/run/collection-stacks.scala38
-rw-r--r--test/files/run/compiler-asSeenFrom.scala18
-rw-r--r--test/files/run/constant-type.check8
-rw-r--r--test/files/run/constant-type.scala8
-rw-r--r--test/files/run/constrained-types.check4
-rw-r--r--test/files/run/ctries-new/iterator.scala114
-rw-r--r--test/files/run/deeps.check87
-rw-r--r--test/files/run/deeps.scala114
-rw-r--r--test/files/run/enums.scala14
-rw-r--r--test/files/run/exceptions-2.scala50
-rw-r--r--test/files/run/exceptions.scala4
-rw-r--r--test/files/run/existentials-in-compiler.check44
-rw-r--r--test/files/run/existentials-in-compiler.scala2
-rw-r--r--test/files/run/exoticnames.scala8
-rw-r--r--test/files/run/genericValueClass.scala11
-rw-r--r--test/files/run/hashset.check26
-rw-r--r--test/files/run/hashset.scala48
-rw-r--r--test/files/run/hashsetremove.check6
-rw-r--r--test/files/run/hashsetremove.scala13
-rw-r--r--test/files/run/idempotency-this.check2
-rw-r--r--test/files/run/inline-ex-handlers.check200
-rw-r--r--test/files/run/inline-ex-handlers.scala2
-rw-r--r--test/files/run/iterator-from.scala69
-rw-r--r--test/files/run/lazy-locals.scala2
-rw-r--r--test/files/run/longmap.check0
-rw-r--r--test/files/run/longmap.scala8
-rw-r--r--test/files/run/lub-visibility.check2
-rw-r--r--test/files/run/macro-abort-fresh/Macros_1.scala2
-rw-r--r--test/files/run/macro-abort-fresh/Test_2.scala2
-rw-r--r--test/files/run/macro-basic-ma-md-mi/Impls_1.scala6
-rw-r--r--test/files/run/macro-basic-ma-mdmi/Impls_Macros_1.scala6
-rw-r--r--test/files/run/macro-basic-mamd-mi/Impls_1.scala6
-rw-r--r--test/files/run/macro-bodyexpandstoimpl/Impls_1.scala2
-rw-r--r--test/files/run/macro-declared-in-annotation/Impls_1.scala2
-rw-r--r--test/files/run/macro-declared-in-anonymous/Impls_1.scala4
-rw-r--r--test/files/run/macro-declared-in-block/Impls_1.scala4
-rw-r--r--test/files/run/macro-declared-in-class-class/Impls_1.scala4
-rw-r--r--test/files/run/macro-declared-in-class-object/Impls_1.scala4
-rw-r--r--test/files/run/macro-declared-in-class/Impls_1.scala4
-rw-r--r--test/files/run/macro-declared-in-default-param/Impls_1.scala2
-rw-r--r--test/files/run/macro-declared-in-implicit-class/Impls_Macros_1.scala4
-rw-r--r--test/files/run/macro-declared-in-method/Impls_1.scala4
-rw-r--r--test/files/run/macro-declared-in-object-class/Impls_1.scala4
-rw-r--r--test/files/run/macro-declared-in-object-object/Impls_1.scala4
-rw-r--r--test/files/run/macro-declared-in-object/Impls_1.scala4
-rw-r--r--test/files/run/macro-declared-in-package-object/Impls_1.scala4
-rw-r--r--test/files/run/macro-declared-in-refinement/Impls_1.scala4
-rw-r--r--test/files/run/macro-declared-in-trait/Impls_1.scala4
-rw-r--r--test/files/run/macro-def-infer-return-type-b/Test_2.scala2
-rw-r--r--test/files/run/macro-duplicate.check0
-rw-r--r--test/files/run/macro-duplicate.flags1
-rw-r--r--test/files/run/macro-duplicate/Impls_Macros_1.scala29
-rw-r--r--test/files/run/macro-duplicate/Test_2.scala6
-rw-r--r--test/files/run/macro-enclosures.check32
-rw-r--r--test/files/run/macro-enclosures.flags1
-rw-r--r--test/files/run/macro-enclosures/Impls_Macros_1.scala14
-rw-r--r--test/files/run/macro-enclosures/Test_2.scala11
-rw-r--r--test/files/run/macro-expand-implicit-argument/Macros_1.scala6
-rw-r--r--test/files/run/macro-expand-implicit-macro-has-implicit/Impls_1.scala2
-rw-r--r--test/files/run/macro-expand-implicit-macro-is-implicit/Impls_1.scala2
-rw-r--r--test/files/run/macro-expand-implicit-macro-is-view/Impls_1.scala2
-rw-r--r--test/files/run/macro-expand-multiple-arglists/Impls_1.scala4
-rw-r--r--test/files/run/macro-expand-nullary-generic/Impls_1.scala2
-rw-r--r--test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala2
-rw-r--r--test/files/run/macro-expand-overload/Impls_1.scala2
-rw-r--r--test/files/run/macro-expand-override/Impls_1.scala2
-rw-r--r--test/files/run/macro-expand-recursive/Impls_1.scala4
-rw-r--r--test/files/run/macro-expand-tparams-explicit/Impls_1.scala2
-rw-r--r--test/files/run/macro-expand-tparams-implicit/Impls_1.scala2
-rw-r--r--test/files/run/macro-expand-tparams-optional/Impls_1.scala2
-rw-r--r--test/files/run/macro-expand-tparams-prefix-a/Impls_1.scala2
-rw-r--r--test/files/run/macro-expand-tparams-prefix-b/Impls_1.scala2
-rw-r--r--test/files/run/macro-expand-tparams-prefix-c1/Impls_1.scala6
-rw-r--r--test/files/run/macro-expand-tparams-prefix-c2/Impls_Macros_1.scala6
-rw-r--r--test/files/run/macro-expand-tparams-prefix-d1/Impls_1.scala6
-rw-r--r--test/files/run/macro-expand-unapply-a.check2
-rw-r--r--test/files/run/macro-expand-unapply-a.flags1
-rw-r--r--test/files/run/macro-expand-unapply-a/Impls_Macros_1.scala15
-rw-r--r--test/files/run/macro-expand-unapply-a/Test_2.scala6
-rw-r--r--test/files/run/macro-expand-unapply-b.check2
-rw-r--r--test/files/run/macro-expand-unapply-b.flags1
-rw-r--r--test/files/run/macro-expand-unapply-b/Impls_Macros_1.scala37
-rw-r--r--test/files/run/macro-expand-unapply-b/Test_2.scala8
-rw-r--r--test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Impls_1.scala2
-rw-r--r--test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala2
-rw-r--r--test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Impls_1.scala2
-rw-r--r--test/files/run/macro-expand-varargs-explicit-over-varargs/Impls_1.scala2
-rw-r--r--test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Impls_1.scala2
-rw-r--r--test/files/run/macro-expand-varargs-implicit-over-varargs/Impls_1.scala2
-rw-r--r--test/files/run/macro-impl-default-params/Impls_Macros_1.scala8
-rw-r--r--test/files/run/macro-impl-rename-context/Impls_Macros_1.scala2
-rw-r--r--test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala2
-rw-r--r--test/files/run/macro-invalidret-nontypeable/Impls_Macros_1.scala2
-rw-r--r--test/files/run/macro-invalidret-nontypeable/Test_2.scala2
-rw-r--r--test/files/run/macro-invalidusage-badret/Test_2.scala2
-rw-r--r--test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala2
-rw-r--r--test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala2
-rw-r--r--test/files/run/macro-invalidusage-partialapplication/Impls_Macros_1.scala4
-rw-r--r--test/files/run/macro-invalidusage-partialapplication/Test_2.scala2
-rw-r--r--test/files/run/macro-openmacros/Impls_Macros_1.scala2
-rw-r--r--test/files/run/macro-range/Common_1.scala2
-rw-r--r--test/files/run/macro-range/Expansion_Impossible_2.scala10
-rw-r--r--test/files/run/macro-reflective-ma-normal-mdmi/Impls_Macros_1.scala2
-rw-r--r--test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala2
-rw-r--r--test/files/run/macro-reflective-mamd-normal-mi/Impls_1.scala2
-rw-r--r--test/files/run/macro-reflective-mamd-normal-mi/Macros_Test_2.scala10
-rw-r--r--test/files/run/macro-reify-freevars/Test_2.scala8
-rw-r--r--test/files/run/macro-reify-nested-a/Impls_Macros_1.scala2
-rw-r--r--test/files/run/macro-reify-nested-b/Impls_Macros_1.scala2
-rw-r--r--test/files/run/macro-reify-splice-outside-reify/Test_2.scala2
-rw-r--r--test/files/run/macro-reify-tagless-a/Test_2.scala6
-rw-r--r--test/files/run/macro-reify-type/Macros_1.scala6
-rw-r--r--test/files/run/macro-reify-type/Test_2.scala8
-rw-r--r--test/files/run/macro-reify-unreify/Macros_1.scala2
-rw-r--r--test/files/run/macro-repl-basic.check10
-rw-r--r--test/files/run/macro-repl-basic.scala6
-rw-r--r--test/files/run/macro-repl-dontexpand.check2
-rw-r--r--test/files/run/macro-toplevel-companion-a.check0
-rw-r--r--test/files/run/macro-toplevel-companion-a.flags1
-rw-r--r--test/files/run/macro-toplevel-companion-a/Impls_Macros_1.scala14
-rw-r--r--test/files/run/macro-toplevel-companion-a/Test_2.scala8
-rw-r--r--test/files/run/macro-toplevel-companion-b.check4
-rw-r--r--test/files/run/macro-toplevel-companion-b.flags1
-rw-r--r--test/files/run/macro-toplevel-companion-b/Impls_Macros_1.scala15
-rw-r--r--test/files/run/macro-toplevel-companion-b/Test_2.scala11
-rw-r--r--test/files/run/macro-toplevel-companion-c.check3
-rw-r--r--test/files/run/macro-toplevel-companion-c.flags1
-rw-r--r--test/files/run/macro-toplevel-companion-c.scala51
-rw-r--r--test/files/run/macro-toplevel.check2
-rw-r--r--test/files/run/macro-toplevel/Macros_1.scala15
-rw-r--r--test/files/run/macro-toplevel/Test_2.scala6
-rw-r--r--test/files/run/macro-typecheck-implicitsdisabled.check2
-rw-r--r--test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala4
-rw-r--r--test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala6
-rw-r--r--test/files/run/macro-typecheck-macrosdisabled2.check2
-rw-r--r--test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala6
-rw-r--r--test/files/run/map_java_conversions.scala2
-rw-r--r--test/files/run/mutable-treeset.scala145
-rw-r--r--test/files/run/no-pickle-skolems.check1
-rw-r--r--test/files/run/no-pickle-skolems/Source_1.scala5
-rw-r--r--test/files/run/no-pickle-skolems/Test_2.scala37
-rw-r--r--test/files/run/patmat_unapp_abstype-old.check4
-rw-r--r--test/files/run/patmat_unapp_abstype-old.flags1
-rw-r--r--test/files/run/patmat_unapp_abstype-old.scala83
-rw-r--r--test/files/run/programmatic-main.check60
-rw-r--r--test/files/run/reflection-allmirrors-tostring.scala20
-rw-r--r--test/files/run/reflection-enclosed-basic.scala4
-rw-r--r--test/files/run/reflection-enclosed-inner-basic.scala8
-rw-r--r--test/files/run/reflection-enclosed-inner-inner-basic.scala8
-rw-r--r--test/files/run/reflection-enclosed-inner-nested-basic.scala8
-rw-r--r--test/files/run/reflection-enclosed-nested-basic.scala8
-rw-r--r--test/files/run/reflection-enclosed-nested-inner-basic.scala8
-rw-r--r--test/files/run/reflection-enclosed-nested-nested-basic.scala8
-rw-r--r--test/files/run/reflection-equality.check2
-rw-r--r--test/files/run/reflection-equality.scala2
-rw-r--r--test/files/run/reflection-fieldmirror-accessorsareokay.scala4
-rw-r--r--test/files/run/reflection-fieldmirror-ctorparam.scala2
-rw-r--r--test/files/run/reflection-fieldmirror-getsetval.scala2
-rw-r--r--test/files/run/reflection-fieldmirror-getsetvar.scala2
-rw-r--r--test/files/run/reflection-fieldmirror-nmelocalsuffixstring.scala2
-rw-r--r--test/files/run/reflection-fieldmirror-privatethis.scala2
-rw-r--r--test/files/run/reflection-fieldsymbol-navigation.scala2
-rw-r--r--test/files/run/reflection-implClass.scala8
-rw-r--r--test/files/run/reflection-magicsymbols-invoke.scala4
-rw-r--r--test/files/run/reflection-magicsymbols-repl.check2
-rw-r--r--test/files/run/reflection-magicsymbols-repl.scala2
-rw-r--r--test/files/run/reflection-magicsymbols-vanilla.scala2
-rw-r--r--test/files/run/reflection-methodsymbol-params.scala16
-rw-r--r--test/files/run/reflection-methodsymbol-returntype.scala16
-rw-r--r--test/files/run/reflection-methodsymbol-typeparams.scala16
-rw-r--r--test/files/run/reflection-repl-classes.check4
-rw-r--r--test/files/run/reflection-repl-classes.scala2
-rw-r--r--test/files/run/reflection-sanitychecks.scala16
-rw-r--r--test/files/run/reflection-valueclasses-derived.scala6
-rw-r--r--test/files/run/reflection-valueclasses-magic.scala2
-rw-r--r--test/files/run/reflection-valueclasses-standard.scala4
-rw-r--r--test/files/run/reify-aliases.check2
-rw-r--r--test/files/run/reify_copypaste1.scala4
-rw-r--r--test/files/run/reify_implicits-new.check (renamed from test/pending/run/reify_implicits-new.check)0
-rw-r--r--test/files/run/reify_implicits-new.scala (renamed from test/pending/run/reify_implicits-new.scala)0
-rw-r--r--test/files/run/reify_implicits-old.check (renamed from test/pending/run/reify_implicits-old.check)0
-rw-r--r--test/files/run/reify_implicits-old.scala (renamed from test/pending/run/reify_implicits-old.scala)0
-rw-r--r--test/files/run/reify_newimpl_11.check6
-rw-r--r--test/files/run/reify_newimpl_13.check6
-rw-r--r--test/files/run/reify_newimpl_19.check6
-rw-r--r--test/files/run/reify_newimpl_22.check2
-rw-r--r--test/files/run/reify_newimpl_23.check2
-rw-r--r--test/files/run/reify_newimpl_25.check2
-rw-r--r--test/files/run/reify_newimpl_26.check2
-rw-r--r--test/files/run/reify_newimpl_30.check6
-rw-r--r--test/files/run/reify_printf.scala8
-rw-r--r--test/files/run/repl-bare-expr.check4
-rw-r--r--test/files/run/repl-colon-type.check25
-rw-r--r--test/files/run/repl-colon-type.scala4
-rw-r--r--test/files/run/repl-empty-package.check7
-rw-r--r--test/files/run/repl-empty-package/s_1.scala3
-rw-r--r--test/files/run/repl-empty-package/s_2.scala5
-rw-r--r--test/files/run/repl-javap-def.scala17
-rw-r--r--test/files/run/repl-javap-fun.scala16
-rw-r--r--test/files/run/repl-javap-mem.scala19
-rw-r--r--test/files/run/repl-javap-memfun.scala18
-rw-r--r--test/files/run/repl-javap-more-fun.scala17
-rw-r--r--test/files/run/repl-javap-outdir-funs/foo_1.scala6
-rw-r--r--test/files/run/repl-javap-outdir-funs/run-repl_7.scala12
-rw-r--r--test/files/run/repl-javap-outdir/foo_1.scala6
-rw-r--r--test/files/run/repl-javap-outdir/run-repl_7.scala12
-rw-r--r--test/files/run/repl-javap.scala13
-rw-r--r--test/files/run/repl-out-dir.check53
-rw-r--r--test/files/run/repl-out-dir.scala13
-rw-r--r--test/files/run/repl-paste.check2
-rw-r--r--test/files/run/runtime.scala2
-rw-r--r--test/files/run/search.check6
-rw-r--r--test/files/run/search.scala14
-rw-r--r--test/files/run/settings-parse.check566
-rw-r--r--test/files/run/settings-parse.scala27
-rw-r--r--test/files/run/shortClass.check10
-rw-r--r--test/files/run/shortClass.scala24
-rw-r--r--test/files/run/showraw_aliases.check4
-rw-r--r--test/files/run/showraw_mods.check2
-rw-r--r--test/files/run/showraw_tree.check4
-rw-r--r--test/files/run/showraw_tree_ids.check4
-rw-r--r--test/files/run/showraw_tree_kinds.check4
-rw-r--r--test/files/run/showraw_tree_types_ids.check16
-rw-r--r--test/files/run/showraw_tree_types_typed.check16
-rw-r--r--test/files/run/showraw_tree_types_untyped.check4
-rw-r--r--test/files/run/showraw_tree_ultimate.check16
-rw-r--r--test/files/run/structural.scala2
-rw-r--r--test/files/run/t0091.check1
-rw-r--r--test/files/run/t0091.scala15
-rw-r--r--test/files/run/t1042.scala2
-rw-r--r--test/files/run/t107.check1
-rw-r--r--test/files/run/t107.scala8
-rw-r--r--test/files/run/t1500.scala2
-rw-r--r--test/files/run/t1501.scala2
-rw-r--r--test/files/run/t1505.scala13
-rw-r--r--test/files/run/t2251.check1
-rw-r--r--test/files/run/t2251.scala19
-rw-r--r--test/files/run/t2251b.check11
-rw-r--r--test/files/run/t2251b.scala48
-rw-r--r--test/files/run/t2318.check (renamed from test/pending/run/t2318.check)0
-rw-r--r--test/files/run/t2318.scala (renamed from test/pending/run/t2318.scala)3
-rw-r--r--test/files/run/t2577.check1
-rw-r--r--test/files/run/t2577.scala17
-rw-r--r--test/files/run/t2873.check2
-rw-r--r--test/files/run/t2873.scala7
-rw-r--r--test/files/run/t2886.check4
-rw-r--r--test/files/run/t3038d.scala4
-rw-r--r--test/files/run/t3667.check3
-rw-r--r--test/files/run/t3667.scala35
-rw-r--r--test/files/run/t3835.scala2
-rw-r--r--test/files/run/t3897.check (renamed from test/pending/run/t3897.check)0
-rw-r--r--test/files/run/t3897/J_2.java (renamed from test/pending/run/t3897/J_2.java)0
-rw-r--r--test/files/run/t3897/a_1.scala (renamed from test/pending/run/t3897/a_1.scala)0
-rw-r--r--test/files/run/t3897/a_2.scala (renamed from test/pending/run/t3897/a_2.scala)0
-rw-r--r--test/files/run/t4023.check21
-rw-r--r--test/files/run/t4023.scala23
-rw-r--r--test/files/run/t4047.scala2
-rw-r--r--test/files/run/t4351.check (renamed from test/files/pos/t4351.check)0
-rw-r--r--test/files/run/t4351.scala (renamed from test/files/pos/t4351.scala)3
-rw-r--r--test/files/run/t4415.scala2
-rw-r--r--test/files/run/t4537.check1
-rw-r--r--test/files/run/t4537/a.scala (renamed from test/files/neg/t4537/a.scala)4
-rw-r--r--test/files/run/t4537/b.scala (renamed from test/files/neg/t4537/b.scala)4
-rw-r--r--test/files/run/t4537/c.scala8
-rw-r--r--test/files/run/t4537/d.scala6
-rw-r--r--test/files/run/t4671.check2
-rw-r--r--test/files/run/t4729/S_2.scala2
-rw-r--r--test/files/run/t4859.check8
-rw-r--r--test/files/run/t4859.scala29
-rw-r--r--test/files/run/t4935.flags2
-rw-r--r--test/files/run/t4996.check4
-rw-r--r--test/files/run/t4996.scala47
-rw-r--r--test/files/run/t5064.check6
-rw-r--r--test/files/run/t5225_2.check2
-rw-r--r--test/files/run/t5225_2.scala2
-rw-r--r--test/files/run/t5293-map.scala (renamed from test/pending/run/t5293-map.scala)0
-rw-r--r--test/files/run/t5293.scala (renamed from test/pending/run/t5293.scala)0
-rw-r--r--test/files/run/t5353.check2
-rw-r--r--test/files/run/t5353.scala9
-rw-r--r--test/files/run/t5418.check0
-rw-r--r--test/files/run/t5418.scala (renamed from test/pending/run/t5418.scala)0
-rw-r--r--test/files/run/t5418b.check2
-rw-r--r--test/files/run/t5603.check4
-rw-r--r--test/files/run/t5603.scala3
-rw-r--r--test/files/run/t5610a.check (renamed from test/pending/run/t5610a.check)0
-rw-r--r--test/files/run/t5610a.scala (renamed from test/pending/run/t5610a.scala)0
-rw-r--r--test/files/run/t5655.check2
-rw-r--r--test/files/run/t5879.check8
-rw-r--r--test/files/run/t5879.scala15
-rw-r--r--test/files/run/t6028.check20
-rw-r--r--test/files/run/t6064.scala9
-rw-r--r--test/files/run/t6102.flags2
-rw-r--r--test/files/run/t6150.scala8
-rw-r--r--test/files/run/t6178.scala2
-rw-r--r--test/files/run/t6181.scala2
-rw-r--r--test/files/run/t6187.check2
-rw-r--r--test/files/run/t6199-mirror.scala2
-rw-r--r--test/files/run/t6223.check2
-rw-r--r--test/files/run/t6223.scala2
-rw-r--r--test/files/run/t6288.check14
-rw-r--r--test/files/run/t6288b-jump-position.check6
-rw-r--r--test/files/run/t6329_repl.check32
-rw-r--r--test/files/run/t6329_repl.scala13
-rw-r--r--test/files/run/t6329_vanilla.check8
-rw-r--r--test/files/run/t6329_vanilla.scala14
-rw-r--r--test/files/run/t6381.check17
-rw-r--r--test/files/run/t6381.scala13
-rw-r--r--test/files/run/t6392b.check2
-rw-r--r--test/files/run/t6394a/Macros_1.scala2
-rw-r--r--test/files/run/t6406-regextract.check4
-rw-r--r--test/files/run/t6406-regextract.scala30
-rw-r--r--test/files/run/t6439.check25
-rw-r--r--test/files/run/t6439.scala12
-rw-r--r--test/files/run/t6448.check32
-rw-r--r--test/files/run/t6448.scala61
-rw-r--r--test/files/run/t6467.scala20
-rw-r--r--test/files/run/t6548.check2
-rw-r--r--test/files/run/t6548/JavaAnnotationWithNestedEnum_1.java17
-rw-r--r--test/files/run/t6548/Test_2.scala12
-rw-r--r--test/files/run/t6572/bar_1.scala19
-rw-r--r--test/files/run/t6572/foo_2.scala17
-rw-r--r--test/files/run/t6591_1.check2
-rw-r--r--test/files/run/t6591_2.check2
-rw-r--r--test/files/run/t6591_3.check2
-rw-r--r--test/files/run/t6591_5.check2
-rw-r--r--test/files/run/t6591_6.check2
-rw-r--r--test/files/run/t6611.scala18
-rw-r--r--test/files/run/t6745-2.scala22
-rw-r--r--test/files/run/t6860.check4
-rw-r--r--test/files/run/t6860.scala20
-rw-r--r--test/files/run/t6939.scala13
-rw-r--r--test/files/run/t6955.scala4
-rw-r--r--test/files/run/t6989.check84
-rw-r--r--test/files/run/t7015.check11
-rw-r--r--test/files/run/t7015.scala49
-rw-r--r--test/files/run/t7064-old-style-supercalls.check1
-rw-r--r--test/files/run/t7064-old-style-supercalls.scala48
-rw-r--r--test/files/run/t7096.scala2
-rw-r--r--test/files/run/t7120.check1
-rw-r--r--test/files/run/t7120/Base_1.scala10
-rw-r--r--test/files/run/t7120/Derived_2.scala9
-rw-r--r--test/files/run/t7120/Run_3.scala3
-rw-r--r--test/files/run/t7120b.check2
-rw-r--r--test/files/run/t7120b.scala27
-rw-r--r--test/files/run/t7181.check23
-rw-r--r--test/files/run/t7181.scala78
-rw-r--r--test/files/run/t7185.check2
-rw-r--r--test/files/run/t7231.check2
-rw-r--r--test/files/run/t7231.scala11
-rw-r--r--test/files/run/tailcalls.scala18
-rw-r--r--test/files/run/test-cpp.scala2
-rw-r--r--test/files/run/toolbox_typecheck_implicitsdisabled.check2
-rw-r--r--test/files/run/toolbox_typecheck_implicitsdisabled.scala8
-rw-r--r--test/files/run/toolbox_typecheck_macrosdisabled.scala4
-rw-r--r--test/files/run/toolbox_typecheck_macrosdisabled2.check2
-rw-r--r--test/files/run/toolbox_typecheck_macrosdisabled2.scala4
-rw-r--r--test/files/run/try-2.scala16
-rw-r--r--test/files/run/try.scala10
-rw-r--r--test/files/run/unreachable.scala125
-rw-r--r--test/files/run/verify-ctor.scala2
-rw-r--r--test/files/scalacheck/CheckEither.scala28
-rw-r--r--test/files/scalacheck/ReflectionExtractors.scala52
-rw-r--r--test/files/scalacheck/redblack.scala213
-rw-r--r--test/flaky/pos/t2868.cmds3
-rw-r--r--test/instrumented/library/scala/runtime/BoxesRunTime.java2
-rw-r--r--test/instrumented/library/scala/runtime/ScalaRunTime.scala2
-rw-r--r--test/osgi/src/BasicReflection.scala18
-rwxr-xr-xtest/partest19
-rwxr-xr-xtest/partest.bat2
-rw-r--r--test/pending/jvm/cf-attributes.scala26
-rw-r--r--test/pending/pos/overloading-boundaries.scala37
-rw-r--r--test/pending/pos/t1751.cmds3
-rw-r--r--test/pending/pos/t1782.cmds2
-rw-r--r--test/pending/pos/t1832.scala10
-rw-r--r--test/pending/pos/t294.cmds3
-rw-r--r--test/pending/pos/t4612.scala15
-rw-r--r--test/pending/pos/t4695/T_1.scala4
-rw-r--r--test/pending/pos/t4695/T_2.scala4
-rw-r--r--test/pending/pos/t4717.scala7
-rw-r--r--test/pending/pos/t5082.scala8
-rw-r--r--test/pending/pos/t5259.scala14
-rw-r--r--test/pending/pos/t5399.scala8
-rw-r--r--test/pending/pos/t5459.scala48
-rw-r--r--test/pending/pos/t5626.scala12
-rw-r--r--test/pending/pos/t5654.scala4
-rw-r--r--test/pending/pos/t5877.scala5
-rw-r--r--test/pending/pos/t5954/T_1.scala8
-rw-r--r--test/pending/pos/t5954/T_2.scala8
-rw-r--r--test/pending/pos/t5954/T_3.scala8
-rw-r--r--test/pending/pos/t6225.scala11
-rw-r--r--test/pending/pos/those-kinds-are-high.scala53
-rw-r--r--test/pending/pos/z1720.scala16
-rw-r--r--test/pending/run/hk-lub-fail.scala10
-rw-r--r--test/pending/run/macro-expand-default/Impls_1.scala4
-rw-r--r--test/pending/run/macro-expand-macro-has-context-bound/Impls_1.scala4
-rw-r--r--test/pending/run/macro-expand-named/Impls_1.scala4
-rw-r--r--test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala6
-rw-r--r--test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala6
-rw-r--r--test/pending/run/macro-reify-tagless-b/Test_2.scala6
-rw-r--r--test/pending/run/t5427a.scala2
-rw-r--r--test/pending/run/t5427b.scala2
-rw-r--r--test/pending/run/t5427c.scala2
-rw-r--r--test/pending/run/t5427d.scala2
-rw-r--r--test/postreview.py2540
-rwxr-xr-xtest/review44
-rw-r--r--test/scaladoc/resources/SI_4715.scala4
-rw-r--r--test/scaladoc/resources/Trac4325.scala4
-rw-r--r--test/scaladoc/resources/doc-root/Any.scala2
-rw-r--r--test/scaladoc/resources/doc-root/AnyRef.scala2
-rw-r--r--test/scaladoc/resources/doc-root/Nothing.scala2
-rw-r--r--test/scaladoc/resources/doc-root/Null.scala2
-rw-r--r--test/scaladoc/resources/implicits-base-res.scala80
-rw-r--r--test/scaladoc/resources/links.scala2
-rw-r--r--test/scaladoc/run/SI-6812.scala24
-rw-r--r--test/scaladoc/run/implicits-base.scala148
-rw-r--r--test/scaladoc/run/links.scala2
-rw-r--r--test/scaladoc/run/t5527.check (renamed from test/files/run/t5527.check)9
-rw-r--r--test/scaladoc/run/t5527.scala (renamed from test/files/run/t5527.scala)0
-rw-r--r--test/scaladoc/scalacheck/HtmlFactoryTest.scala2
-rw-r--r--test/scaladoc/scalacheck/IndexScriptTest.scala2
-rw-r--r--test/scaladoc/scalacheck/IndexTest.scala6
-rw-r--r--test/script-tests/jar-manifest/run-test.check2
-rwxr-xr-xtools/binary-repo-lib.sh2
-rwxr-xr-xtools/buildcp2
-rwxr-xr-xtools/make-release-notes49
-rw-r--r--tools/make-release-notes.scala129
-rwxr-xr-xtools/partest-ack141
-rwxr-xr-xtools/stability-test.sh29
-rwxr-xr-xtools/strapcp3
2353 files changed, 31953 insertions, 72464 deletions
diff --git a/.gitattributes b/.gitattributes
index 958b0b9f28..ac98781b3d 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1,10 +1,16 @@
+# fallback on built-in heuristics
+# this must be first so later entries will override it
+* text=auto
+
# These files are text and should be normalized (convert crlf => lf)
*.c text
*.check text
*.css text
+*.flags text
*.html text
*.java text
*.js text
+*.policy text
*.sbt text
*.scala text
*.sh text
diff --git a/.mailmap b/.mailmap
index 49d5dc6629..e461c0cdf8 100644
--- a/.mailmap
+++ b/.mailmap
@@ -1,25 +1,74 @@
-Aleksandar Prokopec <aleksandar@aleksandar-Latitude-E6500.(none)>
-Aleksandar Prokopec <aleksandar@htpc.(none)>
-Aleksandar Prokopec <aleksandar@htpc-axel22.(none)>
-Aleksandar Prokopec <aleksandar@lampmac14.epfl.ch>
+Adriaan Moors <adriaan.moors@typesafe.com>
+Adriaan Moors <adriaan.moors@typesafe.com> <adriaan.moors@epfl.ch>
+Adriaan Moors <adriaan.moors@typesafe.com> <adriaanm@gmail.com>
Aleksandar Prokopec <aleksandar.prokopec@epfl.ch>
+Aleksandar Prokopec <aleksandar.prokopec@epfl.ch> <aleksandar.prokopec@gmail.com>
+Aleksandar Prokopec <aleksandar.prokopec@epfl.ch> <aleksandar@aleksandar-Latitude-E6500.(none)>
+Aleksandar Prokopec <aleksandar.prokopec@epfl.ch> <aleksandar@htpc-axel22.(none)>
+Aleksandar Prokopec <aleksandar.prokopec@epfl.ch> <aleksandar@htpc.(none)>
+Aleksandar Prokopec <aleksandar.prokopec@epfl.ch> <aleksandar@lampmac14.epfl.ch>
+Aleksandar Prokopec <aleksandar.prokopec@epfl.ch> <axel22@gmail.com>
+Alex Cruise <alex@cluonflux.com>
+Alex Cruise <alex@cluonflux.com> <alex@metaforsoftware.com>
Antonio Cunei <antonio.cunei@typesafe.com>
+Antonio Cunei <antonio.cunei@typesafe.com> <antonio.cunei@epfl.ch>
+Buraq Emir <buraq@epfl.ch>
Caoyuan Deng <dcaoyuan@epfl.ch>
Chris Hodapp <clhodapp1@gmail.com>
Chris James <chrisJames@epfl.ch>
Christopher Vogt <vogt@epfl.ch>
+Christopher Vogt <vogt@epfl.ch> <christopher.vogt@epfl.ch>
+Christopher Vogt <vogt@epfl.ch> <github.com.nsp@cvogt.org>
Damien Obrist <damien.obrist@gmail.com>
-Daniel C. Sobral <dcs@dcs-132-CK-NF79.(none)>
+Daniel C. Sobral <dcsobral@gmail.com>
+Daniel C. Sobral <dcsobral@gmail.com> <dcs@dcs-132-CK-NF79.(none)>
+Daniel Lorch <lorch@epfl.ch>
+Erik Stenman <stenman@epfl.ch>
+Eugene Burmako <xeno.by@gmail.com>
+Eugene Burmako <xeno.by@gmail.com> <burmako@epfl.ch>
+Eugene Vigdorchik <eugenevigdorchik@epfl.ch> <eugene.vigdorchik@gmail.com>
+Geoff Reedy <geoff@programmer-monk.net> <gereedy@sandia.gov>
Ilya Sergei <ilyas@epfl.ch>
-Ingo Maier <ingoem@gmail.com>
+Ingo Maier <ingo.maier@epfl.ch>
+Ingo Maier <ingo.maier@epfl.ch> <ingoem@gmail.com>
+Josh Suereth <joshua.suereth@gmail.com>
+Josh Suereth <joshua.suereth@gmail.com> <Joshua.Suereth@gmail.com>
+Julien Eberle <jeberle@epfl.ch>
Kenji Yoshida <6b656e6a69@gmail.com>
+Luc Bourlier <luc.bourlier@typesafe.com>
+Luc Bourlier <luc.bourlier@typesafe.com> <skyluc@epfl.ch>
Luc Bourlier <skyluc@epfl.ch>
-Martin Odersky <odersky@gamil.com>
+Martin Odersky <odersky@gmail.com>
+Martin Odersky <odersky@gmail.com> <odersky@gamil.com>
+Michael Pradel <pradel@epfl.ch>
+Michel Schinz <schinz@epfl.ch>
+Miguel Garcia <magarcia@epfl.ch>
+Miguel Garcia <magarcia@epfl.ch> <miguelalfredo.garcia@epfl.ch>
+Mirco Dotta <mirco.dotta@typesafe.com>
+Mirco Dotta <mirco.dotta@typesafe.com> <mirco.dotta@gmail.com>
+Moez A. Abdel-Gawad <moez@epfl.ch>
+Mohsen Lesani <lesani@epfl.ch>
Nada Amin <amin@epfl.ch>
-Nada Amin <nada.amin@epfl.ch>
+Nada Amin <amin@epfl.ch> <nada.amin@epfl.ch>
+Nada Amin <amin@epfl.ch> <namin@alum.mit.edu>
Natallie Baikevich <lu-a-jalla@ya.ru>
+Nikolay Mihaylov <mihaylov@epfl.ch>
+Paolo Giarrusso <p.giarrusso@gmail.com>
Pavel Pavlov <pavel.e.pavlov@gmail.com>
Philipp Haller <philipp.haller@typesafe.com>
+Philipp Haller <philipp.haller@typesafe.com> <hallerp@gmail.com>
+Philippe Altherr <paltherr@epfl.ch>
+Raphaël Noir <noir@epfl.ch>
Roland Kuhn <rk@rkuhn.info>
Rüdiger Klaehn <rklaehn@gmail.com>
+Sebastian Hack <shack@epfl.ch>
+Simon Ochsenreither <simon@ochsenreither.de>
+Stepan Koltsov <stepancheg@epfl.ch>
Stéphane Micheloud <michelou@epfl.ch>
+Unknown Committer <lost.soul@typesafe.com>
+Unknown Committer <lost.soul@typesafe.com> <USER@epfl.ch>
+Unknown Committer <lost.soul@typesafe.com> <noreply@epfl.ch>
+Viktor Klang <viktor.klang@gmail.com>
+Vincent Cremet <cremet@epfl.ch>
+Vojin Jovanovic <vojin.jovanovic@epfl.ch>
+Vojin Jovanovic <vojin.jovanovic@epfl.ch> <gvojin@gmail.com>
diff --git a/META-INF/MANIFEST.MF b/META-INF/MANIFEST.MF
index 4d5573db12..4ee2d086ac 100644
--- a/META-INF/MANIFEST.MF
+++ b/META-INF/MANIFEST.MF
@@ -7,9 +7,7 @@ Eclipse-LazyStart: true
Bundle-ClassPath:
.,
bin,
- lib/fjbg.jar,
lib/jline.jar,
- lib/msil.jar
Export-Package:
scala.tools.nsc,
scala.tools.nsc.ast,
@@ -47,11 +45,6 @@ Export-Package:
scala.reflect.runtime,
scala.reflect.internal.transform,
scala.reflect.api,
- ch.epfl.lamp.compiler.msil,
- ch.epfl.lamp.compiler.msil.emit,
- ch.epfl.lamp.compiler.msil.util,
- ch.epfl.lamp.fjbg,
- ch.epfl.lamp.util
Require-Bundle:
org.apache.ant,
org.scala-ide.scala.library
diff --git a/README.rst b/README.rst
index 72c4b6028b..c871adb908 100644
--- a/README.rst
+++ b/README.rst
@@ -7,18 +7,17 @@ and how to build it. For information about Scala as a language, you can visit
the web site http://www.scala-lang.org/
Part I. The repository layout
---------------------------------------------------------------------------------
+--------------------------------------------------------------------------------
Follows the file layout of the Scala repository. Files marked with a † are not
part of the repository but are either automatically generated by the
-build script or user-created if needed. This is not a complete listing. ::
+build script or user-created if needed. This is not a complete listing. ::
scala/
+--build/ Build products output directory for ant.
+--build.xml The main Ant build script.
+--dist/ The destination folder for Scala distributions.
+--docs/ Documentation and sample code.
+--lib/ Pre-compiled libraries for the build.
- | +--fjbg.jar The Java byte-code generation library.
| +--scala-compiler.jar The stable reference ('starr') compiler jar
| +--scala-library.jar The stable reference ('starr') library jar
| +--scala-library-src.jar A snapshot of the source used to build starr.
@@ -68,7 +67,7 @@ description of the four layers that SABBUS uses, from bottom to top:
- ``strap``: a test layer used to check stability of the build.
^^^^^^^^^^^^^^^^^^^^^^^^
- DEPENDANT CHANGES:
+ DEPENDENT CHANGES:
^^^^^^^^^^^^^^^^^^^^^^^^
SABBUS compiles, for each layer, the Scala library first and the compiler next.
That means that any changes in the library can immediately be used in the
@@ -111,12 +110,12 @@ Part III. Common use-cases
- ``ln -s build/quick/bin qbin`` (once):
- ``ant && qbin/scalac -d sandbox sandbox/test.scala && qbin/scala -cp sandbox Test``
-
+
Incrementally builds quick, and then uses it to compile and run the file
``sandbox/test.scala``. This is a typical debug cycle.
- ``ant replacelocker``
-
+
"unfreezes" locker by updating it to match the current source code.
- This will delete quick so as not to mix classes compiled with different
@@ -137,7 +136,7 @@ Part III. Common use-cases
ANT_OPTS="-Xms512M -Xmx2048M -Xss1M -XX:MaxPermSize=128M" ant docs
- ``ant dist``
-
+
Builds a distribution.
- Rebuilds locker from scratch (to make sure it bootstraps).
diff --git a/bincompat-backward.whitelist.conf b/bincompat-backward.whitelist.conf
deleted file mode 100644
index 4794666721..0000000000
--- a/bincompat-backward.whitelist.conf
+++ /dev/null
@@ -1,163 +0,0 @@
-filter {
- problems=[
- # Scala library
- {
- # can only be called from Stream::distinct, which cannot itself be inlined, so distinct is the only feasible call-site
- matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$4"
- problemName=MissingMethodProblem
- },
- {
- # can only be called from Stream::distinct, which cannot itself be inlined, so distinct is the only feasible call-site
- matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$5"
- problemName=MissingMethodProblem
- },
- # {
- # # private[scala]
- # matchName="scala.collection.immutable.ListSerializeStart$"
- # problemName=MissingClassProblem
- # },
- # {
- # # private[scala]
- # matchName="scala.collection.immutable.ListSerializeStart"
- # problemName=MissingClassProblem
- # },
- {
- # private nested class became private top-level class to fix SI-7018
- matchName="scala.reflect.macros.Attachments$NonemptyAttachments"
- problemName=MissingClassProblem
- },
-
- # scala.reflect.runtime
- # {
- # matchName="scala.reflect.runtime.JavaUniverse.createClassModule"
- # problemName=MissingMethodProblem
- # },
- # {
- # matchName="scala.reflect.runtime.JavaUniverse.initClassModule"
- # problemName=MissingMethodProblem
- # },
- # {
- # matchName="scala.reflect.runtime.SymbolLoaders.createClassModule"
- # problemName=MissingMethodProblem
- # },
- # {
- # matchName="scala.reflect.runtime.SymbolLoaders.initClassModule"
- # problemName=MissingMethodProblem
- # },
- # {
- # matchName="scala.reflect.runtime.SymbolLoaders.initClassAndModule"
- # problemName=MissingMethodProblem
- # },
- # {
- # matchName="scala.reflect.runtime.SymbolLoaders.initAndEnterClassAndModule"
- # problemName=MissingMethodProblem
- # },
- # {
- # matchName="scala.reflect.runtime.JavaMirrors#JavaMirror.scala$reflect$runtime$JavaMirrors$JavaMirror$$jclassAsScala"
- # problemName=MissingMethodProblem
- # },
- # {
- # matchName="scala.reflect.runtime.JavaMirrors#JavaMirror.scala$reflect$runtime$JavaMirrors$JavaMirror$$jclassAsScala"
- # problemName=IncompatibleResultTypeProblem
- # },
-
-
- # scala.reflect.internal
- {
- matchName="scala.reflect.internal.TreeInfo.scala$reflect$internal$TreeInfo$$isVarPatternDeep0$1"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.TreeInfo.typeArguments"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.TreeInfo.applyDepth"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.TreeInfo.firstTypeArg"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.TreeInfo.methPart"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.TreeInfo.firstArgument"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Trees.DefDef"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.ExistentialsAndSkolems.deskolemizeTypeParams"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.SymbolTable.deAlias"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.SymbolTable.deskolemizeTypeParams"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.AnnotationInfos#Annotatable.addThrowsAnnotation"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.StdAttachments#Attachable.setAttachments"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Types#TypeVar.scala$reflect$internal$Types$TypeVar$$addBound$1"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.AnnotationCheckers$AnnotationChecker"
- problemName=IncompatibleTemplateDefProblem
- },
- {
- matchName="scala.reflect.internal.Types.deAlias"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Types.inheritsJavaVarArgsMethod"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Types.nonTrivialMembers"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Types.isJavaVarargsAncestor"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Types.nestedMemberType"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Types.normalizeAliases"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Trees#ChangeOwnerTraverser.changeOwner"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.StdAttachments.SuppressMacroExpansionAttachment"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.util.Statistics#RelCounter.scala$reflect$internal$util$Statistics$RelCounter$$super$prefix"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Names#NameOps.name"
- problemName=MissingFieldProblem
- }
- ]
-}
diff --git a/bincompat-forward.whitelist.conf b/bincompat-forward.whitelist.conf
deleted file mode 100644
index 529fab1e14..0000000000
--- a/bincompat-forward.whitelist.conf
+++ /dev/null
@@ -1,359 +0,0 @@
-filter {
- problems=[
- # rework d526f8bd74 to duplicate tailImpl as a private method
- # {
- # matchName="scala.collection.mutable.MutableList.tailImpl"
- # problemName=MissingMethodProblem
- # },
- {
- # can only be called from Stream::distinct, which cannot itself be inlined, so distinct is the only feasible call-site
- matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$6"
- problemName=MissingMethodProblem
- },
- {
- # can only be called from Stream::distinct, which cannot itself be inlined, so distinct is the only feasible call-site
- matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$4"
- problemName=MissingMethodProblem
- },
- {
- # can only be called from Stream::distinct, which cannot itself be inlined, so distinct is the only feasible call-site
- matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$5"
- problemName=MissingMethodProblem
- },
- # TODO: revert a557a97360: bridge method appeared because result is now Int but the super-method's result type erases to Object
- # {
- # matchName="scala.collection.immutable.Range.head"
- # problemName=IncompatibleResultTypeProblem
- # },
- # revert 0b92073a38 2aa66bec86: SI-4664 [Make scala.util.Random Serializable] Add test case
- # {
- # matchName="scala.util.Random"
- # problemName=MissingTypesProblem
- # },
- # {
- # matchName="scala.util.Random$"
- # problemName=MissingTypesProblem
- # },
- # {
- # # private[concurrent]
- # matchName="scala.concurrent.BatchingExecutor$Batch"
- # problemName=MissingClassProblem
- # },
- # {
- # # private[concurrent]
- # matchName="scala.concurrent.BatchingExecutor"
- # problemName=MissingClassProblem
- # },
- # {
- # # private[concurrent]
- # matchName="scala.concurrent.impl.ExecutionContextImpl$AdaptedForkJoinTask"
- # problemName=MissingClassProblem
- # },
- # {
- # # private[concurrent]
- # matchName="scala.concurrent.impl.ExecutionContextImpl.scala$concurrent$impl$ExecutionContextImpl$$uncaughtExceptionHandler"
- # problemName=MissingMethodProblem
- # },
- {
- # private nested class became private top-level class to fix SI-7018
- matchName="scala.reflect.macros.NonemptyAttachments"
- problemName=MissingClassProblem
- },
-
- # scala.reflect.runtime
- # {
- # matchName="scala.reflect.runtime.JavaMirrors#JavaMirror.scala$reflect$runtime$JavaMirrors$JavaMirror$$jclassAsScala"
- # problemName=IncompatibleResultTypeProblem
- # },
- # {
- # matchName="scala.reflect.runtime.JavaMirrors#JavaMirror.scala$reflect$runtime$JavaMirrors$JavaMirror$$jclassAsScala1"
- # problemName=MissingMethodProblem
- # },
- # {
- # matchName="scala.reflect.runtime.SymbolLoaders.initClassAndModule"
- # problemName=MissingMethodProblem
- # },
- # {
- # matchName="scala.reflect.runtime.SymbolLoaders.initAndEnterClassAndModule"
- # problemName=MissingMethodProblem
- # },
- # {
- # matchName="scala.reflect.runtime.SymbolLoaders.createClassModule"
- # problemName=MissingMethodProblem
- # },
- # {
- # matchName="scala.reflect.runtime.SymbolLoaders.initClassModule"
- # problemName=MissingMethodProblem
- # },
- # {
- # matchName="scala.reflect.runtime.JavaUniverse"
- # problemName=MissingTypesProblem
- # },
- # {
- # matchName="scala.reflect.runtime.JavaUniverse.initClassAndModule"
- # problemName=MissingMethodProblem
- # },
- # {
- # matchName="scala.reflect.runtime.JavaUniverse.initAndEnterClassAndModule"
- # problemName=MissingMethodProblem
- # },
-
- # scala.reflect.internal
- {
- matchName="scala.reflect.internal.Types#Type.dealiasWidenChain"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Types#Type.dealiasWiden"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Types#Type.addThrowsAnnotation"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Types#TypeVar.scala$reflect$internal$Types$TypeVar$$unifySpecific$1"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Types#SubstSymMap.mapTreeSymbols"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Types#SubstSymMap.this"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.PrivateWithin"
- problemName=MissingClassProblem
- },
- {
- matchName="scala.reflect.internal.TreeInfo$Applied"
- problemName=MissingClassProblem
- },
- {
- matchName="scala.reflect.internal.AnnotationInfos#Annotatable.addThrowsAnnotation"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Types.inheritsJavaVarArgsMethod"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Types.nonTrivialMembers"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Types.isJavaVarargsAncestor"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Types.normalizeAliases"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Types.nestedMemberType"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Types.deAlias"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.ExistentialsAndSkolems.deskolemizeTypeParams"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.StdAttachments#Attachable.setAttachments"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.AnnotationInfos#AnnotationInfo.completeInfo"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Symbols#Symbol.isCompileTimeOnly"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Symbols#Symbol.addThrowsAnnotation"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Symbols#Symbol.toOption"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Symbols#Symbol.compileTimeOnlyMessage"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Symbols#Symbol.setAttachments"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Symbols#Symbol.addAnnotation"
- problemName=IncompatibleMethTypeProblem
- },
- {
- matchName="scala.reflect.internal.Trees.DefDef"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Trees$TreeStackTraverser"
- problemName=MissingClassProblem
- },
- {
- matchName="scala.reflect.internal.Trees#ChangeOwnerTraverser.change"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.AnnotationCheckers$AnnotationChecker"
- problemName=IncompatibleTemplateDefProblem
- },
- {
- matchName="scala.reflect.internal.TreeInfo$Applied$"
- problemName=MissingClassProblem
- },
- {
- matchName="scala.reflect.internal.Trees#Tree.setAttachments"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.StdNames#TermNames.DEFAULT_CASE"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.TreeInfo.Applied"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.TreeInfo.isWildcardStarType"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.TreeInfo.isSyntheticDefaultCase"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.TreeInfo.StripCast"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.TreeInfo.dissectApplied"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.TreeInfo.stripCast"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.SymbolTable"
- problemName=MissingTypesProblem
- },
- {
- matchName="scala.reflect.internal.SymbolTable.StringContextStripMarginOps"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.SymbolTable.inheritsJavaVarArgsMethod"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.SymbolTable.assertCorrectThread"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.SymbolTable.SuppressMacroExpansionAttachment"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.SymbolTable.nonTrivialMembers"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.SymbolTable.isJavaVarargsAncestor"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.SymbolTable.normalizeAliases"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.SymbolTable.DefDef"
- problemName=IncompatibleMethTypeProblem
- },
- {
- matchName="scala.reflect.internal.SymbolTable.nestedMemberType"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.SymbolTable.importPrivateWithinFromJavaFlags"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.StdAttachments.SuppressMacroExpansionAttachment"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Definitions#DefinitionsClass.ArrayModule_genericApply"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Definitions#DefinitionsClass.allParameters"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Definitions#DefinitionsClass.Predef_wrapArray"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Definitions#DefinitionsClass.dropNullaryMethod"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Definitions#DefinitionsClass.ArrayModule_apply"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Definitions#DefinitionsClass.OptionModule"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Definitions#DefinitionsClass.Option_apply"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.Definitions#DefinitionsClass.CompileTimeOnlyAttr"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.AnnotationInfos#LazyAnnotationInfo.completeInfo"
- problemName=MissingMethodProblem
- },
- {
- matchName="scala.reflect.internal.util.package"
- problemName=MissingClassProblem
- },
- {
- matchName="scala.reflect.internal.util.package$"
- problemName=MissingClassProblem
- },
- {
- matchName="scala.reflect.internal.util.StripMarginInterpolator"
- problemName=MissingClassProblem
- },
- {
- matchName="scala.reflect.internal.util.package$StringContextStripMarginOps"
- problemName=MissingClassProblem
- },
- {
- matchName="scala.reflect.internal.annotations.compileTimeOnly"
- problemName=MissingClassProblem
- },
- {
- matchName="scala.reflect.internal.StdNames#TermNames.SelectFromTypeTree"
- problemName=MissingMethodProblem
- }
- ]
-}
diff --git a/build.detach.xml b/build.detach.xml
deleted file mode 100644
index 132c812a26..0000000000
--- a/build.detach.xml
+++ /dev/null
@@ -1,186 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="sabbus" default="build">
-
- <description>
-SuperSabbus for Scala detach plugin.
- </description>
-
- <echo level="info" message="Running SABBUS for ${ant.project.name}..."/>
-
-<!-- ===========================================================================
-END-USER TARGETS
-============================================================================ -->
-
- <target name="build" depends="pack.done"
- description="Builds the Scala detach plugin."/>
-
- <target name="clean" depends="quick.clean">
- </target>
-
- <target name="all.clean" depends="quick.clean, pack.clean">
- </target>
-
-<!-- ===========================================================================
-PROPERTIES
-============================================================================ -->
-
- <property environment="env"/>
- <!-- Prevents system classpath from being used -->
- <property name="build.sysclasspath" value="ignore"/>
-
- <!-- Defines the repository layout -->
- <property name="lib.dir" value="${basedir}/lib"/>
- <property name="src.dir" value="${basedir}/src"/>
- <property name="partest.dir" value="${basedir}/test"/>
-
- <!-- Loads custom properties definitions -->
- <property file="${basedir}/build.properties"/>
-
- <!-- Sets location of build folders -->
- <property name="build.dir" value="${basedir}/build"/>
- <property name="build-quick.dir" value="${build.dir}/quick"/>
- <property name="build-pack.dir" value="${build.dir}/pack"/>
-
- <!-- if ANT_OPTS is already set by the environment, it will be unaltered,
- but if it is unset it will take this default value. -->
- <property name="env.ANT_OPTS" value="-Xms1024M -Xmx1024M -Xss1M -XX:MaxPermSize=128M -XX:+UseParallelGC" />
-
- <property
- name="scalacfork.jvmargs"
- value="${env.ANT_OPTS}"/>
-
- <property name="scalac.args.quick" value="-deprecation"/>
- <property name="scalac.args.optimise" value=""/>
-
- <!-- Setting-up Ant contrib tasks -->
- <taskdef resource="net/sf/antcontrib/antlib.xml" classpath="${lib.dir}/ant/ant-contrib.jar"/>
-
-<!-- ===========================================================================
-QUICK BUILD (QUICK)
-============================================================================ -->
-
- <target name="quick.clean">
- <delete includeemptydirs="yes" quiet="yes" failonerror="no">
- <fileset dir="${build-quick.dir}/classes/detach-library"/>
- <fileset dir="${build-quick.dir}/classes/detach-plugin"/>
- </delete>
- </target>
-
- <target name="quick.done">
- <stopwatch name="quick.done.timer"/>
- <path id="quick.classpath">
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
- <pathelement location="${lib.dir}/fjbg.jar"/>
- <pathelement location="${lib.dir}/msil.jar"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
- <pathelement location="${ant.home}/lib/ant.jar"/>
- </path>
- <taskdef
- resource="scala/tools/ant/sabbus/antlib.xml"
- classpathref="quick.classpath"
- />
- <mkdir dir="${build-quick.dir}/classes/detach-plugin"/>
- <scalacfork
- destdir="${build-quick.dir}/classes/detach-plugin"
- compilerpathref="quick.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/detach/plugin"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
- <pathelement location="${build-quick.dir}/classes/detach-plugin"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
- </compilationpath>
- </scalacfork>
- <copy
- file="${src.dir}/detach/plugin/scalac-plugin.xml"
- todir="${build-quick.dir}/classes/detach-plugin"
- />
- <mkdir dir="${build-quick.dir}/classes/detach-library"/>
- <scalacfork
- destdir="${build-quick.dir}/classes/detach-library"
- compilerpathref="quick.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/detach/library"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
- </compilationpath>
- </scalacfork>
- <touch file="${build-quick.dir}/plugins.complete" verbose="no"/>
- <stopwatch name="quick.done.timer" action="total"/>
- </target>
-
-<!-- ===========================================================================
-PACKED QUICK BUILD (PACK)
-============================================================================ -->
-
- <target name="pack.start" depends="quick.done"/>
-
- <target name="pack.pre-lib" depends="pack.start">
- <uptodate
- property="pack.lib.available"
- targetfile="${build-pack.dir}/lib/scala-detach.jar"
- srcfile="${build-quick.dir}/plugins.complete"/>
- </target>
-
- <target name="pack.lib" depends="pack.pre-lib" unless="pack.lib.available">
- <mkdir dir="${build-pack.dir}/misc/scala-devel/plugins"/>
- <jar destfile="${build-pack.dir}/misc/scala-devel/plugins/detach.jar">
- <fileset dir="${build-quick.dir}/classes/detach-plugin"/>
- </jar>
- <mkdir dir="${build-pack.dir}/lib"/>
- <jar destfile="${build-pack.dir}/lib/scala-detach.jar">
- <fileset dir="${build-quick.dir}/classes/detach-library">
- <include name="scala/**"/>
- </fileset>
- </jar>
- </target>
-
- <target name="pack.done" depends="pack.lib">
- <path id="pack.classpath">
- <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-detach.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-partest.jar"/>
- <pathelement location="${build-pack.dir}/lib/scalap.jar"/>
- <pathelement location="${ant.home}/lib/ant.jar"/>
- <pathelement location="${lib.dir}/jline.jar"/>
- </path>
- <taskdef resource="scala/tools/ant/antlib.xml" classpathref="pack.classpath"/>
- <taskdef resource="scala/tools/partest/antlib.xml" classpathref="pack.classpath"/>
- </target>
-
- <target name="pack.clean">
- <delete includeemptydirs="yes" quiet="yes" failonerror="no">
- <fileset dir="${build-pack.dir}/lib" includes="scala-detach.jar"/>
- <fileset dir="${build-pack.dir}/misc/scala-devel/plugins" includes="detach.jar"/>
- </delete>
- </target>
-
-<!-- ===========================================================================
-TEST SUITE
-============================================================================ -->
-
- <target name="test.suite" depends="pack.done">
- <property name="partest.srcdir" value="files" />
- <partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
- timeout="2400000"
- srcdir="${partest.srcdir}"
- scalacopts="${scalac.args.optimise} -Xpluginsdir ${build-pack.dir}/misc/scala-devel/plugins -Xplugin-require:detach -P:detach:enable">
- <compilationpath>
- <path refid="pack.classpath"/>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
- <negtests dir="${partest.dir}/${partest.srcdir}/detach-neg" includes="*.scala"/>
- <runtests dir="${partest.dir}/${partest.srcdir}/detach-run" includes="*.scala"/>
- </partest>
- </target>
-
-</project>
diff --git a/build.examples.xml b/build.examples.xml
index 62210d5ece..82432121ca 100644
--- a/build.examples.xml
+++ b/build.examples.xml
@@ -28,10 +28,6 @@ PROPERTIES
<!-- Location of pre-compiled libraries properties -->
<property name="scala.lib.jar" value="${lib.dir}/scala-library.jar"/>
<property name="scala.comp.jar" value="${lib.dir}/scala-compiler.jar"/>
- <property name="fjbg.name" value="fjbg.jar"/>
- <property name="fjbg.jar" value="${lib.dir}/${fjbg.name}"/>
- <property name="msil.name" value="msil.jar"/>
- <property name="msil.jar" value="${lib.dir}/${msil.name}"/>
<property name="ant.jar" value="${ant.home}/lib/ant.jar"/>
<property name="ant-contrib.jar" value="${lib.dir}/ant/ant-contrib.jar"/>
<!-- -->
@@ -59,7 +55,7 @@ INITIALISATION
<fail message="Scala library in '${lib.dir}/' is not available">
<condition><not><and>
<available
- classname="scala.Predef"
+ classname="scala.Predef"
classpath="${scala.lib.jar}"
/>
<available
@@ -81,24 +77,6 @@ INITIALISATION
/>
</not></condition>
</fail>
- <echo level="verbose" message="fjbg.jar=${fjbg.jar}"/>
- <fail message="FJBG library in '${lib.dir}/' is not available">
- <condition><not>
- <available
- classname="ch.epfl.lamp.fjbg.JCode"
- classpath="${fjbg.jar}"
- />
- </not></condition>
- </fail>
- <echo level="verbose" message="msil.jar=${msil.jar}"/>
- <fail message="MSIL library in '${lib.dir}/' is not available">
- <condition><not>
- <available
- classname="ch.epfl.lamp.compiler.msil.MemberInfo"
- classpath="${msil.jar}"
- />
- </not></condition>
- </fail>
<echo level="verbose" message="ant.jar=${ant.jar}"/>
<echo level="verbose" message="ant-contrib.jar=${ant-contrib.jar}"/>
<fail message="Additional Ant tasks in '${lib.dir}/' is not available">
@@ -110,14 +88,9 @@ INITIALISATION
</not></condition>
</fail>
<!-- Creating class-pathes -->
- <path id="common.classpath">
- <pathelement location="${fjbg.jar}"/>
- <pathelement location="${msil.jar}"/>
- </path>
<path id="scala.classpath">
<pathelement location="${scala.lib.jar}"/>
<pathelement location="${scala.comp.jar}"/>
- <path refid="common.classpath"/>
</path>
<!-- Creating boot-level tasks -->
<taskdef resource="net/sf/antcontrib/antlib.xml">
diff --git a/build.number b/build.number
index 0b06d13468..51674b6915 100644
--- a/build.number
+++ b/build.number
@@ -1,7 +1,7 @@
#Tue Sep 11 19:21:09 CEST 2007
version.major=2
-version.minor=10
-version.patch=2
+version.minor=11
+version.patch=0
# This is the -N part of a version. if it's 0, it's dropped from maven versions.
version.bnum=0
diff --git a/build.number.maven b/build.number.maven
index eed9f3897c..a8da54397d 100644
--- a/build.number.maven
+++ b/build.number.maven
@@ -1,3 +1,3 @@
version.major=2
-version.minor=10
+version.minor=11
version.patch=0
diff --git a/build.xml b/build.xml
index 01d867d8ca..7de9f80f89 100644
--- a/build.xml
+++ b/build.xml
@@ -22,7 +22,7 @@ END-USER TARGETS
<target name="clean" depends="quick.clean"
description="Removes binaries of compiler and library. Distributions are untouched."/>
- <target name="test" depends="test.done"
+ <target name="test" depends="test.done, osgi.test"
description="Runs test suite and bootstrapping test on Scala compiler and library."/>
<target name="test-opt"
@@ -203,7 +203,6 @@ PROPERTIES
<!-- Sets location of pre-compiled libraries -->
<property name="lib.starr.jar" value="${lib.dir}/scala-library.jar"/>
- <property name="msil.starr.jar" value="${lib.dir}/msil.jar"/>
<property name="reflect.starr.jar" value="${lib.dir}/scala-reflect.jar"/>
<property name="comp.starr.jar" value="${lib.dir}/scala-compiler.jar"/>
<property name="jline.jar" value="${lib.dir}/jline.jar"/>
@@ -434,6 +433,7 @@ INITIALISATION
<property name="scalac.args.quickonly" value=""/>
<property name="scalac.args.all" value="${scalac.args.always} ${scalac.args} ${scalac.args.optimise}"/>
<property name="scalac.args.quick" value="${scalac.args.all} ${scalac.args.quickonly}"/>
+ <property name="scalac.args.strap" value="${scalac.args.quick}"/>
<!-- Setting-up Ant contrib tasks -->
<taskdef resource="net/sf/antcontrib/antlib.xml" classpath="${lib.dir}/ant/ant-contrib.jar"/>
<!-- This is the start time for the distribution -->
@@ -461,9 +461,7 @@ INITIALISATION
<!-- Libraries only used for STARR -->
<path id="starr.dep.libs">
<fileset dir="${lib.dir}">
- <include name="fjbg.jar"/>
<include name="forkjoin.jar"/>
- <include name="msil.jar"/>
</fileset>
</path>
<!-- Auxiliary libs placed on every classpath. -->
@@ -593,57 +591,10 @@ LOCAL DEPENDENCY (FORKJOIN)
</target>
<!-- ===========================================================================
-LOCAL DEPENDENCY (FJBG)
-============================================================================ -->
-
- <target name="fjbg.init" depends="init">
- <uptodate property="fjbg.available" targetfile="${build-libs.dir}/fjbg.complete">
- <srcfiles dir="${src.dir}/fjbg">
- <include name="**/*.java"/>
- <include name="**/*.scala"/>
- </srcfiles>
- </uptodate>
- </target>
-
- <target name="fjbg.lib" depends="fjbg.init" unless="fjbg.available">
- <stopwatch name="fjbg.lib.timer" />
- <mkdir dir="${build-libs.dir}/classes/fjbg"/>
- <javac
- srcdir="${src.dir}/fjbg"
- destdir="${build-libs.dir}/classes/fjbg"
- classpath="${build-libs.dir}/classes/fjbg"
- includes="**/*.java"
- debug="true"
- target="1.6" source="1.4">
- <compilerarg line="${javac.args} -XDignore.symbol.file"/>
- </javac>
- <touch file="${build-libs.dir}/fjbg.complete" verbose="no"/>
- <stopwatch name="fjbg.lib.timer" action="total"/>
- </target>
-
- <target name="fjbg.pack" depends="fjbg.lib">
- <jar destfile="${build-libs.dir}/fjbg.jar">
- <fileset dir="${build-libs.dir}/classes/fjbg"/>
- </jar>
- </target>
-
- <target name="fjbg.done" depends="fjbg.pack">
- <!-- TODO - jar or classfiles? -->
- <path id="fjbg.classpath">
- <pathelement location="${build-libs.dir}/classes/fjbg"/>
- </path>
- </target>
-
- <target name="fjbg.clean" depends="init">
- <delete dir="${build-libs.dir}/classes/fjbg" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- <delete file="${build-libs.dir}/fjbg.complete" quiet="yes" failonerror="no"/>
- </target>
-
-<!-- ===========================================================================
LOCAL REFERENCE BUILD (LOCKER)
============================================================================ -->
- <target name="locker.start" depends="asm.done, forkjoin.done, fjbg.done">
+ <target name="locker.start" depends="asm.done, forkjoin.done">
<condition property="locker.available">
<available file="${build-locker.dir}/all.complete"/>
</condition>
@@ -669,9 +620,6 @@ LOCAL REFERENCE BUILD (LOCKER)
<pathelement location="${build-locker.dir}/classes/library"/>
</classpath>
</javac>
- <!-- NOTE: Potential problem with maximal command line length on Windows
- (32768 characters for XP, since executed with Java's "exec"). See
- src/build/msil.xml in msil branch for more details. -->
<scalacfork
destdir="${build-locker.dir}/classes/library"
compilerpathref="starr.classpath"
@@ -761,43 +709,6 @@ LOCAL REFERENCE BUILD (LOCKER)
<target name="locker.comp" depends="locker.pre-comp" if="locker.comp.needed">
<stopwatch name="locker.comp.timer"/>
<mkdir dir="${build-locker.dir}/classes/compiler"/>
- <if>
- <equals arg1="${fastlocker}" arg2="true" />
- <then>
- <!-- Fastlocker build: don't compile MSIL, use its starr version.... -->
- <property name="locker.comp.msil" value="${msil.starr.jar}"/>
- </then>
- <else>
- <!-- Regular build: Compile MSIL inside of locker.... -->
- <javac
- srcdir="${src.dir}/msil"
- destdir="${build-locker.dir}/classes/compiler"
- classpath="${build-locker.dir}/classes/compiler"
- includes="**/*.java"
- excludes="**/tests/**"
- debug="true"
- target="1.6" source="1.4">
- <compilerarg line="${javac.args}"/>
- </javac>
- <scalacfork
- destdir="${build-locker.dir}/classes/compiler"
- compilerpathref="starr.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/msil"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-locker.dir}/classes/library"/>
- <pathelement location="${build-locker.dir}/classes/reflect"/>
- <pathelement location="${build-locker.dir}/classes/compiler"/>
- <path refid="fjbg.classpath"/>
- <path refid="aux.libs"/>
- <pathelement location="${jline.jar}"/>
- </compilationpath>
- </scalacfork>
- <property name="locker.comp.msil" value="${build-locker.dir}/classes/compiler"/>
- </else>
- </if>
<scalacfork
destdir="${build-locker.dir}/classes/compiler"
compilerpathref="starr.classpath"
@@ -809,10 +720,8 @@ LOCAL REFERENCE BUILD (LOCKER)
<pathelement location="${build-locker.dir}/classes/library"/>
<pathelement location="${build-locker.dir}/classes/reflect"/>
<pathelement location="${build-locker.dir}/classes/compiler"/>
- <path refid="fjbg.classpath"/>
<path refid="aux.libs"/>
<path refid="asm.classpath"/>
- <pathelement location="${locker.comp.msil}" />
<pathelement location="${jline.jar}"/>
</compilationpath>
</scalacfork>
@@ -845,7 +754,6 @@ LOCAL REFERENCE BUILD (LOCKER)
<pathelement location="${build-locker.dir}/classes/library"/>
<pathelement location="${build-locker.dir}/classes/reflect"/>
<pathelement location="${build-locker.dir}/classes/compiler"/>
- <path refid="fjbg.classpath"/>
<path refid="forkjoin.classpath"/>
<path refid="asm.classpath"/>
<path refid="aux.libs"/>
@@ -855,7 +763,6 @@ LOCAL REFERENCE BUILD (LOCKER)
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${build-quick.dir}/classes/reflect"/>
<pathelement location="${build-quick.dir}/classes/compiler"/>
- <path refid="fjbg.classpath"/>
<path refid="forkjoin.classpath"/>
<path refid="asm.classpath"/>
<path refid="aux.libs"/>
@@ -953,7 +860,6 @@ PACKED LOCKER BUILD (PALO)
<jar destfile="${build-palo.dir}/lib/scala-compiler.jar" manifest="${basedir}/META-INF/MANIFEST.MF">
<fileset dir="${build-locker.dir}/classes/compiler"/>
<fileset dir="${build-asm.dir}/classes/"/>
- <fileset dir="${build-libs.dir}/classes/fjbg"/>
</jar>
<copy file="${jline.jar}" toDir="${build-palo.dir}/lib"/>
</target>
@@ -1021,7 +927,6 @@ QUICK BUILD (QUICK)
<uptodate property="quick.lib.available" targetfile="${build-quick.dir}/library.complete">
<srcfiles dir="${src.dir}">
<include name="library/**"/>
- <include name="continuations/**"/>
<include name="swing/**"/>
<include name="actors/**"/>
</srcfiles>
@@ -1084,28 +989,14 @@ QUICK BUILD (QUICK)
<include name="**/*.xml"/>
<include name="**/*.js"/>
<include name="**/*.css"/>
+ <include name="rootdoc.txt"/>
</fileset>
</copy>
- </target>
-
- <target name="quick.swing" depends="quick.lib" if="has.java6" unless="quick.lib.available">
- <scalacfork
- destdir="${build-quick.dir}/classes/library"
- compilerpathref="locker.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/swing"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="quick.compilation.path"/>
- </scalacfork>
- </target>
-
- <target name="quick.lib.done" depends="quick.swing, quick.lib">
- <stopwatch name="quick.lib.timer" action="total"/>
<touch file="${build-quick.dir}/library.complete" verbose="no"/>
+ <stopwatch name="quick.lib.timer" action="total"/>
</target>
- <target name="quick.pre-reflect" depends="quick.lib.done">
+ <target name="quick.pre-reflect" depends="quick.lib">
<uptodate property="quick.reflect.available" targetfile="${build-quick.dir}/reflect.complete">
<srcfiles dir="${src.dir}">
<include name="reflect/**"/>
@@ -1126,11 +1017,11 @@ QUICK BUILD (QUICK)
<compilationpath>
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${build-quick.dir}/classes/reflect"/>
+ <path refid="forkjoin.classpath"/>
<path refid="aux.libs"/>
<pathelement location="${jline.jar}"/>
</compilationpath>
</scalacfork>
- <!-- TODO - needed? -->
<propertyfile file="${build-quick.dir}/classes/reflect/reflect.properties">
<entry key="version.number" value="${version.number}"/>
<entry key="maven.version.number" value="${maven.version.number}"/>
@@ -1163,33 +1054,6 @@ QUICK BUILD (QUICK)
<target name="quick.comp" depends="quick.pre-comp" unless="quick.comp.available">
<stopwatch name="quick.comp.timer"/>
<mkdir dir="${build-quick.dir}/classes/compiler"/>
- <!-- Compile MSIL inside of quick.... -->
- <javac
- srcdir="${src.dir}/msil"
- destdir="${build-quick.dir}/classes/compiler"
- classpath="${build-quick.dir}/classes/compiler"
- includes="**/*.java"
- excludes="**/tests/**"
- debug="true"
- target="1.6" source="1.4">
- <compilerarg line="${javac.args}"/>
- </javac>
- <scalacfork
- destdir="${build-quick.dir}/classes/compiler"
- compilerpathref="locker.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/msil"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/reflect"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
- <path refid="fjbg.classpath"/>
- <path refid="aux.libs"/>
- <pathelement location="${jline.jar}"/>
- </compilationpath>
- </scalacfork>
<scalacfork
destdir="${build-quick.dir}/classes/compiler"
compilerpathref="locker.classpath"
@@ -1203,7 +1067,6 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/compiler"/>
<path refid="aux.libs"/>
<path refid="forkjoin.classpath"/>
- <path refid="fjbg.classpath"/>
<path refid="asm.classpath"/>
<pathelement location="${jline.jar}"/>
</compilationpath>
@@ -1232,7 +1095,45 @@ QUICK BUILD (QUICK)
<stopwatch name="quick.comp.timer" action="total"/>
</target>
- <target name="quick.pre-plugins" depends="quick.comp">
+ <target name="quick.pre-repl" depends="quick.comp">
+ <uptodate property="quick.repl.available" targetfile="${build-quick.dir}/repl.complete">
+ <srcfiles dir="${src.dir}/repl" />
+ </uptodate>
+ </target>
+
+ <target name="quick.repl" depends="quick.pre-repl" unless="quick.repl.available">
+ <mkdir dir="${build-quick.dir}/classes/repl"/>
+ <scalacfork
+ destdir="${build-quick.dir}/classes/repl"
+ compilerpathref="quick.classpath"
+ params="${scalac.args.quick}"
+ srcdir="${src.dir}/repl"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath>
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ <pathelement location="${build-quick.dir}/classes/reflect"/>
+ <pathelement location="${build-quick.dir}/classes/compiler"/>
+ <pathelement location="${build-quick.dir}/classes/repl"/>
+ <pathelement location="${jline.jar}"/>
+ </compilationpath>
+ </scalacfork>
+ <touch file="${build-quick.dir}/repl.complete" verbose="no"/>
+ </target>
+
+ <target name="quick.swing" depends="quick.comp" if="has.java6" unless="quick.comp.available">
+ <scalacfork
+ destdir="${build-quick.dir}/classes/library"
+ compilerpathref="locker.classpath"
+ params="${scalac.args.quick}"
+ srcdir="${src.dir}/swing"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath refid="quick.compilation.path"/>
+ </scalacfork>
+ </target>
+
+ <target name="quick.pre-plugins" depends="quick.repl" unless="quick.repl.available">
<uptodate property="quick.plugins.available" targetfile="${build-quick.dir}/plugins.complete">
<srcfiles dir="${src.dir}/continuations"/>
</uptodate>
@@ -1241,6 +1142,7 @@ QUICK BUILD (QUICK)
<target name="quick.plugins" depends="quick.pre-plugins" unless="quick.plugins.available">
<stopwatch name="quick.plugins.timer"/>
<mkdir dir="${build-quick.dir}/classes/continuations-plugin"/>
+ <mkdir dir="${build-quick.dir}/classes/continuations-library"/>
<scalacfork
destdir="${build-quick.dir}/classes/continuations-plugin"
compilerpathref="quick.classpath"
@@ -1254,7 +1156,6 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/compiler"/>
<pathelement location="${build-quick.dir}/classes/continuations-plugin"/>
<path refid="forkjoin.classpath"/>
- <path refid="fjbg.classpath"/>
<path refid="aux.libs"/>
</compilationpath>
</scalacfork>
@@ -1268,7 +1169,7 @@ QUICK BUILD (QUICK)
</jar>
<!-- might split off library part into its own ant target -->
<scalacfork
- destdir="${build-quick.dir}/classes/library"
+ destdir="${build-quick.dir}/classes/continuations-library"
compilerpathref="quick.classpath"
params="${scalac.args.quick} -Xplugin-require:continuations -P:continuations:enable"
srcdir="${src.dir}/continuations/library"
@@ -1354,6 +1255,7 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${build-quick.dir}/classes/reflect"/>
<pathelement location="${build-quick.dir}/classes/compiler"/>
+ <pathelement location="${build-quick.dir}/classes/repl"/>
<pathelement location="${build-quick.dir}/classes/scalap"/>
<pathelement location="${build-quick.dir}/classes/partest"/>
<path refid="asm.classpath"/>
@@ -1372,11 +1274,11 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${build-quick.dir}/classes/reflect"/>
<pathelement location="${build-quick.dir}/classes/compiler"/>
+ <pathelement location="${build-quick.dir}/classes/repl"/>
<pathelement location="${build-quick.dir}/classes/scalap"/>
<pathelement location="${build-quick.dir}/classes/partest"/>
<pathelement location="${ant.jar}"/>
<path refid="forkjoin.classpath"/>
- <path refid="fjbg.classpath"/>
<path refid="asm.classpath"/>
<pathelement location="${scalacheck.jar}"/>
</compilationpath>
@@ -1394,7 +1296,86 @@ QUICK BUILD (QUICK)
<stopwatch name="quick.partest.timer" action="total"/>
</target>
- <target name="quick.pre-bin" depends="quick.partest">
+ <target name="quick.pre-scaladoc" depends="quick.partest">
+ <uptodate property="quick.scaladoc.available" targetfile="${build-quick.dir}/scaladoc.complete">
+ <srcfiles dir="${src.dir}/scaladoc" />
+ </uptodate>
+ </target>
+
+ <target name="quick.scaladoc" depends="quick.pre-scaladoc" unless="quick.scaladoc.available">
+ <mkdir dir="${build-quick.dir}/classes/scaladoc"/>
+ <scalacfork
+ destdir="${build-quick.dir}/classes/scaladoc"
+ compilerpathref="quick.classpath"
+ params="${scalac.args.quick}"
+ srcdir="${src.dir}/scaladoc"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath>
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ <pathelement location="${build-quick.dir}/classes/reflect"/>
+ <pathelement location="${build-quick.dir}/classes/compiler"/>
+ <pathelement location="${build-quick.dir}/classes/scalap"/>
+ <pathelement location="${build-quick.dir}/classes/partest"/>
+ <pathelement location="${build-quick.dir}/classes/scaladoc"/>
+ <pathelement location="${ant.jar}"/>
+ <path refid="forkjoin.classpath"/>
+ <pathelement location="${scalacheck.jar}"/>
+ </compilationpath>
+ </scalacfork>
+ <propertyfile file="${build-quick.dir}/classes/scaladoc/scaladoc.properties">
+ <entry key="version.number" value="${scaladoc.version.number}"/>
+ <entry key="copyright.string" value="${copyright.string}"/>
+ </propertyfile>
+ <copy todir="${build-quick.dir}/classes/scaladoc">
+ <fileset dir="${src.dir}/scaladoc">
+ <include name="**/*.tmpl"/>
+ <include name="**/*.xml"/>
+ <include name="**/*.js"/>
+ <include name="**/*.css"/>
+ <include name="**/*.html"/>
+ <include name="**/*.properties"/>
+ <include name="**/*.swf"/>
+ <include name="**/*.png"/>
+ <include name="**/*.gif"/>
+ <include name="**/*.txt"/>
+ </fileset>
+ </copy>
+ <touch file="${build-quick.dir}/scaladoc.complete" verbose="no"/>
+ </target>
+
+ <target name="quick.pre-interactive" depends="quick.scaladoc">
+ <uptodate property="quick.interactive.available" targetfile="${build-quick.dir}/interactive.complete">
+ <srcfiles dir="${src.dir}/interactive" />
+ </uptodate>
+ </target>
+
+ <target name="quick.interactive" depends="quick.pre-interactive" unless="quick.interactive.available">
+ <mkdir dir="${build-quick.dir}/classes/interactive"/>
+ <scalacfork
+ destdir="${build-quick.dir}/classes/interactive"
+ compilerpathref="quick.classpath"
+ params="${scalac.args.quick}"
+ srcdir="${src.dir}/interactive"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath>
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ <pathelement location="${build-quick.dir}/classes/reflect"/>
+ <pathelement location="${build-quick.dir}/classes/compiler"/>
+ <pathelement location="${build-quick.dir}/classes/scalap"/>
+ <pathelement location="${build-quick.dir}/classes/partest"/>
+ <pathelement location="${build-quick.dir}/classes/scaladoc"/>
+ <pathelement location="${build-quick.dir}/classes/interactive"/>
+ <pathelement location="${ant.jar}"/>
+ <path refid="forkjoin.classpath"/>
+ <pathelement location="${scalacheck.jar}"/>
+ </compilationpath>
+ </scalacfork>
+ <touch file="${build-quick.dir}/interactive.complete" verbose="no"/>
+ </target>
+
+ <target name="quick.pre-bin" depends="quick.interactive">
<condition property="quick.bin.available">
<isset property="quick.comp.available"/>
</condition>
@@ -1405,12 +1386,12 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${build-quick.dir}/classes/reflect"/>
<pathelement location="${build-quick.dir}/classes/compiler"/>
+ <pathelement location="${build-quick.dir}/classes/repl"/>
<pathelement location="${build-quick.dir}/classes/scalap"/>
+ <pathelement location="${jline.jar}"/>
+ <path refid="asm.classpath"/>
<path refid="forkjoin.classpath"/>
- <path refid="fjbg.classpath"/>
<path refid="aux.libs"/>
- <path refid="asm.classpath"/>
- <pathelement location="${jline.jar}"/>
</path>
<taskdef name="quick-bin" classname="scala.tools.ant.ScalaTool" classpathref="quick.bin.classpath"/>
<mkdir dir="${build-quick.dir}/bin"/>
@@ -1486,6 +1467,7 @@ PACKED QUICK BUILD (PACK)
<exclude name="scala/swing/**"/>
<exclude name="scala/actors/**"/>
</fileset>
+ <fileset dir="${build-quick.dir}/classes/continuations-library"/>
<fileset dir="${build-libs.dir}/classes/forkjoin"/>
</jar>
<jar destfile="${build-pack.dir}/lib/scala-actors.jar">
@@ -1529,12 +1511,16 @@ PACKED QUICK BUILD (PACK)
<copy file="META-INF/MANIFEST.MF" toDir="${build-pack.dir}/META-INF"/>
<manifest file="${build-pack.dir}/META-INF/MANIFEST.MF" mode="update">
<attribute name="Bundle-Version" value="${version.number}"/>
+ <attribute name="Class-Path" value="scala-reflect.jar scala-library.jar"/>
</manifest>
<mkdir dir="${build-pack.dir}/lib"/>
<jar destfile="${build-pack.dir}/lib/scala-compiler.jar" manifest="${build-pack.dir}/META-INF/MANIFEST.MF">
+ <service type="javax.script.ScriptEngineFactory" provider="scala.tools.nsc.interpreter.IMain$Factory"/>
<fileset dir="${build-quick.dir}/classes/compiler"/>
+ <fileset dir="${build-quick.dir}/classes/scaladoc"/>
+ <fileset dir="${build-quick.dir}/classes/interactive"/>
+ <fileset dir="${build-quick.dir}/classes/repl"/>
<fileset dir="${build-asm.dir}/classes"/>
- <fileset dir="${build-libs.dir}/classes/fjbg"/>
</jar>
<copy file="${jline.jar}" toDir="${build-pack.dir}/lib"/>
<copy todir="${build-pack.dir}/lib">
@@ -1657,6 +1643,11 @@ PACKED QUICK BUILD (PACK)
<pathelement location="${jline.jar}"/>
<path refid="lib.extra"/>
</path>
+ <path id="partest.classpath">
+ <path refid="pack.classpath"/>
+ <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
+ <pathelement location="${pack.dir}/lib/scala-swing.jar"/> <!-- TODO - segregate swing tests (there can't be many) -->
+ </path>
<taskdef resource="scala/tools/ant/antlib.xml" classpathref="pack.classpath"/>
<taskdef resource="scala/tools/partest/antlib.xml" classpathref="pack.classpath"/>
</target>
@@ -1841,7 +1832,7 @@ BOOTSTRAPPING BUILD (STRAP)
destdir="${build-strap.dir}/classes/library"
compilerpathref="pack.classpath"
srcpath="${src.dir}/library"
- params="${scalac.args.quick}"
+ params="${scalac.args.strap}"
srcdir="${src.dir}/library"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
@@ -1850,7 +1841,7 @@ BOOTSTRAPPING BUILD (STRAP)
<scalacfork
destdir="${build-strap.dir}/classes/library"
compilerpathref="pack.classpath"
- params="${scalac.args.quick}"
+ params="${scalac.args.strap}"
srcdir="${src.dir}/actors"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
@@ -1870,28 +1861,15 @@ BOOTSTRAPPING BUILD (STRAP)
<include name="**/*.css"/>
</fileset>
</copy>
- </target>
-
- <target name="strap.swing" if="has.java6" unless="strap.lib.available" depends="strap.lib">
- <scalacfork
- destdir="${build-strap.dir}/classes/library"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/swing"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="strap.compilation.path"/>
- </scalacfork>
- </target>
-
- <target name="strap.lib.done" depends="strap.swing, strap.lib">
<touch file="${build-strap.dir}/library.complete" verbose="no"/>
<stopwatch name="strap.lib.timer" action="total"/>
</target>
- <target name="strap.pre-reflect" depends="strap.lib.done">
+ <target name="strap.pre-reflect" depends="strap.lib">
<uptodate property="strap.reflect.available" targetfile="${build-strap.dir}/reflect.complete">
- <srcfiles dir="${src.dir}/reflect"/>
+ <srcfiles dir="${src.dir}">
+ <include name="reflect/**"/>
+ </srcfiles>
</uptodate>
</target>
@@ -1934,7 +1912,7 @@ BOOTSTRAPPING BUILD (STRAP)
</fileset>
</copy>
<touch file="${build-strap.dir}/reflect.complete" verbose="no"/>
- <stopwatch name="strap.comp.timer" action="total"/>
+ <stopwatch name="strap.reflect.timer" action="total"/>
</target>
<target name="strap.pre-comp" depends="strap.reflect">
@@ -1946,37 +1924,10 @@ BOOTSTRAPPING BUILD (STRAP)
<target name="strap.comp" depends="strap.pre-comp" unless="strap.comp.available">
<stopwatch name="strap.comp.timer"/>
<mkdir dir="${build-strap.dir}/classes/compiler"/>
- <!-- Compile MSIL inside of strap.... -->
- <javac
- srcdir="${src.dir}/msil"
- destdir="${build-strap.dir}/classes/compiler"
- classpath="${build-strap.dir}/classes/compiler"
- includes="**/*.java"
- excludes="**/tests/**"
- debug="true"
- target="1.6" source="1.4">
- <compilerarg line="${javac.args}"/>
- </javac>
- <scalacfork
- destdir="${build-strap.dir}/classes/compiler"
- compilerpathref="pack.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/msil"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/reflect"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <path refid="fjbg.classpath"/>
- <path refid="aux.libs"/>
- <pathelement location="${jline.jar}"/>
- </compilationpath>
- </scalacfork>
<scalacfork
destdir="${build-strap.dir}/classes/compiler"
compilerpathref="pack.classpath"
- params="${scalac.args.quick}"
+ params="${scalac.args.strap}"
srcdir="${src.dir}/compiler"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
@@ -1986,7 +1937,6 @@ BOOTSTRAPPING BUILD (STRAP)
<pathelement location="${build-strap.dir}/classes/compiler"/>
<path refid="aux.libs"/>
<path refid="forkjoin.classpath"/>
- <path refid="fjbg.classpath"/>
<path refid="asm.classpath"/>
<pathelement location="${jline.jar}"/>
</compilationpath>
@@ -2015,161 +1965,14 @@ BOOTSTRAPPING BUILD (STRAP)
<stopwatch name="strap.comp.timer" action="total"/>
</target>
- <target name="strap.pre-plugins" depends="strap.comp">
- <uptodate property="strap.plugins.available" targetfile="${build-strap.dir}/plugins.complete">
- <srcfiles dir="${src.dir}/continuations"/>
- </uptodate>
- </target>
-
- <target name="strap.plugins" depends="strap.pre-plugins" unless="strap.plugins.available">
- <stopwatch name="strap.plugins.timer"/>
- <mkdir dir="${build-strap.dir}/classes/continuations-plugin"/>
- <scalacfork
- destdir="${build-strap.dir}/classes/continuations-plugin"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/continuations/plugin"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/reflect"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <pathelement location="${build-strap.dir}/classes/continuations-plugin"/>
- <path refid="forkjoin.classpath"/>
- <path refid="fjbg.classpath"/>
- <path refid="aux.libs"/>
- </compilationpath>
- </scalacfork>
- <copy
- file="${src.dir}/continuations/plugin/scalac-plugin.xml"
- todir="${build-strap.dir}/classes/continuations-plugin"/>
- <!-- not very nice to create jar here but needed to load plugin -->
- <mkdir dir="${build-strap.dir}/misc/scala-devel/plugins"/>
- <jar destfile="${build-strap.dir}/misc/scala-devel/plugins/continuations.jar">
- <fileset dir="${build-strap.dir}/classes/continuations-plugin"/>
- </jar>
- <!-- might split off library part into its own ant target -->
- <scalacfork
- destdir="${build-strap.dir}/classes/library"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick} -Xplugin-require:continuations -P:continuations:enable"
- srcdir="${src.dir}/continuations/library"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="strap.compilation.path"/>
- <compilerarg value="-Xpluginsdir"/>
- <compilerarg file="${build-strap.dir}/misc/scala-devel/plugins"/>
- </scalacfork>
- <touch file="${build-strap.dir}/plugins.complete" verbose="no"/>
- <stopwatch name="strap.plugins.timer" action="total"/>
- </target>
-
- <target name="strap.scalacheck" depends="strap.plugins">
- <mkdir dir="${build-strap.dir}/classes/scalacheck"/>
- <scalacfork
- destdir="${build-strap.dir}/classes/scalacheck"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick} -nowarn"
- srcdir="${src.dir}/scalacheck"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- </compilationpath>
- </scalacfork>
- </target>
-
- <target name="strap.pre-scalap" depends="strap.scalacheck">
- <uptodate property="strap.scalap.available" targetfile="${build-strap.dir}/scalap.complete">
- <srcfiles dir="${src.dir}/scalap"/>
- </uptodate>
- </target>
-
- <target name="strap.scalap" depends="strap.pre-scalap" unless="strap.scalap.available">
- <stopwatch name="strap.scalap.timer"/>
- <mkdir dir="${build-strap.dir}/classes/scalap"/>
- <scalacfork
- destdir="${build-strap.dir}/classes/scalap"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/scalap"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/reflect"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <pathelement location="${build-strap.dir}/classes/scalap"/>
- <pathelement location="${build-strap.dir}/classes/partest"/>
- <pathelement location="${ant.jar}"/>
- <path refid="forkjoin.classpath"/>
- </compilationpath>
- </scalacfork>
- <touch file="${build-strap.dir}/scalap.complete" verbose="no"/>
- <stopwatch name="strap.scalap.timer" action="total"/>
- </target>
-
- <target name="strap.pre-partest" depends="strap.scalap, asm.done">
- <uptodate property="strap.partest.available" targetfile="${build-strap.dir}/partest.complete">
- <srcfiles dir="${src.dir}/partest"/>
- </uptodate>
- </target>
-
- <target name="strap.partest" depends="strap.pre-partest" unless="strap.partest.available">
- <stopwatch name="strap.partest.timer"/>
- <mkdir dir="${build-strap.dir}/classes/partest"/>
- <javac
- srcdir="${src.dir}/partest"
- destdir="${build-strap.dir}/classes/partest"
- target="1.6" source="1.5">
- <classpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/reflect"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <pathelement location="${build-strap.dir}/classes/scalap"/>
- <pathelement location="${build-strap.dir}/classes/partest"/>
- <path refid="asm.classpath"/>
- </classpath>
- <include name="**/*.java"/>
- <compilerarg line="${javac.args}"/>
- </javac>
- <scalacfork
- destdir="${build-strap.dir}/classes/partest"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/partest"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/reflect"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <pathelement location="${build-strap.dir}/classes/scalap"/>
- <pathelement location="${build-strap.dir}/classes/partest"/>
- <pathelement location="${ant.jar}"/>
- <path refid="forkjoin.classpath"/>
- <path refid="asm.classpath"/>
- <pathelement location="${scalacheck.jar}"/>
- </compilationpath>
- </scalacfork>
- <copy todir="${build-strap.dir}/classes/partest">
- <fileset dir="${src.dir}/partest">
- <include name="**/*.xml"/>
- </fileset>
- </copy>
- <touch file="${build-strap.dir}/partest.complete" verbose="no"/>
- <stopwatch name="strap.partest.timer" action="total"/>
- </target>
-
- <target name="strap.done" depends="strap.partest"/>
+ <target name="strap.done" depends="strap.comp"/>
<target name="strap.clean">
<delete dir="${build-strap.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
</target>
<!-- ===========================================================================
-LIBRARIES (Forkjoin, FJBG, ASM)
+LIBRARIES (Forkjoin, ASM)
============================================================================ -->
@@ -2230,6 +2033,8 @@ SBT Compiler Interface
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${build-quick.dir}/classes/reflect"/>
<pathelement location="${build-quick.dir}/classes/compiler"/>
+ <pathelement location="${build-quick.dir}/classes/scaladoc"/>
+ <pathelement location="${build-quick.dir}/classes/repl"/>
<pathelement location="${sbt.interface.jar}"/>
<path refid="forkjoin.classpath"/>
</compilationpath>
@@ -2526,13 +2331,10 @@ DOCUMENTATION
BOOTRAPING TEST AND TEST SUITE
============================================================================ -->
- <target name="test.stability" depends="strap.done, init">
- <same dir="${build-quick.dir}" todir="${build-strap.dir}" failondifferent="yes">
- <exclude name="**/*.properties"/>
- <exclude name="bin/**"/>
- <exclude name="*.complete"/>
- <exclude name="misc/scala-devel/plugins/*.jar"/>
- </same>
+ <target name="test.stability" depends="pack.done, strap.done">
+ <exec osfamily="unix" vmlauncher="false" executable="${basedir}/tools/stability-test.sh" failonerror="true" />
+ <!-- I think doing it this way means it will auto-pass on windows... that's the idea. If not, something like this. -->
+ <!-- <exec osfamily="windows" executable="foo" failonerror="false" failifexecutionfails="false" /> -->
</target>
<target name="test.classload" depends="pack.done">
@@ -2551,53 +2353,44 @@ BOOTRAPING TEST AND TEST SUITE
</antcall>
</target>
- <target name="test.run" depends="pack.done">
+ <target name="test.pre-run" depends="pack.done">
<property name="partest.srcdir" value="files" />
+ </target>
+
+ <target name="test.run" depends="test.pre-run">
<partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
timeout="1200000"
srcdir="${partest.srcdir}"
scalacopts="${scalac.args.optimise}">
- <compilationpath>
- <path refid="pack.classpath"/>
- <pathelement location="${pack.dir}/lib/scala-swing.jar"/>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
+
+ <compilationpath refid="partest.classpath"/>
<runtests dir="${partest.dir}/${partest.srcdir}/run" includes="*.scala"/>
<jvmtests dir="${partest.dir}/${partest.srcdir}/jvm" includes="*.scala"/>
</partest>
</target>
- <target name="test.ant" depends="pack.done">
+ <target name="test.ant" depends="test.pre-run">
<property name="partest.srcdir" value="files" />
<partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
srcdir="${partest.srcdir}"
scalacopts="${scalac.args.optimise}">
- <compilationpath>
- <path refid="pack.classpath"/>
- <pathelement location="${pack.dir}/lib/scala-swing.jar"/>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
+ <compilationpath refid="partest.classpath"/>
<anttests dir="${partest.dir}/${partest.srcdir}/ant" includes="*build.xml"/>
</partest>
</target>
- <target name="test.suite" depends="pack.done">
+ <target name="test.suite" depends="test.pre-run">
<property name="partest.srcdir" value="files" />
<partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
timeout="2400000"
srcdir="${partest.srcdir}"
scalacopts="${scalac.args.optimise}">
- <compilationpath>
- <path refid="pack.classpath"/>
- <pathelement location="${pack.dir}/lib/scala-swing.jar"/>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
+ <compilationpath refid="partest.classpath"/>
<postests dir="${partest.dir}/${partest.srcdir}/pos" includes="*.scala"/>
<negtests dir="${partest.dir}/${partest.srcdir}/neg" includes="*.scala"/>
<runtests dir="${partest.dir}/${partest.srcdir}/run" includes="*.scala"/>
<jvmtests dir="${partest.dir}/${partest.srcdir}/jvm" includes="*.scala"/>
<residenttests dir="${partest.dir}/${partest.srcdir}/res" includes="*.res"/>
- <buildmanagertests dir="${partest.dir}/${partest.srcdir}/buildmanager" includes="*"/>
<scalaptests dir="${partest.dir}/${partest.srcdir}/scalap" includes="**/*.scala"/>
<scalachecktests dir="${partest.dir}/${partest.srcdir}/scalacheck">
<include name="*.scala"/>
@@ -2608,14 +2401,10 @@ BOOTRAPING TEST AND TEST SUITE
<instrumentedtests dir="${partest.dir}/${partest.srcdir}/instrumented">
<include name="*.scala"/>
</instrumentedtests>
- <presentationtests dir="${partest.dir}/${partest.srcdir}/presentation">
- <include name="*/*.scala"/>
- </presentationtests>
- <!-- <scripttests dir="${partest.dir}/${partest.srcdir}/script" includes="*.scala"/> -->
</partest>
</target>
- <target name="test.continuations.suite" depends="pack.done">
+ <target name="test.continuations.suite" depends="test.pre-run">
<property name="partest.srcdir" value="files" />
<partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
timeout="2400000"
@@ -2623,27 +2412,30 @@ BOOTRAPING TEST AND TEST SUITE
scalacopts="${scalac.args.optimise} -Xplugin-require:continuations -P:continuations:enable">
<compilerarg value="-Xpluginsdir"/>
<compilerarg file="${build-quick.dir}/misc/scala-devel/plugins"/>
- <compilationpath>
- <path refid="pack.classpath"/>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
+ <compilationpath refid="partest.classpath"/>
<negtests dir="${partest.dir}/${partest.srcdir}/continuations-neg" includes="*.scala"/>
<runtests dir="${partest.dir}/${partest.srcdir}/continuations-run" includes="*.scala"/>
</partest>
</target>
- <target name="test.scaladoc" depends="pack.done">
+ <target name="test.scaladoc" depends="test.pre-run">
<partest erroronfailed="yes" scalacopts="${scalac.args.optimise}" showlog="yes">
- <compilationpath>
- <path refid="pack.classpath"/>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
+ <compilationpath refid="partest.classpath"/>
<runtests dir="${partest.dir}/scaladoc/run" includes="*.scala" />
<scalachecktests dir="${partest.dir}/scaladoc/scalacheck" includes="*.scala" />
</partest>
</target>
- <target name="test.done" depends="test.suite, test.continuations.suite, test.scaladoc, test.stability, test.sbt, osgi.test, bc.run"/>
+ <target name="test.interactive" depends="test.pre-run">
+ <partest erroronfailed="yes" scalacopts="${scalac.args.optimise}" showlog="yes">
+ <compilationpath refid="partest.classpath"/>
+ <presentationtests dir="${partest.dir}/${partest.srcdir}/presentation">
+ <include name="*/*.scala"/>
+ </presentationtests>
+ </partest>
+ </target>
+
+ <target name="test.done" depends="test.suite, test.continuations.suite, test.scaladoc, test.interactive, test.stability, test.sbt"/>
<!-- ===========================================================================
@@ -2656,16 +2448,15 @@ Binary compatibility testing
<mkdir dir="${bc-build.dir}"/>
<!-- Pull down MIMA -->
<artifact:dependencies pathId="mima.classpath">
- <dependency groupId="com.typesafe" artifactId="mima-reporter_2.9.2" version="0.1.5"/>
+ <dependency groupId="com.typesafe" artifactId="mima-reporter_2.9.2" version="0.1.4"/>
</artifact:dependencies>
<artifact:dependencies pathId="old.bc.classpath">
- <dependency groupId="org.scala-lang" artifactId="scala-swing" version="2.10.0"/>
- <dependency groupId="org.scala-lang" artifactId="scala-library" version="2.10.0"/>
- <dependency groupId="org.scala-lang" artifactId="scala-reflect" version="2.10.0"/>
+ <dependency groupId="org.scala-lang" artifactId="scala-library" version="2.10.0-RC2"/>
+ <dependency groupId="org.scala-lang" artifactId="scala-reflect" version="2.10.0-RC2"/>
</artifact:dependencies>
</target>
- <target name="bc.run" depends="bc.init, pack.done">
+ <target name="bc.run" depends="bc.init, pack.lib">
<java
fork="true"
failonerror="true"
@@ -2674,9 +2465,6 @@ Binary compatibility testing
<arg value="${org.scala-lang:scala-library:jar}"/>
<arg value="--curr"/>
<arg value="${build-pack.dir}/lib/scala-library.jar"/>
- <arg value="--filters"/>
- <arg value="${basedir}/bincompat-backward.whitelist.conf"/>
- <arg value="--generate-filters"/>
<classpath>
<path refid="mima.classpath"/>
</classpath>
@@ -2689,69 +2477,6 @@ Binary compatibility testing
<arg value="${org.scala-lang:scala-reflect:jar}"/>
<arg value="--curr"/>
<arg value="${build-pack.dir}/lib/scala-reflect.jar"/>
- <arg value="--filters"/>
- <arg value="${basedir}/bincompat-backward.whitelist.conf"/>
- <arg value="--generate-filters"/>
- <classpath>
- <path refid="mima.classpath"/>
- </classpath>
- </java>
- <java
- fork="true"
- failonerror="true"
- classname="com.typesafe.tools.mima.cli.Main">
- <arg value="--prev"/>
- <arg value="${org.scala-lang:scala-swing:jar}"/>
- <arg value="--curr"/>
- <arg value="${build-pack.dir}/lib/scala-swing.jar"/>
- <arg value="--filters"/>
- <arg value="${basedir}/bincompat-backward.whitelist.conf"/>
- <arg value="--generate-filters"/>
- <classpath>
- <path refid="mima.classpath"/>
- </classpath>
- </java>
- <java
- fork="true"
- failonerror="true"
- classname="com.typesafe.tools.mima.cli.Main">
- <arg value="--curr"/>
- <arg value="${org.scala-lang:scala-library:jar}"/>
- <arg value="--prev"/>
- <arg value="${build-pack.dir}/lib/scala-library.jar"/>
- <arg value="--filters"/>
- <arg value="${basedir}/bincompat-forward.whitelist.conf"/>
- <arg value="--generate-filters"/>
- <classpath>
- <path refid="mima.classpath"/>
- </classpath>
- </java>
- <java
- fork="true"
- failonerror="true"
- classname="com.typesafe.tools.mima.cli.Main">
- <arg value="--curr"/>
- <arg value="${org.scala-lang:scala-reflect:jar}"/>
- <arg value="--prev"/>
- <arg value="${build-pack.dir}/lib/scala-reflect.jar"/>
- <arg value="--filters"/>
- <arg value="${basedir}/bincompat-forward.whitelist.conf"/>
- <arg value="--generate-filters"/>
- <classpath>
- <path refid="mima.classpath"/>
- </classpath>
- </java>
- <java
- fork="true"
- failonerror="true"
- classname="com.typesafe.tools.mima.cli.Main">
- <arg value="--curr"/>
- <arg value="${org.scala-lang:scala-swing:jar}"/>
- <arg value="--prev"/>
- <arg value="${build-pack.dir}/lib/scala-swing.jar"/>
- <arg value="--filters"/>
- <arg value="${basedir}/bincompat-forward.whitelist.conf"/>
- <arg value="--generate-filters"/>
<classpath>
<path refid="mima.classpath"/>
</classpath>
@@ -2949,8 +2674,6 @@ STABLE REFERENCE (STARR)
<jar destfile="${basedir}/lib/scala-compiler-src.jar">
<fileset dir="${basedir}/src/compiler"/>
<fileset dir="${basedir}/src/asm"/>
- <fileset dir="${basedir}/src/fjbg"/>
- <fileset dir="${basedir}/src/msil"/>
</jar>
</target>
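
The bc.run target above shells out to MIMA's command-line entry point, com.typesafe.tools.mima.cli.Main, passing one --prev/--curr pair of jars per artifact. Below is a minimal Scala sketch of the same invocation driven programmatically; it assumes the mima-reporter jar resolved by bc.init is already on the classpath, and both paths are illustrative stand-ins for ${org.scala-lang:scala-library:jar} and ${build-pack.dir}/lib.

    // Sketch only: same --prev/--curr arguments the Ant <java> tasks pass above.
    object BcRunSketch {
      def main(args: Array[String]): Unit = {
        com.typesafe.tools.mima.cli.Main.main(Array(
          "--prev", "/path/to/previous/scala-library.jar", // illustrative path
          "--curr", "build/pack/lib/scala-library.jar"     // illustrative path
        ))
      }
    }
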
diff --git a/docs/LICENSE b/docs/LICENSE
index de950bdf57..a9c254ed49 100644
--- a/docs/LICENSE
+++ b/docs/LICENSE
@@ -1,6 +1,6 @@
SCALA LICENSE
-Copyright (c) 2002-2012 EPFL, Lausanne, unless otherwise specified.
+Copyright (c) 2002-2013 EPFL, Lausanne, unless otherwise specified.
All rights reserved.
This software was developed by the Programming Methods Laboratory of the
diff --git a/docs/examples/actors/pingpong.scala b/docs/examples/actors/pingpong.scala
index 4ed225c662..c355bee244 100644
--- a/docs/examples/actors/pingpong.scala
+++ b/docs/examples/actors/pingpong.scala
@@ -13,7 +13,7 @@ case object Stop
* @author Philipp Haller
* @version 1.1
*/
-object pingpong extends Application {
+object pingpong extends App {
val pong = new Pong
val ping = new Ping(100000, pong)
ping.start
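
The Application-to-App change above is more than cosmetic: Application executed the object body during object initialization and gave no access to the command-line arguments, while App (its replacement since Scala 2.9) runs the body when main() is invoked and exposes args. A minimal sketch (object name is illustrative):

    object HelloApp extends App {
      // `args` is provided by the App trait; Application had no equivalent.
      println("started with " + args.length + " argument(s)")
    }
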
diff --git a/docs/examples/jolib/Ref.scala b/docs/examples/jolib/Ref.scala
index 5f655f16b1..32952b4351 100644
--- a/docs/examples/jolib/Ref.scala
+++ b/docs/examples/jolib/Ref.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/docs/examples/jolib/parallelOr.scala b/docs/examples/jolib/parallelOr.scala
index 72d282bee3..fb8288c5b2 100644
--- a/docs/examples/jolib/parallelOr.scala
+++ b/docs/examples/jolib/parallelOr.scala
@@ -1,10 +1,10 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
-\* */
+\* */
package examples.jolib;
/*
diff --git a/docs/examples/parsing/ArithmeticParser.scala b/docs/examples/parsing/ArithmeticParser.scala
index 99cf7a5578..c272fa0ba0 100644
--- a/docs/examples/parsing/ArithmeticParser.scala
+++ b/docs/examples/parsing/ArithmeticParser.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/docs/examples/swing/ColorChooserDemo.scala b/docs/examples/swing/ColorChooserDemo.scala
new file mode 100644
index 0000000000..1cb2bdefa2
--- /dev/null
+++ b/docs/examples/swing/ColorChooserDemo.scala
@@ -0,0 +1,61 @@
+package examples.swing
+
+import java.awt.{Color, Font, Dimension}
+import swing._
+import event._
+import Swing._
+import BorderPanel._
+
+/**
+ * Demo for ColorChooser.
+ * Based on http://download.oracle.com/javase/tutorial/uiswing/components/colorchooser.html
+ *
+ * @author andy@hicks.net
+ */
+object ColorChooserDemo extends SimpleSwingApplication {
+ def top = new MainFrame {
+ title = "ColorChooser Demo"
+ size = new Dimension(400, 400)
+
+ contents = ui
+ }
+
+ def ui = new BorderPanel {
+ val colorChooser = new ColorChooser {
+ reactions += {
+ case ColorChanged(_, c) =>
+ banner.foreground = c
+ }
+ }
+
+ colorChooser.border = TitledBorder(EtchedBorder, "Choose Text Color")
+
+ val banner = new Label("Welcome to Scala Swing") {
+ horizontalAlignment = Alignment.Center
+ foreground = Color.yellow
+ background = Color.blue
+ opaque = true
+ font = new Font("SansSerif", Font.BOLD, 24)
+ }
+
+ val bannerArea = new BorderPanel {
+ layout(banner) = Position.Center
+ border = TitledBorder(EtchedBorder, "Banner")
+ }
+
+ // Display a color selection dialog when button pressed
+ val selectColor = new Button("Choose Background Color") {
+ reactions += {
+ case ButtonClicked(_) =>
+ ColorChooser.showDialog(this, "Test", Color.red) match {
+ case Some(c) => banner.background = c
+ case None =>
+ }
+ }
+ }
+
+ layout(bannerArea) = Position.North
+ layout(colorChooser) = Position.Center
+ layout(selectColor) = Position.South
+ }
+}
\ No newline at end of file
diff --git a/docs/examples/swing/PopupDemo.scala b/docs/examples/swing/PopupDemo.scala
new file mode 100644
index 0000000000..6a9eeb125b
--- /dev/null
+++ b/docs/examples/swing/PopupDemo.scala
@@ -0,0 +1,33 @@
+package examples.swing
+
+import swing._
+import event._
+import Swing._
+
+/**
+ * @author John Sullivan
+ * @author Ingo Maier
+ */
+object PopupDemo extends SimpleSwingApplication {
+ def top = new MainFrame {
+ val popupMenu = new PopupMenu {
+ contents += new Menu("menu 1") {
+ contents += new RadioMenuItem("radio 1.1")
+ contents += new RadioMenuItem("radio 1.2")
+ }
+ contents += new Menu("menu 2") {
+ contents += new RadioMenuItem("radio 2.1")
+ contents += new RadioMenuItem("radio 2.2")
+ }
+ }
+ val button = new Button("Show Popup Menu")
+ reactions += {
+ case ButtonClicked(b) => popupMenu.show(b, 0, b.bounds.height)
+ case PopupMenuCanceled(m) => println("Menu " + m + " canceled.")
+ }
+ listenTo(popupMenu)
+ listenTo(button)
+
+ contents = new FlowPanel(button)
+ }
+}
\ No newline at end of file
diff --git a/lib/fjbg.jar.desired.sha1 b/lib/fjbg.jar.desired.sha1
deleted file mode 100644
index 6f3ccc77bd..0000000000
--- a/lib/fjbg.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-8acc87f222210b4a5eb2675477602fc1759e7684 *fjbg.jar
diff --git a/lib/msil.jar.desired.sha1 b/lib/msil.jar.desired.sha1
deleted file mode 100644
index 9396b273ab..0000000000
--- a/lib/msil.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d48cb950ceded82a5e0ffae8ef2c68d0923ed00c *msil.jar
diff --git a/lib/scala-compiler-src.jar.desired.sha1 b/lib/scala-compiler-src.jar.desired.sha1
index 082d86ff67..7dae1a905e 100644
--- a/lib/scala-compiler-src.jar.desired.sha1
+++ b/lib/scala-compiler-src.jar.desired.sha1
@@ -1 +1 @@
-cfa3ee21f76cd5c115bd3bc070a3b401587bafb5 ?scala-compiler-src.jar
+f9f41fb909df6a0178906c9fd02e5d0efa15c9ed ?scala-compiler-src.jar
diff --git a/lib/scala-compiler.jar.desired.sha1 b/lib/scala-compiler.jar.desired.sha1
index bb39b4d6a6..bb22879679 100644
--- a/lib/scala-compiler.jar.desired.sha1
+++ b/lib/scala-compiler.jar.desired.sha1
@@ -1 +1 @@
-d54b99f215d4d42b3f0b3489fbb1081270700992 ?scala-compiler.jar
+c4cd524dc29d298a5034637f6b31122dccb300d6 ?scala-compiler.jar
diff --git a/lib/scala-library-src.jar.desired.sha1 b/lib/scala-library-src.jar.desired.sha1
index cd42c23291..641324256e 100644
--- a/lib/scala-library-src.jar.desired.sha1
+++ b/lib/scala-library-src.jar.desired.sha1
@@ -1 +1 @@
-8bdac1cdd60b73ff7e12fd2b556355fa10343e2d ?scala-library-src.jar
+2cba5a13ef44bf93133be26cc89ba3a640a5c28f ?scala-library-src.jar
diff --git a/lib/scala-library.jar.desired.sha1 b/lib/scala-library.jar.desired.sha1
index 6bdeaa903b..1b4eeac625 100644
--- a/lib/scala-library.jar.desired.sha1
+++ b/lib/scala-library.jar.desired.sha1
@@ -1 +1 @@
-1e0e39fae15b42e85998740511ec5a3830e26243 ?scala-library.jar
+d7c6e69eba3dba2f75a0f44e56480cd3dbab8931 ?scala-library.jar
diff --git a/lib/scala-reflect-src.jar.desired.sha1 b/lib/scala-reflect-src.jar.desired.sha1
index d630c938f2..f9d9618380 100644
--- a/lib/scala-reflect-src.jar.desired.sha1
+++ b/lib/scala-reflect-src.jar.desired.sha1
@@ -1 +1 @@
-d229f4c91ea8ab1a81559b5803efd9b0b1632f0b ?scala-reflect-src.jar
+dbc00bd4b09012aa52e802926dee3f8a02a767ff ?scala-reflect-src.jar
diff --git a/lib/scala-reflect.jar.desired.sha1 b/lib/scala-reflect.jar.desired.sha1
index a5d6701749..8663ef7c85 100644
--- a/lib/scala-reflect.jar.desired.sha1
+++ b/lib/scala-reflect.jar.desired.sha1
@@ -1 +1 @@
-288f47dbe1002653e030fd25ca500b9ffe1ebd64 ?scala-reflect.jar
+c0eed5dee0a3204239c9b35134cad8b3ad140121 ?scala-reflect.jar
diff --git a/project/Build.scala b/project/Build.scala
index a50a572d54..efa8a7a038 100644
--- a/project/Build.scala
+++ b/project/Build.scala
@@ -11,7 +11,7 @@ object ScalaBuild extends Build with Layers with Packaging with Testing {
override lazy val settings = super.settings ++ Versions.settings ++ Seq(
autoScalaLibrary := false,
resolvers += Resolver.url(
- "Typesafe nightlies",
+ "Typesafe nightlies",
url("https://typesafe.artifactoryonline.com/typesafe/ivy-snapshots/")
)(Resolver.ivyStylePatterns),
resolvers ++= Seq(
@@ -21,14 +21,14 @@ object ScalaBuild extends Build with Layers with Packaging with Testing {
organization := "org.scala-lang",
version <<= Versions.mavenVersion,
pomExtra := epflPomExtra
- )
+ )
// Collections of projects to run 'compile' on.
- lazy val compiledProjects = Seq(quickLib, quickComp, continuationsLibrary, actors, swing, forkjoin, fjbg)
+ lazy val compiledProjects = Seq(quickLib, quickComp, continuationsLibrary, actors, swing, forkjoin)
// Collection of projects to 'package' and 'publish' together.
lazy val packagedBinaryProjects = Seq(scalaLibrary, scalaCompiler, swing, actors, continuationsPlugin, jline, scalap)
lazy val partestRunProjects = Seq(testsuite, continuationsTestsuite)
-
+
private def epflPomExtra = (
<xml:group>
<inceptionYear>2002</inceptionYear>
@@ -47,7 +47,7 @@ object ScalaBuild extends Build with Layers with Packaging with Testing {
</issueManagement>
</xml:group>
)
-
+
// Settings used to make sure publishing goes smoothly.
def publishSettings: Seq[Setting[_]] = Seq(
ivyScala ~= ((is: Option[IvyScala]) => is.map(_.copy(checkExplicit = false))),
@@ -82,7 +82,7 @@ object ScalaBuild extends Build with Layers with Packaging with Testing {
makeExplodedDist <<= (makeExplodedDist in scaladist).identity,
// Note: We override unmanagedSources so that ~ compile will look at all these sources, then run our aggregated compile...
unmanagedSourceDirectories in Compile <<= baseDirectory apply (_ / "src") apply { dir =>
- Seq("library/scala","actors","compiler","fjbg","swing","continuations/library","forkjoin") map (dir / _)
+ Seq("library/scala","actors","compiler","swing","continuations/library","forkjoin") map (dir / _)
},
// TODO - Make exported products == makeDist so we can use this when creating a *real* distribution.
commands += Release.pushStarr
@@ -91,7 +91,7 @@ object ScalaBuild extends Build with Layers with Packaging with Testing {
lazy val aaa_root = Project("scala", file(".")) settings(projectSettings: _*) settings(ShaResolve.settings: _*)
// External dependencies used for various projects
- lazy val externalDeps: Setting[_] = libraryDependencies <<= (sbtVersion)(v =>
+ lazy val externalDeps: Setting[_] = libraryDependencies <<= (sbtVersion)(v =>
Seq(
"org.apache.ant" % "ant" % "1.8.2",
"org.scala-sbt" % "compiler-interface" % v % "provided"
@@ -132,9 +132,7 @@ object ScalaBuild extends Build with Layers with Packaging with Testing {
// Jline nested project. Compile this sucker once and be done.
lazy val jline = Project("jline", file("src/jline"))
- // Fast Java Bytecode Generator (nested in every scala-compiler.jar)
- lazy val fjbg = Project("fjbg", file(".")) settings(settingOverrides : _*)
- // Our wrapped version of msil.
+ // Our wrapped version of asm.
lazy val asm = Project("asm", file(".")) settings(settingOverrides : _*)
// Forkjoin backport
lazy val forkjoin = Project("forkjoin", file(".")) settings(settingOverrides : _*)
@@ -175,9 +173,9 @@ object ScalaBuild extends Build with Layers with Packaging with Testing {
// --------------------------------------------------------------
// Projects dependent on layered compilation (quick)
// --------------------------------------------------------------
- def addCheaterDependency(projectName: String): Setting[_] =
- pomPostProcess <<= (version, organization, pomPostProcess) apply { (v,o,k) =>
- val dependency: scala.xml.Node =
+ def addCheaterDependency(projectName: String): Setting[_] =
+ pomPostProcess <<= (version, organization, pomPostProcess) apply { (v,o,k) =>
+ val dependency: scala.xml.Node =
<dependency>
<groupId>{o}</groupId>
<artifactid>{projectName}</artifactid>
@@ -193,10 +191,10 @@ object ScalaBuild extends Build with Layers with Packaging with Testing {
case n: scala.xml.Elem if n.label == "dependencies" => n
} isEmpty)
// TODO - Keep namespace on project...
- k andThen {
+ k andThen {
case n @ <project>{ nested@_*}</project> if hasDependencies(n) =>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0">{nested}<dependencies>{dependency}</dependencies></project>
- case <project>{ nested@_*}</project> =>
+ case <project>{ nested@_*}</project> =>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0">{ nested map fixDependencies }</project>
}
}
@@ -205,7 +203,7 @@ object ScalaBuild extends Build with Layers with Packaging with Testing {
lazy val dependentProjectSettings = settingOverrides ++ Seq(quickScalaInstance, quickScalaLibraryDependency, addCheaterDependency("scala-library"))
lazy val actors = Project("scala-actors", file(".")) settings(dependentProjectSettings:_*) dependsOn(forkjoin % "provided")
lazy val swing = Project("scala-swing", file(".")) settings(dependentProjectSettings:_*) dependsOn(actors % "provided")
- // This project will generate man pages (in man1 and html) for scala.
+ // This project will generate man pages (in man1 and html) for scala.
lazy val manmakerSettings: Seq[Setting[_]] = dependentProjectSettings :+ externalDeps
lazy val manmaker = Project("manual", file(".")) settings(manmakerSettings:_*)
@@ -234,7 +232,7 @@ object ScalaBuild extends Build with Layers with Packaging with Testing {
lazy val continuationsPlugin = Project("continuations-plugin", file(".")) settings(continuationsPluginSettings:_*)
lazy val continuationsLibrarySettings = dependentProjectSettings ++ Seq(
scalaSource in Compile <<= baseDirectory(_ / "src/continuations/library/"),
- scalacOptions in Compile <++= (exportedProducts in Compile in continuationsPlugin) map {
+ scalacOptions in Compile <++= (exportedProducts in Compile in continuationsPlugin) map {
case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath)
}
)
@@ -283,7 +281,7 @@ object ScalaBuild extends Build with Layers with Packaging with Testing {
// --------------------------------------------------------------
// Real Compiler Artifact
// --------------------------------------------------------------
- lazy val packageScalaBinTask = Seq(quickComp, fjbg, asm).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
+ lazy val packageScalaBinTask = Seq(quickComp, asm).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
lazy val scalaBinArtifactSettings : Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaBinTask)) ++ Seq(
name := "scala-compiler",
crossPaths := false,
@@ -297,11 +295,11 @@ object ScalaBuild extends Build with Layers with Packaging with Testing {
lazy val scalaCompiler = Project("scala-compiler", file(".")) settings(publishSettings:_*) settings(scalaBinArtifactSettings:_*) dependsOn(scalaReflect)
lazy val fullQuickScalaReference = makeScalaReference("pack", scalaLibrary, scalaReflect, scalaCompiler)
-
+
// --------------------------------------------------------------
// Generating Documentation.
// --------------------------------------------------------------
-
+
// TODO - Migrate this into the dist project.
// Scaladocs
lazy val documentationSettings: Seq[Setting[_]] = dependentProjectSettings ++ Seq(
@@ -331,6 +329,6 @@ object ScalaBuild extends Build with Layers with Packaging with Testing {
lazy val documentation = (
Project("documentation", file("."))
settings (documentationSettings: _*)
- dependsOn(quickLib, quickComp, actors, fjbg, forkjoin, swing, continuationsLibrary)
+ dependsOn(quickLib, quickComp, actors, forkjoin, swing, continuationsLibrary)
)
}
diff --git a/project/Layers.scala b/project/Layers.scala
index 35cc79c130..6c939d0ff7 100644
--- a/project/Layers.scala
+++ b/project/Layers.scala
@@ -13,8 +13,6 @@ trait Layers extends Build {
def jline: Project
/** Reference to forkjoin library */
def forkjoin: Project
- /** Reference to Fast-Java-Bytecode-Generator library */
- def fjbg: Project
/** Reference to the ASM wrapped project. */
def asm: Project
/** A setting that adds some external dependencies. */
@@ -23,7 +21,7 @@ trait Layers extends Build {
def aaa_root: Project
/** Creates a reference Scala version that can be used to build other projects. This takes in the raw
- * library, compiler and fjbg libraries as well as a string representing the layer name (used for compiling the compile-interface).
+ * library, compiler as well as a string representing the layer name (used for compiling the compile-interface).
*/
def makeScalaReference(layer: String, library: Project, reflect: Project, compiler: Project) =
scalaInstance <<= (appConfiguration in library,
@@ -31,10 +29,9 @@ trait Layers extends Build {
(exportedProducts in library in Compile),
(exportedProducts in reflect in Compile),
(exportedProducts in compiler in Compile),
- (exportedProducts in fjbg in Compile),
(fullClasspath in jline in Runtime),
(exportedProducts in asm in Runtime)) map {
- (app, version: String, lib: Classpath, reflect: Classpath, comp: Classpath, fjbg: Classpath, jline: Classpath, asm: Classpath) =>
+ (app, version: String, lib: Classpath, reflect: Classpath, comp: Classpath, jline: Classpath, asm: Classpath) =>
val launcher = app.provider.scalaProvider.launcher
(lib,comp) match {
case (Seq(libraryJar), Seq(compilerJar)) =>
@@ -43,14 +40,14 @@ trait Layers extends Build {
libraryJar.data,
compilerJar.data,
launcher,
- ((fjbg.files ++ jline.files ++ asm.files ++ reflect.files):_*))
+ ((jline.files ++ asm.files ++ reflect.files):_*))
case _ => error("Cannot build a ScalaReference with more than one classpath element")
}
}
/** Creates a "layer" of Scala compilation. That is, this will build the next version of Scala from a previous version.
* Returns the library project and compiler project from the next layer.
- * Note: The library and compiler are not *complete* in the sense that they are missing things like "actors" and "fjbg".
+ * Note: The library and compiler are not *complete* in the sense that they are missing things like "actors".
*/
def makeLayer(layer: String, referenceScala: Setting[Task[ScalaInstance]], autoLock: Boolean = false) : (Project, Project, Project) = {
val autoLockSettings: Seq[Setting[_]] =
@@ -96,7 +93,6 @@ trait Layers extends Build {
version := layer,
scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "compiler"),
resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "compiler"),
- unmanagedSourceDirectories in Compile <+= (baseDirectory) apply (_ / "src" / "msil"),
defaultExcludes := ("tests"),
defaultExcludes in unmanagedResources := "*.scala",
resourceGenerators in Compile <+= (resourceManaged, Versions.scalaVersions, skip in Compile, streams) map Versions.generateVersionPropertiesFile("compiler.properties"),
@@ -108,7 +104,7 @@ trait Layers extends Build {
dirs.descendentsExcept( ("*.xml" | "*.html" | "*.gif" | "*.png" | "*.js" | "*.css" | "*.tmpl" | "*.swf" | "*.properties" | "*.txt"),"*.scala").get
},
// TODO - Use depends on *and* SBT's magic dependency mechanisms...
- unmanagedClasspath in Compile <<= Seq(forkjoin, library, reflect, fjbg, jline, asm).map(exportedProducts in Compile in _).join.map(_.flatten),
+ unmanagedClasspath in Compile <<= Seq(forkjoin, library, reflect, jline, asm).map(exportedProducts in Compile in _).join.map(_.flatten),
externalDeps,
referenceScala
)
diff --git a/project/Packaging.scala b/project/Packaging.scala
index eb4e69f99e..b0060283ac 100644
--- a/project/Packaging.scala
+++ b/project/Packaging.scala
@@ -19,12 +19,12 @@ trait Packaging { self: ScalaBuild.type =>
genBin <<= genBinTask(genBinRunner, binDir, fullClasspath in Runtime, false),
binDir in genBinQuick <<= baseDirectory apply (_ / "target" / "bin"),
// Configure the classpath this way to avoid having .jar files and previous layers on the classpath.
- fullClasspath in Runtime in genBinQuick <<= Seq(quickComp,quickLib,scalap,actors,swing,fjbg,jline,forkjoin).map(classDirectory in Compile in _).join.map(Attributed.blankSeq),
+ fullClasspath in Runtime in genBinQuick <<= Seq(quickComp,quickLib,scalap,actors,swing,jline,forkjoin).map(classDirectory in Compile in _).join.map(Attributed.blankSeq),
fullClasspath in Runtime in genBinQuick <++= (fullClasspath in Compile in jline),
genBinQuick <<= genBinTask(genBinRunner, binDir in genBinQuick, fullClasspath in Runtime in genBinQuick, true),
runManmakerMan <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitManPage", "man1", ".1"),
runManmakerHtml <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitHtml", "doc", ".html"),
- // TODO - We could *really* clean this up in many ways. Let's look into making a a Seq of "direct jars" (scalaLibrary, scalaCompiler, jline, scalap)
+ // TODO - We could *really* clean this up in many ways. Let's look into making a Seq of "direct jars" (scalaLibrary, scalaCompiler, jline, scalap)
// a seq of "plugin jars" (continuationsPlugin) and "binaries" (genBin) and "documentation" mappings (genBin) that this can aggregate.
// really need to figure out a better way to pull jline + jansi.
makeDistMappings <<= (genBin,
diff --git a/project/Partest.scala b/project/Partest.scala
index fbb0a2a980..2ea41ba80b 100644
--- a/project/Partest.scala
+++ b/project/Partest.scala
@@ -33,11 +33,10 @@ object partest {
// What's fun here is that we want "*.scala" files *and* directories in the base directory...
def partestResources(base: File, testType: String): PathFinder = testType match {
case "res" => base ** "*.res"
- case "buildmanager" => base * "*"
// TODO - Only allow directories that have "*.scala" children...
case _ => base * "*" filter { f => !f.getName.endsWith(".obj") && (f.isDirectory || f.getName.endsWith(".scala")) }
}
- lazy val partestTestTypes = Seq("run", "jvm", "pos", "neg", "buildmanager", "res", "shootout", "scalap", "specialized", "presentation", "scalacheck")
+ lazy val partestTestTypes = Seq("run", "jvm", "pos", "neg", "res", "shootout", "scalap", "specialized", "presentation", "scalacheck")
// TODO - Figure out how to specify only a subset of resources...
def partestTestsTask(testDirs: ScopedSetting[Map[String,File]]): Project.Initialize[Task[Map[String, Seq[File]]]] =
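
partestResources above returns an sbt PathFinder for a given test category; dropping the "buildmanager" case (and the category itself from partestTestTypes) removes the catch-all `base * "*"` behaviour for that suite. A small usage sketch against the sbt 0.12-era API, with illustrative directory names:

    import sbt._

    // ".res" scripts for the res suite; directories and *.scala files for run.
    val resTests: PathFinder = partest.partestResources(file("test/files/res"), "res")
    val runTests: PathFinder = partest.partestResources(file("test/files/run"), "run")
    val runFiles: Seq[File]  = runTests.get
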
diff --git a/project/ShaResolve.scala b/project/ShaResolve.scala
index cea2b2d6cc..e5b25a29cf 100644
--- a/project/ShaResolve.scala
+++ b/project/ShaResolve.scala
@@ -105,6 +105,7 @@ object ShaResolve {
def parseShaFile(file: File): (File, String) =
IO.read(file).split("\\s") match {
case Array(sha, filename) if filename.startsWith("?") => (new File(file.getParentFile, filename.drop(1)), sha)
+ case Array(sha, filename) if filename.startsWith("*") => (new File(file.getParentFile, filename.drop(1)), sha)
case Array(sha, filename) => (new File(file.getParentFile, filename), sha)
case _ => error(file.getAbsolutePath + " is an invalid sha file")
}
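
With the added case, parseShaFile accepts both separator conventions that appear in this repository's desired.sha1 files: the "?name.jar" form used under lib/ and the "*name.jar" form written by sha1sum in binary mode (as in the deleted fjbg.jar.desired.sha1 above). A usage sketch, assuming the method is reachable from build code:

    import sbt._

    val shaFile = file("lib/scala-library.jar.desired.sha1")
    // With the content shown earlier in this diff, this yields
    // (file("lib/scala-library.jar"), "d7c6e69eba3dba2f75a0f44e56480cd3dbab8931").
    val (jar, sha) = ShaResolve.parseShaFile(shaFile)
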
diff --git a/project/Testing.scala b/project/Testing.scala
index 5de72116a3..5b4135a31a 100644
--- a/project/Testing.scala
+++ b/project/Testing.scala
@@ -17,7 +17,7 @@ trait Testing { self: ScalaBuild.type =>
autoScalaLibrary := false
)
lazy val continuationsTestsuiteSettings: Seq[Setting[_]] = testsuiteSettings ++ Seq(
- scalacOptions in Test <++= (exportedProducts in Compile in continuationsPlugin) map {
+ scalacOptions in Test <++= (exportedProducts in Compile in continuationsPlugin) map {
case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath)
},
partestDirs <<= baseDirectory apply { bd =>
@@ -27,14 +27,14 @@ trait Testing { self: ScalaBuild.type =>
}
)
val testsuite = (
- Project("testsuite", file("."))
+ Project("testsuite", file("."))
settings (testsuiteSettings:_*)
- dependsOn (scalaLibrary, scalaCompiler, fjbg, partest, scalacheck)
+ dependsOn (scalaLibrary, scalaCompiler, partest, scalacheck)
)
val continuationsTestsuite = (
Project("continuations-testsuite", file("."))
- settings (continuationsTestsuiteSettings:_*)
- dependsOn (partest, scalaLibrary, scalaCompiler, fjbg)
+ settings (continuationsTestsuiteSettings:_*)
+ dependsOn (partest, scalaLibrary, scalaCompiler)
)
}
diff --git a/project/project/Build.scala b/project/project/Build.scala
index 902e8b0fb3..d3a08b62ba 100644
--- a/project/project/Build.scala
+++ b/project/project/Build.scala
@@ -3,5 +3,5 @@ object PluginDef extends Build {
override def projects = Seq(root)
lazy val root = Project("plugins", file(".")) dependsOn(proguard, git)
lazy val proguard = uri("git://github.com/jsuereth/xsbt-proguard-plugin.git#sbt-0.12")
- lazy val git = uri("git://github.com/sbt/sbt-git-plugin.git#scala-build")
+ lazy val git = uri("git://github.com/sbt/sbt-git.git#scala-build")
}
diff --git a/src/actors/scala/actors/AbstractActor.scala b/src/actors/scala/actors/AbstractActor.scala
index 5a4e0d9804..3c6299aab4 100644
--- a/src/actors/scala/actors/AbstractActor.scala
+++ b/src/actors/scala/actors/AbstractActor.scala
@@ -15,6 +15,7 @@ import scala.language.higherKinds
*
* @define actor actor
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait AbstractActor extends OutputChannel[Any] with CanReply[Any, Any] {
type Future[+R] <: scala.actors.Future[R]
diff --git a/src/actors/scala/actors/Actor.scala b/src/actors/scala/actors/Actor.scala
index 61124b3e85..75160fa18f 100644
--- a/src/actors/scala/actors/Actor.scala
+++ b/src/actors/scala/actors/Actor.scala
@@ -18,6 +18,7 @@ import scala.language.implicitConversions
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
object Actor extends Combinators {
/** State of an actor.
@@ -398,6 +399,7 @@ object Actor extends Combinators {
* @define channel actor's mailbox
*/
@SerialVersionUID(-781154067877019505L)
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait Actor extends InternalActor with ReplyReactor {
override def start(): Actor = synchronized {
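
The updated deprecation message points at the akka.actor package. For orientation, here is a minimal sketch of the Akka-side counterpart, assuming an Akka 2.x dependency is available; the class name and message handling are illustrative and not part of this change:

    import akka.actor.{Actor, ActorSystem, Props}

    class Greeter extends Actor {
      def receive = {
        case name: String => println("Hello, " + name)
      }
    }

    object GreeterMain extends App {
      val system  = ActorSystem("demo")
      val greeter = system.actorOf(Props[Greeter], "greeter")
      greeter ! "Scala"
      system.shutdown() // Akka 2.0-2.3 API; later versions use terminate()
    }
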
diff --git a/src/actors/scala/actors/ActorRef.scala b/src/actors/scala/actors/ActorRef.scala
index 5c1790669b..0da167aede 100644
--- a/src/actors/scala/actors/ActorRef.scala
+++ b/src/actors/scala/actors/ActorRef.scala
@@ -45,8 +45,9 @@ trait ActorRef {
* This is what is used to complete a Future that is returned from an ask/? call,
* when it times out.
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
class AskTimeoutException(message: String, cause: Throwable) extends TimeoutException {
def this(message: String) = this(message, null: Throwable)
}
-
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
object PoisonPill
diff --git a/src/actors/scala/actors/CanReply.scala b/src/actors/scala/actors/CanReply.scala
index 3d264777a0..3f2c53f423 100644
--- a/src/actors/scala/actors/CanReply.scala
+++ b/src/actors/scala/actors/CanReply.scala
@@ -17,6 +17,7 @@ import scala.language.higherKinds
*
* @define actor `CanReply`
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait CanReply[-T, +R] {
type Future[+P] <: () => P
diff --git a/src/actors/scala/actors/Channel.scala b/src/actors/scala/actors/Channel.scala
index 9669ffbc17..ddf7b329c8 100644
--- a/src/actors/scala/actors/Channel.scala
+++ b/src/actors/scala/actors/Channel.scala
@@ -23,6 +23,7 @@ import scala.concurrent.SyncVar
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
case class ! [a](ch: Channel[a], msg: a)
/**
@@ -34,6 +35,7 @@ case class ! [a](ch: Channel[a], msg: a)
* @define actor channel
* @define channel channel
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
class Channel[Msg](val receiver: InternalActor) extends InputChannel[Msg] with OutputChannel[Msg] with CanReply[Msg, Any] {
type Future[+P] = scala.actors.Future[P]
diff --git a/src/actors/scala/actors/DaemonActor.scala b/src/actors/scala/actors/DaemonActor.scala
index ffe8b75c27..04a4b4a40c 100644
--- a/src/actors/scala/actors/DaemonActor.scala
+++ b/src/actors/scala/actors/DaemonActor.scala
@@ -18,6 +18,7 @@ import scheduler.DaemonScheduler
*
* @author Erik Engbrecht
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait DaemonActor extends Actor {
override def scheduler: IScheduler = DaemonScheduler
}
diff --git a/src/actors/scala/actors/Debug.scala b/src/actors/scala/actors/Debug.scala
index cc51dfdbae..31ef53bdbe 100644
--- a/src/actors/scala/actors/Debug.scala
+++ b/src/actors/scala/actors/Debug.scala
@@ -14,6 +14,7 @@ package scala.actors
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
object Debug extends Logger("") {}
private[actors] class Logger(tag: String) {
diff --git a/src/actors/scala/actors/Future.scala b/src/actors/scala/actors/Future.scala
index 3037f82141..9d123cb2d5 100644
--- a/src/actors/scala/actors/Future.scala
+++ b/src/actors/scala/actors/Future.scala
@@ -21,6 +21,7 @@ import scala.concurrent.SyncVar
*
* @author Philipp Haller
*/
+@deprecated("Use the scala.concurrent.Future instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
abstract class Future[+T] extends Responder[T] with Function0[T] {
@volatile
@@ -107,6 +108,7 @@ private class FutureActor[T](fun: SyncVar[T] => Unit, channel: Channel[T]) exten
*
* @author Philipp Haller
*/
+@deprecated("Use the object scala.concurrent.Future instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
object Futures {
/** Arranges for the asynchronous execution of `body`,
@@ -174,7 +176,7 @@ object Futures {
* or timeout + `System.currentTimeMillis()` is negative.
*/
def awaitAll(timeout: Long, fts: Future[Any]*): List[Option[Any]] = {
- var resultsMap: scala.collection.mutable.Map[Int, Option[Any]] = new scala.collection.mutable.HashMap[Int, Option[Any]]
+ val resultsMap: scala.collection.mutable.Map[Int, Option[Any]] = new scala.collection.mutable.HashMap[Int, Option[Any]]
var cnt = 0
val mappedFts = fts.map(ft =>
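
Futures above is the helper object for the deprecated scala.actors futures; awaitAll returns Some(result) for each future that completes in time and None for any that misses the timeout. A small usage sketch of that API:

    import scala.actors.Futures

    val fast = Futures.future { 21 * 2 }
    val slow = Futures.future { Thread.sleep(10000); "late" }
    // Expected List(Some(42), None) when the second future misses the 1s timeout.
    val results: List[Option[Any]] = Futures.awaitAll(1000L, fast, slow)
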
diff --git a/src/actors/scala/actors/IScheduler.scala b/src/actors/scala/actors/IScheduler.scala
index 35c2d32590..9d61d48561 100644
--- a/src/actors/scala/actors/IScheduler.scala
+++ b/src/actors/scala/actors/IScheduler.scala
@@ -17,6 +17,7 @@ package scala.actors
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait IScheduler {
/** Submits a closure for execution.
diff --git a/src/actors/scala/actors/InputChannel.scala b/src/actors/scala/actors/InputChannel.scala
index 3d7dd7d49b..d2dd6d24df 100644
--- a/src/actors/scala/actors/InputChannel.scala
+++ b/src/actors/scala/actors/InputChannel.scala
@@ -16,6 +16,7 @@ package scala.actors
*
* @define channel `InputChannel`
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait InputChannel[+Msg] {
/**
diff --git a/src/actors/scala/actors/InternalActor.scala b/src/actors/scala/actors/InternalActor.scala
index ed9e25c1e6..5045ea56e8 100644
--- a/src/actors/scala/actors/InternalActor.scala
+++ b/src/actors/scala/actors/InternalActor.scala
@@ -524,6 +524,7 @@ private[actors] trait InternalActor extends AbstractActor with InternalReplyReac
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
case object TIMEOUT
/**
@@ -534,6 +535,7 @@ case object TIMEOUT
* @param from the actor that terminated
* @param reason the reason that caused the actor to terminate
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
case class Exit(from: AbstractActor, reason: AnyRef)
/**
diff --git a/src/actors/scala/actors/InternalReplyReactor.scala b/src/actors/scala/actors/InternalReplyReactor.scala
index 38295138d4..c744984fd8 100644
--- a/src/actors/scala/actors/InternalReplyReactor.scala
+++ b/src/actors/scala/actors/InternalReplyReactor.scala
@@ -12,6 +12,7 @@ import java.util.{TimerTask}
*
* @define actor `ReplyReactor`
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait InternalReplyReactor extends Reactor[Any] with ReactorCanReply {
/* A list of the current senders. The head of the list is
diff --git a/src/actors/scala/actors/OutputChannel.scala b/src/actors/scala/actors/OutputChannel.scala
index fd87f813a0..f0f475e123 100644
--- a/src/actors/scala/actors/OutputChannel.scala
+++ b/src/actors/scala/actors/OutputChannel.scala
@@ -15,6 +15,7 @@ package scala.actors
*
* @define actor `OutputChannel`
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait OutputChannel[-Msg] {
/**
diff --git a/src/actors/scala/actors/Reactor.scala b/src/actors/scala/actors/Reactor.scala
index f025f6bc29..aa985b3a17 100644
--- a/src/actors/scala/actors/Reactor.scala
+++ b/src/actors/scala/actors/Reactor.scala
@@ -52,6 +52,7 @@ private[actors] object Reactor {
*
* @define actor reactor
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait Reactor[Msg >: Null] extends OutputChannel[Msg] with Combinators {
/* The $actor's mailbox. */
diff --git a/src/actors/scala/actors/ReplyReactor.scala b/src/actors/scala/actors/ReplyReactor.scala
index a2051d4354..01e6da000f 100644
--- a/src/actors/scala/actors/ReplyReactor.scala
+++ b/src/actors/scala/actors/ReplyReactor.scala
@@ -7,7 +7,7 @@
\* */
package scala.actors
-@deprecated("Scala Actors are being removed from the standard library. Please refer to the migration guide.", "2.10")
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait ReplyReactor extends InternalReplyReactor {
protected[actors] def sender: OutputChannel[Any] = super.internalSender
}
diff --git a/src/actors/scala/actors/Scheduler.scala b/src/actors/scala/actors/Scheduler.scala
index dd6c110ed3..5b5b4a946d 100644
--- a/src/actors/scala/actors/Scheduler.scala
+++ b/src/actors/scala/actors/Scheduler.scala
@@ -18,6 +18,7 @@ import scheduler.{DelegatingScheduler, ForkJoinScheduler, ResizableThreadPoolSch
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
object Scheduler extends DelegatingScheduler {
Debug.info("initializing "+this+"...")
diff --git a/src/actors/scala/actors/SchedulerAdapter.scala b/src/actors/scala/actors/SchedulerAdapter.scala
index fb28b3f93a..b8e66dd6cc 100644
--- a/src/actors/scala/actors/SchedulerAdapter.scala
+++ b/src/actors/scala/actors/SchedulerAdapter.scala
@@ -18,6 +18,7 @@ package scala.actors
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait SchedulerAdapter extends IScheduler {
/** Submits a <code>Runnable</code> for execution.
diff --git a/src/actors/scala/actors/UncaughtException.scala b/src/actors/scala/actors/UncaughtException.scala
index f225987ddc..02b916a3b5 100644
--- a/src/actors/scala/actors/UncaughtException.scala
+++ b/src/actors/scala/actors/UncaughtException.scala
@@ -20,6 +20,7 @@ package scala.actors
* @author Philipp Haller
* @author Erik Engbrecht
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
case class UncaughtException(actor: InternalActor,
message: Option[Any],
sender: Option[OutputChannel[Any]],
diff --git a/src/actors/scala/actors/package.scala b/src/actors/scala/actors/package.scala
index d176487e03..ae960860cf 100644
--- a/src/actors/scala/actors/package.scala
+++ b/src/actors/scala/actors/package.scala
@@ -14,6 +14,7 @@ package scala
* A starting point for using the actors library would be [[scala.actors.Reactor]],
* [[scala.actors.ReplyReactor]], or [[scala.actors.Actor]] or their companion objects.
*
+ * @note As of release 2.10.1, replaced by <code>akka.actor</code> package. For migration of existing actors refer to the Actors Migration Guide.
*/
package object actors {
diff --git a/src/actors/scala/actors/remote/JavaSerializer.scala b/src/actors/scala/actors/remote/JavaSerializer.scala
index 6e9f4a7c51..7549bbf429 100644
--- a/src/actors/scala/actors/remote/JavaSerializer.scala
+++ b/src/actors/scala/actors/remote/JavaSerializer.scala
@@ -39,6 +39,7 @@ extends ObjectInputStream(in) {
/**
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
class JavaSerializer(serv: Service, cl: ClassLoader) extends Serializer(serv) {
def serialize(o: AnyRef): Array[Byte] = {
val bos = new ByteArrayOutputStream()
diff --git a/src/actors/scala/actors/remote/RemoteActor.scala b/src/actors/scala/actors/remote/RemoteActor.scala
index f1644c27ba..799076a01f 100644
--- a/src/actors/scala/actors/remote/RemoteActor.scala
+++ b/src/actors/scala/actors/remote/RemoteActor.scala
@@ -38,6 +38,7 @@ package remote
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
object RemoteActor {
private val kernels = new scala.collection.mutable.HashMap[InternalActor, NetKernel]
@@ -127,4 +128,5 @@ object RemoteActor {
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
case class Node(address: String, port: Int)
diff --git a/src/actors/scala/actors/remote/Serializer.scala b/src/actors/scala/actors/remote/Serializer.scala
index e39b01fe24..7be4aa6583 100644
--- a/src/actors/scala/actors/remote/Serializer.scala
+++ b/src/actors/scala/actors/remote/Serializer.scala
@@ -16,6 +16,7 @@ import java.lang.ClassNotFoundException
import java.io.{DataInputStream, DataOutputStream, EOFException, IOException}
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
abstract class Serializer(val service: Service) {
def serialize(o: AnyRef): Array[Byte]
def deserialize(a: Array[Byte]): AnyRef
diff --git a/src/actors/scala/actors/remote/Service.scala b/src/actors/scala/actors/remote/Service.scala
index 4584cc308b..d102df1970 100644
--- a/src/actors/scala/actors/remote/Service.scala
+++ b/src/actors/scala/actors/remote/Service.scala
@@ -14,6 +14,7 @@ package remote
* @version 0.9.10
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait Service {
val kernel = new NetKernel(this)
val serializer: Serializer
diff --git a/src/actors/scala/actors/remote/TcpService.scala b/src/actors/scala/actors/remote/TcpService.scala
index bde05fd816..8163ae9fc6 100644
--- a/src/actors/scala/actors/remote/TcpService.scala
+++ b/src/actors/scala/actors/remote/TcpService.scala
@@ -24,6 +24,7 @@ import scala.util.Random
* @version 0.9.9
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
object TcpService {
private val random = new Random
private val ports = new mutable.HashMap[Int, TcpService]
@@ -67,6 +68,7 @@ object TcpService {
* @version 0.9.10
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
class TcpService(port: Int, cl: ClassLoader) extends Thread with Service {
val serializer: JavaSerializer = new JavaSerializer(this, cl)
diff --git a/src/actors/scala/actors/scheduler/ActorGC.scala b/src/actors/scala/actors/scheduler/ActorGC.scala
index 6d9a9458ba..a27799d132 100644
--- a/src/actors/scala/actors/scheduler/ActorGC.scala
+++ b/src/actors/scala/actors/scheduler/ActorGC.scala
@@ -23,6 +23,7 @@ import scala.collection.mutable
* (e.g. act method finishes, exit explicitly called, an exception is thrown),
* the ActorGC is informed via the `terminated` method.
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait ActorGC extends TerminationMonitor {
self: IScheduler =>
diff --git a/src/actors/scala/actors/scheduler/DaemonScheduler.scala b/src/actors/scala/actors/scheduler/DaemonScheduler.scala
index a2d6941ec1..b21a1aa3e6 100644
--- a/src/actors/scala/actors/scheduler/DaemonScheduler.scala
+++ b/src/actors/scala/actors/scheduler/DaemonScheduler.scala
@@ -14,6 +14,7 @@ package scheduler
*
* @author Erik Engbrecht
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
object DaemonScheduler extends DelegatingScheduler {
protected def makeNewScheduler(): IScheduler = {
diff --git a/src/actors/scala/actors/scheduler/ExecutorScheduler.scala b/src/actors/scala/actors/scheduler/ExecutorScheduler.scala
index a1d5666a24..4d3ebc3c04 100644
--- a/src/actors/scala/actors/scheduler/ExecutorScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ExecutorScheduler.scala
@@ -19,6 +19,7 @@ import scala.concurrent.ThreadPoolRunner
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
object ExecutorScheduler {
private def start(sched: ExecutorScheduler): ExecutorScheduler = {
@@ -58,6 +59,7 @@ object ExecutorScheduler {
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait ExecutorScheduler extends Thread
with IScheduler with TerminationService
with ThreadPoolRunner {
diff --git a/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala b/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
index ce67ffd037..ac123cfe26 100644
--- a/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
@@ -9,6 +9,7 @@ import scala.concurrent.forkjoin._
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean, fair: Boolean)
extends Runnable with IScheduler with TerminationMonitor {
diff --git a/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala b/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
index f370d45094..2c4b7677b0 100644
--- a/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
@@ -22,6 +22,7 @@ import scala.concurrent.ManagedBlocker
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
class ResizableThreadPoolScheduler(protected val terminate: Boolean,
protected val daemon: Boolean)
extends Thread with IScheduler with TerminationMonitor {
diff --git a/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala b/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala
index 04d1d2c5c1..03b235fe74 100644
--- a/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala
+++ b/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala
@@ -17,6 +17,7 @@ import scala.collection.mutable
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
class SingleThreadedScheduler extends IScheduler {
private val tasks = new mutable.Queue[Runnable]
diff --git a/src/asm/scala/tools/asm/AnnotationVisitor.java b/src/asm/scala/tools/asm/AnnotationVisitor.java
index b96e730a73..c806ca71e8 100644
--- a/src/asm/scala/tools/asm/AnnotationVisitor.java
+++ b/src/asm/scala/tools/asm/AnnotationVisitor.java
@@ -54,8 +54,9 @@ public abstract class AnnotationVisitor {
/**
* Constructs a new {@link AnnotationVisitor}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
*/
public AnnotationVisitor(final int api) {
this(api, null);
@@ -64,15 +65,17 @@ public abstract class AnnotationVisitor {
/**
* Constructs a new {@link AnnotationVisitor}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
- * @param av the annotation visitor to which this visitor must delegate
- * method calls. May be null.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param av
+ * the annotation visitor to which this visitor must delegate
+ * method calls. May be null.
*/
public AnnotationVisitor(final int api, final AnnotationVisitor av) {
- /*if (api != Opcodes.ASM4) {
+ if (api != Opcodes.ASM4) {
throw new IllegalArgumentException();
- }*/
+ }
this.api = api;
this.av = av;
}
@@ -80,14 +83,17 @@ public abstract class AnnotationVisitor {
/**
* Visits a primitive value of the annotation.
*
- * @param name the value name.
- * @param value the actual value, whose type must be {@link Byte},
- * {@link Boolean}, {@link Character}, {@link Short}, {@link Integer}
- * , {@link Long}, {@link Float}, {@link Double}, {@link String} or
- * {@link Type} or OBJECT or ARRAY sort. This value can also be an
- * array of byte, boolean, short, char, int, long, float or double
- * values (this is equivalent to using {@link #visitArray visitArray}
- * and visiting each array element in turn, but is more convenient).
+ * @param name
+ * the value name.
+ * @param value
+ * the actual value, whose type must be {@link Byte},
+ * {@link Boolean}, {@link Character}, {@link Short},
+ * {@link Integer} , {@link Long}, {@link Float}, {@link Double},
+ * {@link String} or {@link Type} or OBJECT or ARRAY sort. This
+ * value can also be an array of byte, boolean, short, char, int,
+ * long, float or double values (this is equivalent to using
+ * {@link #visitArray visitArray} and visiting each array element
+ * in turn, but is more convenient).
*/
public void visit(String name, Object value) {
if (av != null) {
@@ -98,9 +104,12 @@ public abstract class AnnotationVisitor {
/**
* Visits an enumeration value of the annotation.
*
- * @param name the value name.
- * @param desc the class descriptor of the enumeration class.
- * @param value the actual enumeration value.
+ * @param name
+ * the value name.
+ * @param desc
+ * the class descriptor of the enumeration class.
+ * @param value
+ * the actual enumeration value.
*/
public void visitEnum(String name, String desc, String value) {
if (av != null) {
@@ -111,12 +120,14 @@ public abstract class AnnotationVisitor {
/**
* Visits a nested annotation value of the annotation.
*
- * @param name the value name.
- * @param desc the class descriptor of the nested annotation class.
+ * @param name
+ * the value name.
+ * @param desc
+ * the class descriptor of the nested annotation class.
* @return a visitor to visit the actual nested annotation value, or
- * <tt>null</tt> if this visitor is not interested in visiting
- * this nested annotation. <i>The nested annotation value must be
- * fully visited before calling other methods on this annotation
+ * <tt>null</tt> if this visitor is not interested in visiting this
+ * nested annotation. <i>The nested annotation value must be fully
+ * visited before calling other methods on this annotation
* visitor</i>.
*/
public AnnotationVisitor visitAnnotation(String name, String desc) {
@@ -132,10 +143,11 @@ public abstract class AnnotationVisitor {
* can be passed as value to {@link #visit visit}. This is what
* {@link ClassReader} does.
*
- * @param name the value name.
+ * @param name
+ * the value name.
* @return a visitor to visit the actual array value elements, or
- * <tt>null</tt> if this visitor is not interested in visiting
- * these values. The 'name' parameters passed to the methods of this
+ * <tt>null</tt> if this visitor is not interested in visiting these
+ * values. The 'name' parameters passed to the methods of this
* visitor are ignored. <i>All the array values must be visited
* before calling other methods on this annotation visitor</i>.
*/
diff --git a/src/asm/scala/tools/asm/AnnotationWriter.java b/src/asm/scala/tools/asm/AnnotationWriter.java
index e530780249..8eb5b2ef48 100644
--- a/src/asm/scala/tools/asm/AnnotationWriter.java
+++ b/src/asm/scala/tools/asm/AnnotationWriter.java
@@ -90,20 +90,20 @@ final class AnnotationWriter extends AnnotationVisitor {
/**
* Constructs a new {@link AnnotationWriter}.
*
- * @param cw the class writer to which this annotation must be added.
- * @param named <tt>true<tt> if values are named, <tt>false</tt> otherwise.
- * @param bv where the annotation values must be stored.
- * @param parent where the number of annotation values must be stored.
- * @param offset where in <tt>parent</tt> the number of annotation values must
- * be stored.
+ * @param cw
+ * the class writer to which this annotation must be added.
+ * @param named
+ * <tt>true<tt> if values are named, <tt>false</tt> otherwise.
+ * @param bv
+ * where the annotation values must be stored.
+ * @param parent
+ * where the number of annotation values must be stored.
+ * @param offset
+ * where in <tt>parent</tt> the number of annotation values must
+ * be stored.
*/
- AnnotationWriter(
- final ClassWriter cw,
- final boolean named,
- final ByteVector bv,
- final ByteVector parent,
- final int offset)
- {
+ AnnotationWriter(final ClassWriter cw, final boolean named,
+ final ByteVector bv, final ByteVector parent, final int offset) {
super(Opcodes.ASM4);
this.cw = cw;
this.named = named;
@@ -190,11 +190,8 @@ final class AnnotationWriter extends AnnotationVisitor {
}
@Override
- public void visitEnum(
- final String name,
- final String desc,
- final String value)
- {
+ public void visitEnum(final String name, final String desc,
+ final String value) {
++size;
if (named) {
bv.putShort(cw.newUTF8(name));
@@ -203,10 +200,8 @@ final class AnnotationWriter extends AnnotationVisitor {
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String name,
- final String desc)
- {
+ public AnnotationVisitor visitAnnotation(final String name,
+ final String desc) {
++size;
if (named) {
bv.putShort(cw.newUTF8(name));
@@ -259,7 +254,8 @@ final class AnnotationWriter extends AnnotationVisitor {
* Puts the annotations of this annotation writer list into the given byte
* vector.
*
- * @param out where the annotations must be put.
+ * @param out
+ * where the annotations must be put.
*/
void put(final ByteVector out) {
int n = 0;
@@ -286,15 +282,15 @@ final class AnnotationWriter extends AnnotationVisitor {
/**
* Puts the given annotation lists into the given byte vector.
*
- * @param panns an array of annotation writer lists.
- * @param off index of the first annotation to be written.
- * @param out where the annotations must be put.
+ * @param panns
+ * an array of annotation writer lists.
+ * @param off
+ * index of the first annotation to be written.
+ * @param out
+ * where the annotations must be put.
*/
- static void put(
- final AnnotationWriter[] panns,
- final int off,
- final ByteVector out)
- {
+ static void put(final AnnotationWriter[] panns, final int off,
+ final ByteVector out) {
int size = 1 + 2 * (panns.length - off);
for (int i = off; i < panns.length; ++i) {
size += panns[i] == null ? 0 : panns[i].getSize();
diff --git a/src/asm/scala/tools/asm/Attribute.java b/src/asm/scala/tools/asm/Attribute.java
index 408f21ce1e..ac40a758a2 100644
--- a/src/asm/scala/tools/asm/Attribute.java
+++ b/src/asm/scala/tools/asm/Attribute.java
@@ -55,7 +55,8 @@ public class Attribute {
/**
* Constructs a new empty attribute.
*
- * @param type the type of the attribute.
+ * @param type
+ * the type of the attribute.
*/
protected Attribute(final String type) {
this.type = type;
@@ -91,39 +92,39 @@ public class Attribute {
}
/**
- * Reads a {@link #type type} attribute. This method must return a <i>new</i>
- * {@link Attribute} object, of type {@link #type type}, corresponding to
- * the <tt>len</tt> bytes starting at the given offset, in the given class
- * reader.
+ * Reads a {@link #type type} attribute. This method must return a
+ * <i>new</i> {@link Attribute} object, of type {@link #type type},
+ * corresponding to the <tt>len</tt> bytes starting at the given offset, in
+ * the given class reader.
*
- * @param cr the class that contains the attribute to be read.
- * @param off index of the first byte of the attribute's content in {@link
- * ClassReader#b cr.b}. The 6 attribute header bytes, containing the
- * type and the length of the attribute, are not taken into account
- * here.
- * @param len the length of the attribute's content.
- * @param buf buffer to be used to call
- * {@link ClassReader#readUTF8 readUTF8},
- * {@link ClassReader#readClass(int,char[]) readClass} or
- * {@link ClassReader#readConst readConst}.
- * @param codeOff index of the first byte of code's attribute content in
- * {@link ClassReader#b cr.b}, or -1 if the attribute to be read is
- * not a code attribute. The 6 attribute header bytes, containing the
- * type and the length of the attribute, are not taken into account
- * here.
- * @param labels the labels of the method's code, or <tt>null</tt> if the
- * attribute to be read is not a code attribute.
+ * @param cr
+ * the class that contains the attribute to be read.
+ * @param off
+ * index of the first byte of the attribute's content in
+ * {@link ClassReader#b cr.b}. The 6 attribute header bytes,
+ * containing the type and the length of the attribute, are not
+ * taken into account here.
+ * @param len
+ * the length of the attribute's content.
+ * @param buf
+ * buffer to be used to call {@link ClassReader#readUTF8
+ * readUTF8}, {@link ClassReader#readClass(int,char[]) readClass}
+ * or {@link ClassReader#readConst readConst}.
+ * @param codeOff
+ * index of the first byte of code's attribute content in
+ * {@link ClassReader#b cr.b}, or -1 if the attribute to be read
+ * is not a code attribute. The 6 attribute header bytes,
+ * containing the type and the length of the attribute, are not
+ * taken into account here.
+ * @param labels
+ * the labels of the method's code, or <tt>null</tt> if the
+ * attribute to be read is not a code attribute.
* @return a <i>new</i> {@link Attribute} object corresponding to the given
* bytes.
*/
- protected Attribute read(
- final ClassReader cr,
- final int off,
- final int len,
- final char[] buf,
- final int codeOff,
- final Label[] labels)
- {
+ protected Attribute read(final ClassReader cr, final int off,
+ final int len, final char[] buf, final int codeOff,
+ final Label[] labels) {
Attribute attr = new Attribute(type);
attr.value = new byte[len];
System.arraycopy(cr.b, off, attr.value, 0, len);
@@ -133,30 +134,30 @@ public class Attribute {
/**
* Returns the byte array form of this attribute.
*
- * @param cw the class to which this attribute must be added. This parameter
- * can be used to add to the constant pool of this class the items
- * that corresponds to this attribute.
- * @param code the bytecode of the method corresponding to this code
- * attribute, or <tt>null</tt> if this attribute is not a code
- * attributes.
- * @param len the length of the bytecode of the method corresponding to this
- * code attribute, or <tt>null</tt> if this attribute is not a code
- * attribute.
- * @param maxStack the maximum stack size of the method corresponding to
- * this code attribute, or -1 if this attribute is not a code
- * attribute.
- * @param maxLocals the maximum number of local variables of the method
- * corresponding to this code attribute, or -1 if this attribute is
- * not a code attribute.
+ * @param cw
+ * the class to which this attribute must be added. This
+ * parameter can be used to add to the constant pool of this
+ * class the items that correspond to this attribute.
+ * @param code
+ * the bytecode of the method corresponding to this code
+ * attribute, or <tt>null</tt> if this attribute is not a code
+ * attribute.
+ * @param len
+ * the length of the bytecode of the method corresponding to this
+ * code attribute, or <tt>null</tt> if this attribute is not a
+ * code attribute.
+ * @param maxStack
+ * the maximum stack size of the method corresponding to this
+ * code attribute, or -1 if this attribute is not a code
+ * attribute.
+ * @param maxLocals
+ * the maximum number of local variables of the method
+ * corresponding to this code attribute, or -1 if this attribute
+ * is not a code attribute.
* @return the byte array form of this attribute.
*/
- protected ByteVector write(
- final ClassWriter cw,
- final byte[] code,
- final int len,
- final int maxStack,
- final int maxLocals)
- {
+ protected ByteVector write(final ClassWriter cw, final byte[] code,
+ final int len, final int maxStack, final int maxLocals) {
ByteVector v = new ByteVector();
v.data = value;
v.length = value.length;
@@ -181,30 +182,30 @@ public class Attribute {
/**
* Returns the size of all the attributes in this attribute list.
*
- * @param cw the class writer to be used to convert the attributes into byte
- * arrays, with the {@link #write write} method.
- * @param code the bytecode of the method corresponding to these code
- * attributes, or <tt>null</tt> if these attributes are not code
- * attributes.
- * @param len the length of the bytecode of the method corresponding to
- * these code attributes, or <tt>null</tt> if these attributes are
- * not code attributes.
- * @param maxStack the maximum stack size of the method corresponding to
- * these code attributes, or -1 if these attributes are not code
- * attributes.
- * @param maxLocals the maximum number of local variables of the method
- * corresponding to these code attributes, or -1 if these attributes
- * are not code attributes.
+ * @param cw
+ * the class writer to be used to convert the attributes into
+ * byte arrays, with the {@link #write write} method.
+ * @param code
+ * the bytecode of the method corresponding to these code
+ * attributes, or <tt>null</tt> if these attributes are not code
+ * attributes.
+ * @param len
+ * the length of the bytecode of the method corresponding to
+ * these code attributes, or <tt>null</tt> if these attributes
+ * are not code attributes.
+ * @param maxStack
+ * the maximum stack size of the method corresponding to these
+ * code attributes, or -1 if these attributes are not code
+ * attributes.
+ * @param maxLocals
+ * the maximum number of local variables of the method
+ * corresponding to these code attributes, or -1 if these
+ * attributes are not code attributes.
* @return the size of all the attributes in this attribute list. This size
* includes the size of the attribute headers.
*/
- final int getSize(
- final ClassWriter cw,
- final byte[] code,
- final int len,
- final int maxStack,
- final int maxLocals)
- {
+ final int getSize(final ClassWriter cw, final byte[] code, final int len,
+ final int maxStack, final int maxLocals) {
Attribute attr = this;
int size = 0;
while (attr != null) {
@@ -219,30 +220,30 @@ public class Attribute {
* Writes all the attributes of this attribute list in the given byte
* vector.
*
- * @param cw the class writer to be used to convert the attributes into byte
- * arrays, with the {@link #write write} method.
- * @param code the bytecode of the method corresponding to these code
- * attributes, or <tt>null</tt> if these attributes are not code
- * attributes.
- * @param len the length of the bytecode of the method corresponding to
- * these code attributes, or <tt>null</tt> if these attributes are
- * not code attributes.
- * @param maxStack the maximum stack size of the method corresponding to
- * these code attributes, or -1 if these attributes are not code
- * attributes.
- * @param maxLocals the maximum number of local variables of the method
- * corresponding to these code attributes, or -1 if these attributes
- * are not code attributes.
- * @param out where the attributes must be written.
+ * @param cw
+ * the class writer to be used to convert the attributes into
+ * byte arrays, with the {@link #write write} method.
+ * @param code
+ * the bytecode of the method corresponding to these code
+ * attributes, or <tt>null</tt> if these attributes are not code
+ * attributes.
+ * @param len
+ * the length of the bytecode of the method corresponding to
+ * these code attributes, or <tt>null</tt> if these attributes
+ * are not code attributes.
+ * @param maxStack
+ * the maximum stack size of the method corresponding to these
+ * code attributes, or -1 if these attributes are not code
+ * attributes.
+ * @param maxLocals
+ * the maximum number of local variables of the method
+ * corresponding to these code attributes, or -1 if these
+ * attributes are not code attributes.
+ * @param out
+ * where the attributes must be written.
*/
- final void put(
- final ClassWriter cw,
- final byte[] code,
- final int len,
- final int maxStack,
- final int maxLocals,
- final ByteVector out)
- {
+ final void put(final ClassWriter cw, final byte[] code, final int len,
+ final int maxStack, final int maxLocals, final ByteVector out) {
Attribute attr = this;
while (attr != null) {
ByteVector b = attr.write(cw, code, len, maxStack, maxLocals);
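Editor's note: getSize and put above both walk a singly linked chain of Attribute instances through their next field, adding six header bytes per node on top of each payload. A hedged, self-contained sketch of that traversal pattern (not ASM's classes), where the six bytes mirror the attribute_info header, a 2-byte name index plus a 4-byte content length:

    // Minimal sketch of the linked-attribute traversal used by getSize/put
    // above. Names are illustrative only; the payload stands in for whatever
    // Attribute.write would produce for each node.
    final class AttributeChainSketch {
        static final class Node {
            final byte[] content;
            Node next;
            Node(byte[] content) { this.content = content; }
        }

        static int totalSize(Node head) {
            int size = 0;
            for (Node attr = head; attr != null; attr = attr.next) {
                size += 6 + attr.content.length; // 2-byte name + 4-byte length
            }
            return size;
        }
    }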
diff --git a/src/asm/scala/tools/asm/ByteVector.java b/src/asm/scala/tools/asm/ByteVector.java
index 5081f0184b..2bc63eb384 100644
--- a/src/asm/scala/tools/asm/ByteVector.java
+++ b/src/asm/scala/tools/asm/ByteVector.java
@@ -59,7 +59,8 @@ public class ByteVector {
* Constructs a new {@link ByteVector ByteVector} with the given initial
* size.
*
- * @param initialSize the initial size of the byte vector to be constructed.
+ * @param initialSize
+ * the initial size of the byte vector to be constructed.
*/
public ByteVector(final int initialSize) {
data = new byte[initialSize];
@@ -69,7 +70,8 @@ public class ByteVector {
* Puts a byte into this byte vector. The byte vector is automatically
* enlarged if necessary.
*
- * @param b a byte.
+ * @param b
+ * a byte.
* @return this byte vector.
*/
public ByteVector putByte(final int b) {
@@ -86,8 +88,10 @@ public class ByteVector {
* Puts two bytes into this byte vector. The byte vector is automatically
* enlarged if necessary.
*
- * @param b1 a byte.
- * @param b2 another byte.
+ * @param b1
+ * a byte.
+ * @param b2
+ * another byte.
* @return this byte vector.
*/
ByteVector put11(final int b1, final int b2) {
@@ -106,7 +110,8 @@ public class ByteVector {
* Puts a short into this byte vector. The byte vector is automatically
* enlarged if necessary.
*
- * @param s a short.
+ * @param s
+ * a short.
* @return this byte vector.
*/
public ByteVector putShort(final int s) {
@@ -125,8 +130,10 @@ public class ByteVector {
* Puts a byte and a short into this byte vector. The byte vector is
* automatically enlarged if necessary.
*
- * @param b a byte.
- * @param s a short.
+ * @param b
+ * a byte.
+ * @param s
+ * a short.
* @return this byte vector.
*/
ByteVector put12(final int b, final int s) {
@@ -146,7 +153,8 @@ public class ByteVector {
* Puts an int into this byte vector. The byte vector is automatically
* enlarged if necessary.
*
- * @param i an int.
+ * @param i
+ * an int.
* @return this byte vector.
*/
public ByteVector putInt(final int i) {
@@ -167,7 +175,8 @@ public class ByteVector {
* Puts a long into this byte vector. The byte vector is automatically
* enlarged if necessary.
*
- * @param l a long.
+ * @param l
+ * a long.
* @return this byte vector.
*/
public ByteVector putLong(final long l) {
@@ -194,7 +203,8 @@ public class ByteVector {
* Puts an UTF8 string into this byte vector. The byte vector is
* automatically enlarged if necessary.
*
- * @param s a String.
+ * @param s
+ * a String.
* @return this byte vector.
*/
public ByteVector putUTF8(final String s) {
@@ -259,14 +269,16 @@ public class ByteVector {
* Puts an array of bytes into this byte vector. The byte vector is
* automatically enlarged if necessary.
*
- * @param b an array of bytes. May be <tt>null</tt> to put <tt>len</tt>
- * null bytes into this byte vector.
- * @param off index of the fist byte of b that must be copied.
- * @param len number of bytes of b that must be copied.
+ * @param b
+ * an array of bytes. May be <tt>null</tt> to put <tt>len</tt>
+ * null bytes into this byte vector.
+ * @param off
+ * index of the first byte of b that must be copied.
+ * @param len
+ * number of bytes of b that must be copied.
* @return this byte vector.
*/
- public ByteVector putByteArray(final byte[] b, final int off, final int len)
- {
+ public ByteVector putByteArray(final byte[] b, final int off, final int len) {
if (length + len > data.length) {
enlarge(len);
}
@@ -280,8 +292,9 @@ public class ByteVector {
/**
* Enlarge this byte vector so that it can receive n more bytes.
*
- * @param size number of additional bytes that this byte vector should be
- * able to receive.
+ * @param size
+ * number of additional bytes that this byte vector should be
+ * able to receive.
*/
private void enlarge(final int size) {
int length1 = 2 * data.length;
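Editor's note: enlarge is cut off above, but its growth policy, taking the larger of twice the current capacity and the exact space needed, is visible in its first line. A standalone sketch assuming that policy, in a toy buffer class rather than ByteVector itself:

    // Standalone sketch of a growable byte buffer with the doubling policy
    // assumed for ByteVector.enlarge: grow to max(2 * capacity, length + size)
    // and copy the old contents over before appending.
    final class GrowableBuffer {
        private byte[] data = new byte[64];
        private int length;

        void putByte(int b) {
            if (length + 1 > data.length) {
                enlarge(1);
            }
            data[length++] = (byte) b;
        }

        private void enlarge(int size) {
            int doubled = 2 * data.length;
            int needed = length + size;
            byte[] newData = new byte[doubled > needed ? doubled : needed];
            System.arraycopy(data, 0, newData, 0, length);
            data = newData;
        }
    }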
diff --git a/src/asm/scala/tools/asm/ClassReader.java b/src/asm/scala/tools/asm/ClassReader.java
index f3287d41ae..cc655c1b62 100644
--- a/src/asm/scala/tools/asm/ClassReader.java
+++ b/src/asm/scala/tools/asm/ClassReader.java
@@ -112,9 +112,8 @@ public class ClassReader {
public final byte[] b;
/**
- * The start index of each constant pool item in {@link #b b}, plus one.
- * The one byte offset skips the constant pool item tag that indicates its
- * type.
+ * The start index of each constant pool item in {@link #b b}, plus one. The
+ * one byte offset skips the constant pool item tag that indicates its type.
*/
private final int[] items;
@@ -147,7 +146,8 @@ public class ClassReader {
/**
* Constructs a new {@link ClassReader} object.
*
- * @param b the bytecode of the class to be read.
+ * @param b
+ * the bytecode of the class to be read.
*/
public ClassReader(final byte[] b) {
this(b, 0, b.length);
@@ -156,14 +156,17 @@ public class ClassReader {
/**
* Constructs a new {@link ClassReader} object.
*
- * @param b the bytecode of the class to be read.
- * @param off the start offset of the class data.
- * @param len the length of the class data.
+ * @param b
+ * the bytecode of the class to be read.
+ * @param off
+ * the start offset of the class data.
+ * @param len
+ * the length of the class data.
*/
public ClassReader(final byte[] b, final int off, final int len) {
this.b = b;
// checks the class version
- if (readShort(6) > Opcodes.V1_7) {
+ if (readShort(off + 6) > Opcodes.V1_7) {
throw new IllegalArgumentException();
}
// parses the constant pool
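Editor's note: this hunk is a genuine fix rather than reformatting. The class-version guard previously read the major version at absolute offset 6, which is wrong when the class data starts at a non-zero off inside a larger array; the header is magic (4 bytes), minor_version (2), major_version (2), so the major version sits at off + 6. A small, hypothetical illustration of that offset:

    // Hypothetical helper (not part of ASM): reads the big-endian major
    // class-file version of class data that starts at `off` inside a larger
    // byte array. Header layout: magic(4) minor_version(2) major_version(2).
    static int majorVersion(byte[] classData, int off) {
        return ((classData[off + 6] & 0xFF) << 8) | (classData[off + 7] & 0xFF);
    }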
@@ -176,35 +179,35 @@ public class ClassReader {
items[i] = index + 1;
int size;
switch (b[index]) {
- case ClassWriter.FIELD:
- case ClassWriter.METH:
- case ClassWriter.IMETH:
- case ClassWriter.INT:
- case ClassWriter.FLOAT:
- case ClassWriter.NAME_TYPE:
- case ClassWriter.INDY:
- size = 5;
- break;
- case ClassWriter.LONG:
- case ClassWriter.DOUBLE:
- size = 9;
- ++i;
- break;
- case ClassWriter.UTF8:
- size = 3 + readUnsignedShort(index + 1);
- if (size > max) {
- max = size;
- }
- break;
- case ClassWriter.HANDLE:
- size = 4;
- break;
- // case ClassWriter.CLASS:
- // case ClassWriter.STR:
- // case ClassWriter.MTYPE
- default:
- size = 3;
- break;
+ case ClassWriter.FIELD:
+ case ClassWriter.METH:
+ case ClassWriter.IMETH:
+ case ClassWriter.INT:
+ case ClassWriter.FLOAT:
+ case ClassWriter.NAME_TYPE:
+ case ClassWriter.INDY:
+ size = 5;
+ break;
+ case ClassWriter.LONG:
+ case ClassWriter.DOUBLE:
+ size = 9;
+ ++i;
+ break;
+ case ClassWriter.UTF8:
+ size = 3 + readUnsignedShort(index + 1);
+ if (size > max) {
+ max = size;
+ }
+ break;
+ case ClassWriter.HANDLE:
+ size = 4;
+ break;
+ // case ClassWriter.CLASS:
+ // case ClassWriter.STR:
+ // case ClassWriter.MTYPE
+ default:
+ size = 3;
+ break;
}
index += size;
}
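Editor's note: the reindented switch above computes, for each constant-pool tag, how many bytes the entry occupies, tag byte included: 5 for the two-index entries and the 4-byte constants, 9 for Long and Double (which also consume two pool slots), 3 plus the string length for Utf8, 4 for MethodHandle, and 3 for the remaining single-index tags. A hedged sketch using the standard JVM tag values, which the ClassWriter constants are assumed to mirror:

    // Hedged sketch: constant pool entry size (tag byte included) per standard
    // JVM tag value; utf8Length is only meaningful for the Utf8 tag.
    static int constantPoolEntrySize(int tag, int utf8Length) {
        switch (tag) {
        case 9: case 10: case 11:    // Fieldref, Methodref, InterfaceMethodref
        case 3: case 4:              // Integer, Float
        case 12: case 18:            // NameAndType, InvokeDynamic
            return 5;
        case 5: case 6:              // Long, Double (take two pool slots)
            return 9;
        case 1:                      // Utf8: tag + 2-byte length + bytes
            return 3 + utf8Length;
        case 15:                     // MethodHandle: tag + kind + index
            return 4;
        default:                     // Class, String, MethodType, ...
            return 3;
        }
    }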
@@ -249,8 +252,7 @@ public class ClassReader {
* @see ClassVisitor#visit(int, int, String, String, String, String[])
*/
public String getSuperName() {
- int n = items[readUnsignedShort(header + 4)];
- return n == 0 ? null : readUTF8(n, new char[maxStringLength]);
+ return readClass(header + 4, new char[maxStringLength]);
}
/**
@@ -280,7 +282,8 @@ public class ClassReader {
* Copies the constant pool data into the given {@link ClassWriter}. Should
* be called before the {@link #accept(ClassVisitor,int)} method.
*
- * @param classWriter the {@link ClassWriter} to copy constant pool into.
+ * @param classWriter
+ * the {@link ClassWriter} to copy constant pool into.
*/
void copyPool(final ClassWriter classWriter) {
char[] buf = new char[maxStringLength];
@@ -292,82 +295,63 @@ public class ClassReader {
Item item = new Item(i);
int nameType;
switch (tag) {
- case ClassWriter.FIELD:
- case ClassWriter.METH:
- case ClassWriter.IMETH:
- nameType = items[readUnsignedShort(index + 2)];
- item.set(tag,
- readClass(index, buf),
- readUTF8(nameType, buf),
- readUTF8(nameType + 2, buf));
- break;
-
- case ClassWriter.INT:
- item.set(readInt(index));
- break;
-
- case ClassWriter.FLOAT:
- item.set(Float.intBitsToFloat(readInt(index)));
- break;
-
- case ClassWriter.NAME_TYPE:
- item.set(tag,
- readUTF8(index, buf),
- readUTF8(index + 2, buf),
- null);
- break;
-
- case ClassWriter.LONG:
- item.set(readLong(index));
- ++i;
- break;
-
- case ClassWriter.DOUBLE:
- item.set(Double.longBitsToDouble(readLong(index)));
- ++i;
- break;
-
- case ClassWriter.UTF8: {
- String s = strings[i];
- if (s == null) {
- index = items[i];
- s = strings[i] = readUTF(index + 2,
- readUnsignedShort(index),
- buf);
- }
- item.set(tag, s, null, null);
+ case ClassWriter.FIELD:
+ case ClassWriter.METH:
+ case ClassWriter.IMETH:
+ nameType = items[readUnsignedShort(index + 2)];
+ item.set(tag, readClass(index, buf), readUTF8(nameType, buf),
+ readUTF8(nameType + 2, buf));
+ break;
+ case ClassWriter.INT:
+ item.set(readInt(index));
+ break;
+ case ClassWriter.FLOAT:
+ item.set(Float.intBitsToFloat(readInt(index)));
+ break;
+ case ClassWriter.NAME_TYPE:
+ item.set(tag, readUTF8(index, buf), readUTF8(index + 2, buf),
+ null);
+ break;
+ case ClassWriter.LONG:
+ item.set(readLong(index));
+ ++i;
+ break;
+ case ClassWriter.DOUBLE:
+ item.set(Double.longBitsToDouble(readLong(index)));
+ ++i;
+ break;
+ case ClassWriter.UTF8: {
+ String s = strings[i];
+ if (s == null) {
+ index = items[i];
+ s = strings[i] = readUTF(index + 2,
+ readUnsignedShort(index), buf);
}
- break;
-
- case ClassWriter.HANDLE: {
- int fieldOrMethodRef = items[readUnsignedShort(index + 1)];
- nameType = items[readUnsignedShort(fieldOrMethodRef + 2)];
- item.set(ClassWriter.HANDLE_BASE + readByte(index),
- readClass(fieldOrMethodRef, buf),
- readUTF8(nameType, buf),
- readUTF8(nameType + 2, buf));
-
+ item.set(tag, s, null, null);
+ break;
+ }
+ case ClassWriter.HANDLE: {
+ int fieldOrMethodRef = items[readUnsignedShort(index + 1)];
+ nameType = items[readUnsignedShort(fieldOrMethodRef + 2)];
+ item.set(ClassWriter.HANDLE_BASE + readByte(index),
+ readClass(fieldOrMethodRef, buf),
+ readUTF8(nameType, buf), readUTF8(nameType + 2, buf));
+ break;
+ }
+ case ClassWriter.INDY:
+ if (classWriter.bootstrapMethods == null) {
+ copyBootstrapMethods(classWriter, items2, buf);
}
- break;
-
-
- case ClassWriter.INDY:
- if (classWriter.bootstrapMethods == null) {
- copyBootstrapMethods(classWriter, items2, buf);
- }
- nameType = items[readUnsignedShort(index + 2)];
- item.set(readUTF8(nameType, buf),
- readUTF8(nameType + 2, buf),
- readUnsignedShort(index));
- break;
-
-
- // case ClassWriter.STR:
- // case ClassWriter.CLASS:
- // case ClassWriter.MTYPE
- default:
- item.set(tag, readUTF8(index, buf), null, null);
- break;
+ nameType = items[readUnsignedShort(index + 2)];
+ item.set(readUTF8(nameType, buf), readUTF8(nameType + 2, buf),
+ readUnsignedShort(index));
+ break;
+ // case ClassWriter.STR:
+ // case ClassWriter.CLASS:
+ // case ClassWriter.MTYPE
+ default:
+ item.set(tag, readUTF8(index, buf), null, null);
+ break;
}
int index2 = item.hashCode % items2.length;
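Editor's note: each copied Item is filed into a fixed-size bucket array by hashCode % items2.length, with collisions chained through the item's next field, newest entry first. A minimal sketch of that insert under illustrative names:

    // Minimal sketch of the chained-bucket insert used for Item above: pick a
    // bucket by hash modulo the table size and push the entry onto the front
    // of that bucket's singly linked list. Hash codes are kept non-negative.
    static final class Entry {
        final int hashCode;
        Entry next;
        Entry(int hashCode) { this.hashCode = hashCode; }
    }

    static void insert(Entry[] buckets, Entry e) {
        int index = e.hashCode % buckets.length;
        e.next = buckets[index];
        buckets[index] = e;
    }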
@@ -382,77 +366,59 @@ public class ClassReader {
classWriter.index = ll;
}
- private void copyBootstrapMethods(ClassWriter classWriter, Item[] items2, char[] buf) {
- int i, j, k, u, v;
-
- // skip class header
- v = header;
- v += 8 + (readUnsignedShort(v + 6) << 1);
-
- // skips fields and methods
- i = readUnsignedShort(v);
- v += 2;
- for (; i > 0; --i) {
- j = readUnsignedShort(v + 6);
- v += 8;
- for (; j > 0; --j) {
- v += 6 + readInt(v + 2);
+ /**
+ * Copies the bootstrap method data into the given {@link ClassWriter}.
+ * Should be called before the {@link #accept(ClassVisitor,int)} method.
+ *
+ * @param classWriter
+ * the {@link ClassWriter} to copy bootstrap methods into.
+ */
+ private void copyBootstrapMethods(final ClassWriter classWriter,
+ final Item[] items, final char[] c) {
+ // finds the "BootstrapMethods" attribute
+ int u = getAttributes();
+ boolean found = false;
+ for (int i = readUnsignedShort(u); i > 0; --i) {
+ String attrName = readUTF8(u + 2, c);
+ if ("BootstrapMethods".equals(attrName)) {
+ found = true;
+ break;
}
+ u += 6 + readInt(u + 4);
}
- i = readUnsignedShort(v);
- v += 2;
- for (; i > 0; --i) {
- j = readUnsignedShort(v + 6);
- v += 8;
- for (; j > 0; --j) {
- v += 6 + readInt(v + 2);
- }
+ if (!found) {
+ return;
}
-
- // read class attributes
- i = readUnsignedShort(v);
- v += 2;
- for (; i > 0; --i) {
- String attrName = readUTF8(v, buf);
- int size = readInt(v + 2);
- if ("BootstrapMethods".equals(attrName)) {
- int boostrapMethodCount = readUnsignedShort(v + 6);
- int x = v + 8;
- for (j = 0; j < boostrapMethodCount; j++) {
- int hashCode = readConst(readUnsignedShort(x), buf).hashCode();
- k = readUnsignedShort(x + 2);
- u = x + 4;
- for(; k > 0; --k) {
- hashCode ^= readConst(readUnsignedShort(u), buf).hashCode();
- u += 2;
- }
- Item item = new Item(j);
- item.set(x - v - 8, hashCode & 0x7FFFFFFF);
-
- int index2 = item.hashCode % items2.length;
- item.next = items2[index2];
- items2[index2] = item;
-
- x = u;
- }
-
- classWriter.bootstrapMethodsCount = boostrapMethodCount;
- ByteVector bootstrapMethods = new ByteVector(size + 62);
- bootstrapMethods.putByteArray(b, v + 8, size - 2);
- classWriter.bootstrapMethods = bootstrapMethods;
- return;
+ // copies the bootstrap methods in the class writer
+ int boostrapMethodCount = readUnsignedShort(u + 8);
+ for (int j = 0, v = u + 10; j < boostrapMethodCount; j++) {
+ int position = v - u - 10;
+ int hashCode = readConst(readUnsignedShort(v), c).hashCode();
+ for (int k = readUnsignedShort(v + 2); k > 0; --k) {
+ hashCode ^= readConst(readUnsignedShort(v + 4), c).hashCode();
+ v += 2;
}
- v += 6 + size;
+ v += 4;
+ Item item = new Item(j);
+ item.set(position, hashCode & 0x7FFFFFFF);
+ int index = item.hashCode % items.length;
+ item.next = items[index];
+ items[index] = item;
}
-
- // we are in trouble !!!
+ int attrSize = readInt(u + 4);
+ ByteVector bootstrapMethods = new ByteVector(attrSize + 62);
+ bootstrapMethods.putByteArray(b, u + 10, attrSize - 2);
+ classWriter.bootstrapMethodsCount = boostrapMethodCount;
+ classWriter.bootstrapMethods = bootstrapMethods;
}
/**
* Constructs a new {@link ClassReader} object.
*
- * @param is an input stream from which to read the class.
- * @throws IOException if a problem occurs during reading.
+ * @param is
+ * an input stream from which to read the class.
+ * @throws IOException
+ * if a problem occurs during reading.
*/
public ClassReader(final InputStream is) throws IOException {
this(readClass(is, false));
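Editor's note: the rewritten copyBootstrapMethods above now locates the "BootstrapMethods" attribute via getAttributes() and fingerprints each bootstrap method by XOR-ing the hash codes of its static arguments into the hash of its method handle, keeping only the low 31 bits so the result can serve as a bucket key. A hedged sketch of that mixing:

    // Hedged sketch of the fingerprint computed per bootstrap method above:
    // start from the handle constant's hash, XOR in each static argument's
    // hash, and clear the sign bit so the value is a valid bucket key.
    static int bootstrapMethodHash(Object handleConstant, Object[] arguments) {
        int hash = handleConstant.hashCode();
        for (Object argument : arguments) {
            hash ^= argument.hashCode();
        }
        return hash & 0x7FFFFFFF;
    }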
@@ -461,25 +427,30 @@ public class ClassReader {
/**
* Constructs a new {@link ClassReader} object.
*
- * @param name the binary qualified name of the class to be read.
- * @throws IOException if an exception occurs during reading.
+ * @param name
+ * the binary qualified name of the class to be read.
+ * @throws IOException
+ * if an exception occurs during reading.
*/
public ClassReader(final String name) throws IOException {
- this(readClass(ClassLoader.getSystemResourceAsStream(name.replace('.', '/')
- + ".class"), true));
+ this(readClass(
+ ClassLoader.getSystemResourceAsStream(name.replace('.', '/')
+ + ".class"), true));
}
/**
* Reads the bytecode of a class.
*
- * @param is an input stream from which to read the class.
- * @param close true to close the input stream after reading.
+ * @param is
+ * an input stream from which to read the class.
+ * @param close
+ * true to close the input stream after reading.
* @return the bytecode read from the given input stream.
- * @throws IOException if a problem occurs during reading.
+ * @throws IOException
+ * if a problem occurs during reading.
*/
private static byte[] readClass(final InputStream is, boolean close)
- throws IOException
- {
+ throws IOException {
if (is == null) {
throw new IOException("Class not found");
}
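Editor's note: readClass(InputStream, boolean), whose body is unchanged here, drains the stream into a byte array before parsing. A hedged sketch of doing the same with standard java.io classes only (this is not ASM's actual buffering strategy):

    // Hedged sketch (not ASM's implementation): drain an InputStream into a
    // byte array using only standard java.io classes, closing it if requested.
    static byte[] readAllBytes(java.io.InputStream is, boolean close)
            throws java.io.IOException {
        try {
            java.io.ByteArrayOutputStream out =
                    new java.io.ByteArrayOutputStream();
            byte[] buffer = new byte[4096];
            for (int n = is.read(buffer); n != -1; n = is.read(buffer)) {
                out.write(buffer, 0, n);
            }
            return out.toByteArray();
        } finally {
            if (close) {
                is.close();
            }
        }
    }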
@@ -520,14 +491,16 @@ public class ClassReader {
// ------------------------------------------------------------------------
/**
- * Makes the given visitor visit the Java class of this {@link ClassReader}.
- * This class is the one specified in the constructor (see
+ * Makes the given visitor visit the Java class of this {@link ClassReader}
+ * . This class is the one specified in the constructor (see
* {@link #ClassReader(byte[]) ClassReader}).
*
- * @param classVisitor the visitor that must visit this class.
- * @param flags option flags that can be used to modify the default behavior
- * of this class. See {@link #SKIP_DEBUG}, {@link #EXPAND_FRAMES},
- * {@link #SKIP_FRAMES}, {@link #SKIP_CODE}.
+ * @param classVisitor
+ * the visitor that must visit this class.
+ * @param flags
+ * option flags that can be used to modify the default behavior
+ * of this class. See {@link #SKIP_DEBUG}, {@link #EXPAND_FRAMES}
+ * , {@link #SKIP_FRAMES}, {@link #SKIP_CODE}.
*/
public void accept(final ClassVisitor classVisitor, final int flags) {
accept(classVisitor, new Attribute[0], flags);
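Editor's note: accept(ClassVisitor, int) simply forwards to the three-argument overload with no attribute prototypes. As a hedged usage sketch, assuming this fork keeps the standard ASM 4 entry points that the surrounding javadoc references (ClassVisitor(int api), Opcodes.ASM4, the SKIP_* flags), a caller might list a class's methods like this:

    import java.io.IOException;

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.ClassVisitor;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    // Hedged usage sketch, not taken from this patch: prints each method's
    // name and descriptor, skipping debug info and stack map frames.
    public class ListMethods {
        public static void main(String[] args) throws IOException {
            ClassReader reader = new ClassReader("java.lang.Runnable");
            reader.accept(new ClassVisitor(Opcodes.ASM4) {
                @Override
                public MethodVisitor visitMethod(int access, String name,
                        String desc, String signature, String[] exceptions) {
                    System.out.println(name + desc);
                    return null; // no interest in the method bodies
                }
            }, ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES);
        }
    }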
@@ -538,1117 +511,923 @@ public class ClassReader {
* This class is the one specified in the constructor (see
* {@link #ClassReader(byte[]) ClassReader}).
*
- * @param classVisitor the visitor that must visit this class.
- * @param attrs prototypes of the attributes that must be parsed during the
- * visit of the class. Any attribute whose type is not equal to the
- * type of one the prototypes will not be parsed: its byte array
- * value will be passed unchanged to the ClassWriter. <i>This may
- * corrupt it if this value contains references to the constant pool,
- * or has syntactic or semantic links with a class element that has
- * been transformed by a class adapter between the reader and the
- * writer</i>.
- * @param flags option flags that can be used to modify the default behavior
- * of this class. See {@link #SKIP_DEBUG}, {@link #EXPAND_FRAMES},
- * {@link #SKIP_FRAMES}, {@link #SKIP_CODE}.
+ * @param classVisitor
+ * the visitor that must visit this class.
+ * @param attrs
+ * prototypes of the attributes that must be parsed during the
+ * visit of the class. Any attribute whose type is not equal to
+ * the type of one of the prototypes will not be parsed: its byte
+ * array value will be passed unchanged to the ClassWriter.
+ * <i>This may corrupt it if this value contains references to
+ * the constant pool, or has syntactic or semantic links with a
+ * class element that has been transformed by a class adapter
+ * between the reader and the writer</i>.
+ * @param flags
+ * option flags that can be used to modify the default behavior
+ * of this class. See {@link #SKIP_DEBUG}, {@link #EXPAND_FRAMES}
+ * , {@link #SKIP_FRAMES}, {@link #SKIP_CODE}.
*/
- public void accept(
- final ClassVisitor classVisitor,
- final Attribute[] attrs,
- final int flags)
- {
- byte[] b = this.b; // the bytecode array
+ public void accept(final ClassVisitor classVisitor,
+ final Attribute[] attrs, final int flags) {
+ int u = header; // current offset in the class file
char[] c = new char[maxStringLength]; // buffer used to read strings
- int i, j, k; // loop variables
- int u, v, w; // indexes in b
- Attribute attr;
-
- int access;
- String name;
- String desc;
- String attrName;
- String signature;
- int anns = 0;
- int ianns = 0;
- Attribute cattrs = null;
-
- // visits the header
- u = header;
- access = readUnsignedShort(u);
- name = readClass(u + 2, c);
- v = items[readUnsignedShort(u + 4)];
- String superClassName = v == 0 ? null : readUTF8(v, c);
- String[] implementedItfs = new String[readUnsignedShort(u + 6)];
- w = 0;
+
+ Context context = new Context();
+ context.attrs = attrs;
+ context.flags = flags;
+ context.buffer = c;
+
+ // reads the class declaration
+ int access = readUnsignedShort(u);
+ String name = readClass(u + 2, c);
+ String superClass = readClass(u + 4, c);
+ String[] interfaces = new String[readUnsignedShort(u + 6)];
u += 8;
- for (i = 0; i < implementedItfs.length; ++i) {
- implementedItfs[i] = readClass(u, c);
+ for (int i = 0; i < interfaces.length; ++i) {
+ interfaces[i] = readClass(u, c);
u += 2;
}
- boolean skipCode = (flags & SKIP_CODE) != 0;
- boolean skipDebug = (flags & SKIP_DEBUG) != 0;
- boolean unzip = (flags & EXPAND_FRAMES) != 0;
-
- // skips fields and methods
- v = u;
- i = readUnsignedShort(v);
- v += 2;
- for (; i > 0; --i) {
- j = readUnsignedShort(v + 6);
- v += 8;
- for (; j > 0; --j) {
- v += 6 + readInt(v + 2);
- }
- }
- i = readUnsignedShort(v);
- v += 2;
- for (; i > 0; --i) {
- j = readUnsignedShort(v + 6);
- v += 8;
- for (; j > 0; --j) {
- v += 6 + readInt(v + 2);
- }
- }
- // reads the class's attributes
- signature = null;
+ // reads the class attributes
+ String signature = null;
String sourceFile = null;
String sourceDebug = null;
String enclosingOwner = null;
String enclosingName = null;
String enclosingDesc = null;
- int[] bootstrapMethods = null; // start indexed of the bsms
+ int anns = 0;
+ int ianns = 0;
+ int innerClasses = 0;
+ Attribute attributes = null;
- i = readUnsignedShort(v);
- v += 2;
- for (; i > 0; --i) {
- attrName = readUTF8(v, c);
+ u = getAttributes();
+ for (int i = readUnsignedShort(u); i > 0; --i) {
+ String attrName = readUTF8(u + 2, c);
// tests are sorted in decreasing frequency order
// (based on frequencies observed on typical classes)
if ("SourceFile".equals(attrName)) {
- sourceFile = readUTF8(v + 6, c);
+ sourceFile = readUTF8(u + 8, c);
} else if ("InnerClasses".equals(attrName)) {
- w = v + 6;
+ innerClasses = u + 8;
} else if ("EnclosingMethod".equals(attrName)) {
- enclosingOwner = readClass(v + 6, c);
- int item = readUnsignedShort(v + 8);
+ enclosingOwner = readClass(u + 8, c);
+ int item = readUnsignedShort(u + 10);
if (item != 0) {
enclosingName = readUTF8(items[item], c);
enclosingDesc = readUTF8(items[item] + 2, c);
}
} else if (SIGNATURES && "Signature".equals(attrName)) {
- signature = readUTF8(v + 6, c);
- } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) {
- anns = v + 6;
+ signature = readUTF8(u + 8, c);
+ } else if (ANNOTATIONS
+ && "RuntimeVisibleAnnotations".equals(attrName)) {
+ anns = u + 8;
} else if ("Deprecated".equals(attrName)) {
access |= Opcodes.ACC_DEPRECATED;
} else if ("Synthetic".equals(attrName)) {
- access |= Opcodes.ACC_SYNTHETIC | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
+ access |= Opcodes.ACC_SYNTHETIC
+ | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
} else if ("SourceDebugExtension".equals(attrName)) {
- int len = readInt(v + 2);
- sourceDebug = readUTF(v + 6, len, new char[len]);
- } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) {
- ianns = v + 6;
+ int len = readInt(u + 4);
+ sourceDebug = readUTF(u + 8, len, new char[len]);
+ } else if (ANNOTATIONS
+ && "RuntimeInvisibleAnnotations".equals(attrName)) {
+ ianns = u + 8;
} else if ("BootstrapMethods".equals(attrName)) {
- int boostrapMethodCount = readUnsignedShort(v + 6);
- bootstrapMethods = new int[boostrapMethodCount];
- int x = v + 8;
- for (j = 0; j < boostrapMethodCount; j++) {
- bootstrapMethods[j] = x;
- x += 2 + readUnsignedShort(x + 2) << 1;
+ int[] bootstrapMethods = new int[readUnsignedShort(u + 8)];
+ for (int j = 0, v = u + 10; j < bootstrapMethods.length; j++) {
+ bootstrapMethods[j] = v;
+ v += 2 + readUnsignedShort(v + 2) << 1;
}
+ context.bootstrapMethods = bootstrapMethods;
} else {
- attr = readAttribute(attrs,
- attrName,
- v + 6,
- readInt(v + 2),
- c,
- -1,
- null);
+ Attribute attr = readAttribute(attrs, attrName, u + 8,
+ readInt(u + 4), c, -1, null);
if (attr != null) {
- attr.next = cattrs;
- cattrs = attr;
+ attr.next = attributes;
+ attributes = attr;
}
}
- v += 6 + readInt(v + 2);
+ u += 6 + readInt(u + 4);
}
- // calls the visit method
- classVisitor.visit(readInt(4),
- access,
- name,
- signature,
- superClassName,
- implementedItfs);
-
- // calls the visitSource method
- if (!skipDebug && (sourceFile != null || sourceDebug != null)) {
+
+ // visits the class declaration
+ classVisitor.visit(readInt(items[1] - 7), access, name, signature,
+ superClass, interfaces);
+
+ // visits the source and debug info
+ if ((flags & SKIP_DEBUG) == 0
+ && (sourceFile != null || sourceDebug != null)) {
classVisitor.visitSource(sourceFile, sourceDebug);
}
- // calls the visitOuterClass method
+ // visits the outer class
if (enclosingOwner != null) {
- classVisitor.visitOuterClass(enclosingOwner,
- enclosingName,
+ classVisitor.visitOuterClass(enclosingOwner, enclosingName,
enclosingDesc);
}
// visits the class annotations
- if (ANNOTATIONS) {
- for (i = 1; i >= 0; --i) {
- v = i == 0 ? ianns : anns;
- if (v != 0) {
- j = readUnsignedShort(v);
- v += 2;
- for (; j > 0; --j) {
- v = readAnnotationValues(v + 2,
- c,
- true,
- classVisitor.visitAnnotation(readUTF8(v, c), i != 0));
- }
- }
+ if (ANNOTATIONS && anns != 0) {
+ for (int i = readUnsignedShort(anns), v = anns + 2; i > 0; --i) {
+ v = readAnnotationValues(v + 2, c, true,
+ classVisitor.visitAnnotation(readUTF8(v, c), true));
+ }
+ }
+ if (ANNOTATIONS && ianns != 0) {
+ for (int i = readUnsignedShort(ianns), v = ianns + 2; i > 0; --i) {
+ v = readAnnotationValues(v + 2, c, true,
+ classVisitor.visitAnnotation(readUTF8(v, c), false));
}
}
- // visits the class attributes
- while (cattrs != null) {
- attr = cattrs.next;
- cattrs.next = null;
- classVisitor.visitAttribute(cattrs);
- cattrs = attr;
+ // visits the attributes
+ while (attributes != null) {
+ Attribute attr = attributes.next;
+ attributes.next = null;
+ classVisitor.visitAttribute(attributes);
+ attributes = attr;
}
- // calls the visitInnerClass method
- if (w != 0) {
- i = readUnsignedShort(w);
- w += 2;
- for (; i > 0; --i) {
- classVisitor.visitInnerClass(readUnsignedShort(w) == 0
- ? null
- : readClass(w, c), readUnsignedShort(w + 2) == 0
- ? null
- : readClass(w + 2, c), readUnsignedShort(w + 4) == 0
- ? null
- : readUTF8(w + 4, c), readUnsignedShort(w + 6));
- w += 8;
+ // visits the inner classes
+ if (innerClasses != 0) {
+ int v = innerClasses + 2;
+ for (int i = readUnsignedShort(innerClasses); i > 0; --i) {
+ classVisitor.visitInnerClass(readClass(v, c),
+ readClass(v + 2, c), readUTF8(v + 4, c),
+ readUnsignedShort(v + 6));
+ v += 8;
}
}
- // visits the fields
- i = readUnsignedShort(u);
+ // visits the fields and methods
+ u = header + 10 + 2 * interfaces.length;
+ for (int i = readUnsignedShort(u - 2); i > 0; --i) {
+ u = readField(classVisitor, context, u);
+ }
u += 2;
- for (; i > 0; --i) {
- access = readUnsignedShort(u);
- name = readUTF8(u + 2, c);
- desc = readUTF8(u + 4, c);
- // visits the field's attributes and looks for a ConstantValue
- // attribute
- int fieldValueItem = 0;
- signature = null;
- anns = 0;
- ianns = 0;
- cattrs = null;
-
- j = readUnsignedShort(u + 6);
- u += 8;
- for (; j > 0; --j) {
- attrName = readUTF8(u, c);
- // tests are sorted in decreasing frequency order
- // (based on frequencies observed on typical classes)
- if ("ConstantValue".equals(attrName)) {
- fieldValueItem = readUnsignedShort(u + 6);
- } else if (SIGNATURES && "Signature".equals(attrName)) {
- signature = readUTF8(u + 6, c);
- } else if ("Deprecated".equals(attrName)) {
- access |= Opcodes.ACC_DEPRECATED;
- } else if ("Synthetic".equals(attrName)) {
- access |= Opcodes.ACC_SYNTHETIC | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
- } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) {
- anns = u + 6;
- } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) {
- ianns = u + 6;
- } else {
- attr = readAttribute(attrs,
- attrName,
- u + 6,
- readInt(u + 2),
- c,
- -1,
- null);
- if (attr != null) {
- attr.next = cattrs;
- cattrs = attr;
- }
+ for (int i = readUnsignedShort(u - 2); i > 0; --i) {
+ u = readMethod(classVisitor, context, u);
+ }
+
+ // visits the end of the class
+ classVisitor.visitEnd();
+ }
+
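Editor's note: the rewritten accept above delegates member parsing to readField and readMethod and finds the trailing class attributes through getAttributes(); the offsets it juggles follow directly from the top-level class-file layout, since header points at access_flags. A hedged sketch of the arithmetic behind "header + 10 + 2 * interfaces.length":

    // Hedged sketch of the offset arithmetic used above: after access_flags,
    // this_class, super_class, interfaces_count and the interface table, the
    // fields_count short comes next and the first field_info follows it.
    static int firstFieldOffset(int header, int interfacesCount) {
        return header
                + 2                   // access_flags
                + 2                   // this_class
                + 2                   // super_class
                + 2                   // interfaces_count
                + 2 * interfacesCount // interface indices
                + 2;                  // fields_count
    }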
+ /**
+ * Reads a field and makes the given visitor visit it.
+ *
+ * @param classVisitor
+ * the visitor that must visit the field.
+ * @param context
+ * information about the class being parsed.
+ * @param u
+ * the start offset of the field in the class file.
+ * @return the offset of the first byte following the field in the class.
+ */
+ private int readField(final ClassVisitor classVisitor,
+ final Context context, int u) {
+ // reads the field declaration
+ char[] c = context.buffer;
+ int access = readUnsignedShort(u);
+ String name = readUTF8(u + 2, c);
+ String desc = readUTF8(u + 4, c);
+ u += 6;
+
+ // reads the field attributes
+ String signature = null;
+ int anns = 0;
+ int ianns = 0;
+ Object value = null;
+ Attribute attributes = null;
+
+ for (int i = readUnsignedShort(u); i > 0; --i) {
+ String attrName = readUTF8(u + 2, c);
+ // tests are sorted in decreasing frequency order
+ // (based on frequencies observed on typical classes)
+ if ("ConstantValue".equals(attrName)) {
+ int item = readUnsignedShort(u + 8);
+ value = item == 0 ? null : readConst(item, c);
+ } else if (SIGNATURES && "Signature".equals(attrName)) {
+ signature = readUTF8(u + 8, c);
+ } else if ("Deprecated".equals(attrName)) {
+ access |= Opcodes.ACC_DEPRECATED;
+ } else if ("Synthetic".equals(attrName)) {
+ access |= Opcodes.ACC_SYNTHETIC
+ | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
+ } else if (ANNOTATIONS
+ && "RuntimeVisibleAnnotations".equals(attrName)) {
+ anns = u + 8;
+ } else if (ANNOTATIONS
+ && "RuntimeInvisibleAnnotations".equals(attrName)) {
+ ianns = u + 8;
+ } else {
+ Attribute attr = readAttribute(context.attrs, attrName, u + 8,
+ readInt(u + 4), c, -1, null);
+ if (attr != null) {
+ attr.next = attributes;
+ attributes = attr;
}
- u += 6 + readInt(u + 2);
}
- // visits the field
- FieldVisitor fv = classVisitor.visitField(access,
- name,
- desc,
- signature,
- fieldValueItem == 0 ? null : readConst(fieldValueItem, c));
- // visits the field annotations and attributes
- if (fv != null) {
- if (ANNOTATIONS) {
- for (j = 1; j >= 0; --j) {
- v = j == 0 ? ianns : anns;
- if (v != 0) {
- k = readUnsignedShort(v);
- v += 2;
- for (; k > 0; --k) {
- v = readAnnotationValues(v + 2,
- c,
- true,
- fv.visitAnnotation(readUTF8(v, c), j != 0));
- }
- }
- }
- }
- while (cattrs != null) {
- attr = cattrs.next;
- cattrs.next = null;
- fv.visitAttribute(cattrs);
- cattrs = attr;
- }
- fv.visitEnd();
+ u += 6 + readInt(u + 4);
+ }
+ u += 2;
+
+ // visits the field declaration
+ FieldVisitor fv = classVisitor.visitField(access, name, desc,
+ signature, value);
+ if (fv == null) {
+ return u;
+ }
+
+ // visits the field annotations
+ if (ANNOTATIONS && anns != 0) {
+ for (int i = readUnsignedShort(anns), v = anns + 2; i > 0; --i) {
+ v = readAnnotationValues(v + 2, c, true,
+ fv.visitAnnotation(readUTF8(v, c), true));
+ }
+ }
+ if (ANNOTATIONS && ianns != 0) {
+ for (int i = readUnsignedShort(ianns), v = ianns + 2; i > 0; --i) {
+ v = readAnnotationValues(v + 2, c, true,
+ fv.visitAnnotation(readUTF8(v, c), false));
}
}
- // visits the methods
- i = readUnsignedShort(u);
- u += 2;
- for (; i > 0; --i) {
- int u0 = u + 6;
- access = readUnsignedShort(u);
- name = readUTF8(u + 2, c);
- desc = readUTF8(u + 4, c);
- signature = null;
- anns = 0;
- ianns = 0;
- int dann = 0;
- int mpanns = 0;
- int impanns = 0;
- cattrs = null;
- v = 0;
- w = 0;
-
- // looks for Code and Exceptions attributes
- j = readUnsignedShort(u + 6);
- u += 8;
- for (; j > 0; --j) {
- attrName = readUTF8(u, c);
- int attrSize = readInt(u + 2);
- u += 6;
- // tests are sorted in decreasing frequency order
- // (based on frequencies observed on typical classes)
- if ("Code".equals(attrName)) {
- if (!skipCode) {
- v = u;
- }
- } else if ("Exceptions".equals(attrName)) {
- w = u;
- } else if (SIGNATURES && "Signature".equals(attrName)) {
- signature = readUTF8(u, c);
- } else if ("Deprecated".equals(attrName)) {
- access |= Opcodes.ACC_DEPRECATED;
- } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) {
- anns = u;
- } else if (ANNOTATIONS && "AnnotationDefault".equals(attrName)) {
- dann = u;
- } else if ("Synthetic".equals(attrName)) {
- access |= Opcodes.ACC_SYNTHETIC | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
- } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) {
- ianns = u;
- } else if (ANNOTATIONS && "RuntimeVisibleParameterAnnotations".equals(attrName))
- {
- mpanns = u;
- } else if (ANNOTATIONS && "RuntimeInvisibleParameterAnnotations".equals(attrName))
- {
- impanns = u;
- } else {
- attr = readAttribute(attrs,
- attrName,
- u,
- attrSize,
- c,
- -1,
- null);
- if (attr != null) {
- attr.next = cattrs;
- cattrs = attr;
- }
+ // visits the field attributes
+ while (attributes != null) {
+ Attribute attr = attributes.next;
+ attributes.next = null;
+ fv.visitAttribute(attributes);
+ attributes = attr;
+ }
+
+ // visits the end of the field
+ fv.visitEnd();
+
+ return u;
+ }
+
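Editor's note: readField above scans the field's attributes with the stride used throughout this file: an attribute_info entry is a 2-byte name index, a 4-byte length, then length bytes, so the next entry starts 6 + length bytes further on (the code keeps its cursor two bytes earlier, on the count, which is why it reads the name at u + 2 and the length at u + 4). A hypothetical sketch with the cursor on the entry itself:

    // Hypothetical sketch of the attribute scan above: read each entry's
    // big-endian 4-byte length at offset +2 from the entry start and step
    // forward by 6 + length to reach the next attribute_info entry.
    static int skipAttributes(byte[] b, int u, int attributeCount) {
        for (int i = attributeCount; i > 0; --i) {
            int length = ((b[u + 2] & 0xFF) << 24) | ((b[u + 3] & 0xFF) << 16)
                    | ((b[u + 4] & 0xFF) << 8) | (b[u + 5] & 0xFF);
            u += 6 + length;
        }
        return u; // offset of the first byte after the last attribute
    }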
+ /**
+ * Reads a method and makes the given visitor visit it.
+ *
+ * @param classVisitor
+ * the visitor that must visit the method.
+ * @param context
+ * information about the class being parsed.
+ * @param u
+ * the start offset of the method in the class file.
+ * @return the offset of the first byte following the method in the class.
+ */
+ private int readMethod(final ClassVisitor classVisitor,
+ final Context context, int u) {
+ // reads the method declaration
+ char[] c = context.buffer;
+ int access = readUnsignedShort(u);
+ String name = readUTF8(u + 2, c);
+ String desc = readUTF8(u + 4, c);
+ u += 6;
+
+ // reads the method attributes
+ int code = 0;
+ int exception = 0;
+ String[] exceptions = null;
+ String signature = null;
+ int anns = 0;
+ int ianns = 0;
+ int dann = 0;
+ int mpanns = 0;
+ int impanns = 0;
+ int firstAttribute = u;
+ Attribute attributes = null;
+
+ for (int i = readUnsignedShort(u); i > 0; --i) {
+ String attrName = readUTF8(u + 2, c);
+ // tests are sorted in decreasing frequency order
+ // (based on frequencies observed on typical classes)
+ if ("Code".equals(attrName)) {
+ if ((context.flags & SKIP_CODE) == 0) {
+ code = u + 8;
}
- u += attrSize;
- }
- // reads declared exceptions
- String[] exceptions;
- if (w == 0) {
- exceptions = null;
+ } else if ("Exceptions".equals(attrName)) {
+ exceptions = new String[readUnsignedShort(u + 8)];
+ exception = u + 10;
+ for (int j = 0; j < exceptions.length; ++j) {
+ exceptions[j] = readClass(exception, c);
+ exception += 2;
+ }
+ } else if (SIGNATURES && "Signature".equals(attrName)) {
+ signature = readUTF8(u + 8, c);
+ } else if ("Deprecated".equals(attrName)) {
+ access |= Opcodes.ACC_DEPRECATED;
+ } else if (ANNOTATIONS
+ && "RuntimeVisibleAnnotations".equals(attrName)) {
+ anns = u + 8;
+ } else if (ANNOTATIONS && "AnnotationDefault".equals(attrName)) {
+ dann = u + 8;
+ } else if ("Synthetic".equals(attrName)) {
+ access |= Opcodes.ACC_SYNTHETIC
+ | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
+ } else if (ANNOTATIONS
+ && "RuntimeInvisibleAnnotations".equals(attrName)) {
+ ianns = u + 8;
+ } else if (ANNOTATIONS
+ && "RuntimeVisibleParameterAnnotations".equals(attrName)) {
+ mpanns = u + 8;
+ } else if (ANNOTATIONS
+ && "RuntimeInvisibleParameterAnnotations".equals(attrName)) {
+ impanns = u + 8;
} else {
- exceptions = new String[readUnsignedShort(w)];
- w += 2;
- for (j = 0; j < exceptions.length; ++j) {
- exceptions[j] = readClass(w, c);
- w += 2;
+ Attribute attr = readAttribute(context.attrs, attrName, u + 8,
+ readInt(u + 4), c, -1, null);
+ if (attr != null) {
+ attr.next = attributes;
+ attributes = attr;
}
}
+ u += 6 + readInt(u + 4);
+ }
+ u += 2;
- // visits the method's code, if any
- MethodVisitor mv = classVisitor.visitMethod(access,
- name,
- desc,
- signature,
- exceptions);
+ // visits the method declaration
+ MethodVisitor mv = classVisitor.visitMethod(access, name, desc,
+ signature, exceptions);
+ if (mv == null) {
+ return u;
+ }
- if (mv != null) {
- /*
- * if the returned MethodVisitor is in fact a MethodWriter, it
- * means there is no method adapter between the reader and the
- * writer. If, in addition, the writer's constant pool was
- * copied from this reader (mw.cw.cr == this), and the signature
- * and exceptions of the method have not been changed, then it
- * is possible to skip all visit events and just copy the
- * original code of the method to the writer (the access, name
- * and descriptor can have been changed, this is not important
- * since they are not copied as is from the reader).
- */
- if (WRITER && mv instanceof MethodWriter) {
- MethodWriter mw = (MethodWriter) mv;
- if (mw.cw.cr == this) {
- if (signature == mw.signature) {
- boolean sameExceptions = false;
- if (exceptions == null) {
- sameExceptions = mw.exceptionCount == 0;
- } else {
- if (exceptions.length == mw.exceptionCount) {
- sameExceptions = true;
- for (j = exceptions.length - 1; j >= 0; --j)
- {
- w -= 2;
- if (mw.exceptions[j] != readUnsignedShort(w))
- {
- sameExceptions = false;
- break;
- }
- }
- }
- }
- if (sameExceptions) {
- /*
- * we do not copy directly the code into
- * MethodWriter to save a byte array copy
- * operation. The real copy will be done in
- * ClassWriter.toByteArray().
- */
- mw.classReaderOffset = u0;
- mw.classReaderLength = u - u0;
- continue;
- }
+ /*
+ * if the returned MethodVisitor is in fact a MethodWriter, it means
+ * there is no method adapter between the reader and the writer. If, in
+ * addition, the writer's constant pool was copied from this reader
+ * (mw.cw.cr == this), and the signature and exceptions of the method
+ * have not been changed, then it is possible to skip all visit events
+ * and just copy the original code of the method to the writer (the
+ * access, name and descriptor can have been changed, this is not
+ * important since they are not copied as is from the reader).
+ */
+ if (WRITER && mv instanceof MethodWriter) {
+ MethodWriter mw = (MethodWriter) mv;
+ if (mw.cw.cr == this && signature == mw.signature) {
+ boolean sameExceptions = false;
+ if (exceptions == null) {
+ sameExceptions = mw.exceptionCount == 0;
+ } else if (exceptions.length == mw.exceptionCount) {
+ sameExceptions = true;
+ for (int j = exceptions.length - 1; j >= 0; --j) {
+ exception -= 2;
+ if (mw.exceptions[j] != readUnsignedShort(exception)) {
+ sameExceptions = false;
+ break;
}
}
}
-
- if (ANNOTATIONS && dann != 0) {
- AnnotationVisitor dv = mv.visitAnnotationDefault();
- readAnnotationValue(dann, c, null, dv);
- if (dv != null) {
- dv.visitEnd();
- }
- }
- if (ANNOTATIONS) {
- for (j = 1; j >= 0; --j) {
- w = j == 0 ? ianns : anns;
- if (w != 0) {
- k = readUnsignedShort(w);
- w += 2;
- for (; k > 0; --k) {
- w = readAnnotationValues(w + 2,
- c,
- true,
- mv.visitAnnotation(readUTF8(w, c), j != 0));
- }
- }
- }
+ if (sameExceptions) {
+ /*
+ * we do not copy directly the code into MethodWriter to
+ * save a byte array copy operation. The real copy will be
+ * done in ClassWriter.toByteArray().
+ */
+ mw.classReaderOffset = firstAttribute;
+ mw.classReaderLength = u - firstAttribute;
+ return u;
}
- if (ANNOTATIONS && mpanns != 0) {
- readParameterAnnotations(mpanns, desc, c, true, mv);
+ }
+ }
+
+ // visits the method annotations
+ if (ANNOTATIONS && dann != 0) {
+ AnnotationVisitor dv = mv.visitAnnotationDefault();
+ readAnnotationValue(dann, c, null, dv);
+ if (dv != null) {
+ dv.visitEnd();
+ }
+ }
+ if (ANNOTATIONS && anns != 0) {
+ for (int i = readUnsignedShort(anns), v = anns + 2; i > 0; --i) {
+ v = readAnnotationValues(v + 2, c, true,
+ mv.visitAnnotation(readUTF8(v, c), true));
+ }
+ }
+ if (ANNOTATIONS && ianns != 0) {
+ for (int i = readUnsignedShort(ianns), v = ianns + 2; i > 0; --i) {
+ v = readAnnotationValues(v + 2, c, true,
+ mv.visitAnnotation(readUTF8(v, c), false));
+ }
+ }
+ if (ANNOTATIONS && mpanns != 0) {
+ readParameterAnnotations(mpanns, desc, c, true, mv);
+ }
+ if (ANNOTATIONS && impanns != 0) {
+ readParameterAnnotations(impanns, desc, c, false, mv);
+ }
+
+ // visits the method attributes
+ while (attributes != null) {
+ Attribute attr = attributes.next;
+ attributes.next = null;
+ mv.visitAttribute(attributes);
+ attributes = attr;
+ }
+
+ // visits the method code
+ if (code != 0) {
+ context.access = access;
+ context.name = name;
+ context.desc = desc;
+ mv.visitCode();
+ readCode(mv, context, code);
+ }
+
+ // visits the end of the method
+ mv.visitEnd();
+
+ return u;
+ }
+
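Editor's note: the block above keeps ASM's fast path: when no adapter sits between reader and writer, the constant pool was copied from this reader, and the signature and exception table are unchanged, the method body is not re-visited at all; only its offset and length are recorded so the raw bytes can be copied later in ClassWriter.toByteArray(). A hedged sketch of the exception-table comparison that gates it:

    // Hedged sketch of the guard above: the fast path is only taken when the
    // writer's recorded exception indices match the ones stored in the class
    // file, compared back to front exactly as readMethod does.
    static boolean sameExceptions(int[] writerExceptions, int[] readerExceptions) {
        if (writerExceptions.length != readerExceptions.length) {
            return false;
        }
        for (int j = writerExceptions.length - 1; j >= 0; --j) {
            if (writerExceptions[j] != readerExceptions[j]) {
                return false;
            }
        }
        return true;
    }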
+ /**
+ * Reads the bytecode of a method and makes the given visitor visit it.
+ *
+ * @param mv
+ * the visitor that must visit the method's code.
+ * @param context
+ * information about the class being parsed.
+ * @param u
+ * the start offset of the code attribute in the class file.
+ */
+ private void readCode(final MethodVisitor mv, final Context context, int u) {
+ // reads the header
+ byte[] b = this.b;
+ char[] c = context.buffer;
+ int maxStack = readUnsignedShort(u);
+ int maxLocals = readUnsignedShort(u + 2);
+ int codeLength = readInt(u + 4);
+ u += 8;
+
+ // reads the bytecode to find the labels
+ int codeStart = u;
+ int codeEnd = u + codeLength;
+ Label[] labels = new Label[codeLength + 2];
+ readLabel(codeLength + 1, labels);
+ while (u < codeEnd) {
+ int offset = u - codeStart;
+ int opcode = b[u] & 0xFF;
+ switch (ClassWriter.TYPE[opcode]) {
+ case ClassWriter.NOARG_INSN:
+ case ClassWriter.IMPLVAR_INSN:
+ u += 1;
+ break;
+ case ClassWriter.LABEL_INSN:
+ readLabel(offset + readShort(u + 1), labels);
+ u += 3;
+ break;
+ case ClassWriter.LABELW_INSN:
+ readLabel(offset + readInt(u + 1), labels);
+ u += 5;
+ break;
+ case ClassWriter.WIDE_INSN:
+ opcode = b[u + 1] & 0xFF;
+ if (opcode == Opcodes.IINC) {
+ u += 6;
+ } else {
+ u += 4;
}
- if (ANNOTATIONS && impanns != 0) {
- readParameterAnnotations(impanns, desc, c, false, mv);
+ break;
+ case ClassWriter.TABL_INSN:
+ // skips 0 to 3 padding bytes
+ u = u + 4 - (offset & 3);
+ // reads instruction
+ readLabel(offset + readInt(u), labels);
+ for (int i = readInt(u + 8) - readInt(u + 4) + 1; i > 0; --i) {
+ readLabel(offset + readInt(u + 12), labels);
+ u += 4;
}
- while (cattrs != null) {
- attr = cattrs.next;
- cattrs.next = null;
- mv.visitAttribute(cattrs);
- cattrs = attr;
+ u += 12;
+ break;
+ case ClassWriter.LOOK_INSN:
+ // skips 0 to 3 padding bytes
+ u = u + 4 - (offset & 3);
+ // reads instruction
+ readLabel(offset + readInt(u), labels);
+ for (int i = readInt(u + 4); i > 0; --i) {
+ readLabel(offset + readInt(u + 12), labels);
+ u += 8;
}
+ u += 8;
+ break;
+ case ClassWriter.VAR_INSN:
+ case ClassWriter.SBYTE_INSN:
+ case ClassWriter.LDC_INSN:
+ u += 2;
+ break;
+ case ClassWriter.SHORT_INSN:
+ case ClassWriter.LDCW_INSN:
+ case ClassWriter.FIELDORMETH_INSN:
+ case ClassWriter.TYPE_INSN:
+ case ClassWriter.IINC_INSN:
+ u += 3;
+ break;
+ case ClassWriter.ITFMETH_INSN:
+ case ClassWriter.INDYMETH_INSN:
+ u += 5;
+ break;
+ // case MANA_INSN:
+ default:
+ u += 4;
+ break;
}
+ }
- if (mv != null && v != 0) {
- int maxStack = readUnsignedShort(v);
- int maxLocals = readUnsignedShort(v + 2);
- int codeLength = readInt(v + 4);
- v += 8;
+ // reads the try catch entries to find the labels, and also visits them
+ for (int i = readUnsignedShort(u); i > 0; --i) {
+ Label start = readLabel(readUnsignedShort(u + 2), labels);
+ Label end = readLabel(readUnsignedShort(u + 4), labels);
+ Label handler = readLabel(readUnsignedShort(u + 6), labels);
+ String type = readUTF8(items[readUnsignedShort(u + 8)], c);
+ mv.visitTryCatchBlock(start, end, handler, type);
+ u += 8;
+ }
+ u += 2;
- int codeStart = v;
- int codeEnd = v + codeLength;
-
- mv.visitCode();
-
- // 1st phase: finds the labels
- int label;
- Label[] labels = new Label[codeLength + 2];
- readLabel(codeLength + 1, labels);
- while (v < codeEnd) {
- w = v - codeStart;
- int opcode = b[v] & 0xFF;
- switch (ClassWriter.TYPE[opcode]) {
- case ClassWriter.NOARG_INSN:
- case ClassWriter.IMPLVAR_INSN:
- v += 1;
- break;
- case ClassWriter.LABEL_INSN:
- readLabel(w + readShort(v + 1), labels);
- v += 3;
- break;
- case ClassWriter.LABELW_INSN:
- readLabel(w + readInt(v + 1), labels);
- v += 5;
- break;
- case ClassWriter.WIDE_INSN:
- opcode = b[v + 1] & 0xFF;
- if (opcode == Opcodes.IINC) {
- v += 6;
- } else {
- v += 4;
- }
- break;
- case ClassWriter.TABL_INSN:
- // skips 0 to 3 padding bytes*
- v = v + 4 - (w & 3);
- // reads instruction
- readLabel(w + readInt(v), labels);
- j = readInt(v + 8) - readInt(v + 4) + 1;
- v += 12;
- for (; j > 0; --j) {
- readLabel(w + readInt(v), labels);
- v += 4;
- }
- break;
- case ClassWriter.LOOK_INSN:
- // skips 0 to 3 padding bytes*
- v = v + 4 - (w & 3);
- // reads instruction
- readLabel(w + readInt(v), labels);
- j = readInt(v + 4);
- v += 8;
- for (; j > 0; --j) {
- readLabel(w + readInt(v + 4), labels);
- v += 8;
- }
- break;
- case ClassWriter.VAR_INSN:
- case ClassWriter.SBYTE_INSN:
- case ClassWriter.LDC_INSN:
- v += 2;
- break;
- case ClassWriter.SHORT_INSN:
- case ClassWriter.LDCW_INSN:
- case ClassWriter.FIELDORMETH_INSN:
- case ClassWriter.TYPE_INSN:
- case ClassWriter.IINC_INSN:
- v += 3;
- break;
- case ClassWriter.ITFMETH_INSN:
- case ClassWriter.INDYMETH_INSN:
- v += 5;
- break;
- // case MANA_INSN:
- default:
- v += 4;
- break;
- }
- }
- // parses the try catch entries
- j = readUnsignedShort(v);
- v += 2;
- for (; j > 0; --j) {
- Label start = readLabel(readUnsignedShort(v), labels);
- Label end = readLabel(readUnsignedShort(v + 2), labels);
- Label handler = readLabel(readUnsignedShort(v + 4), labels);
- int type = readUnsignedShort(v + 6);
- if (type == 0) {
- mv.visitTryCatchBlock(start, end, handler, null);
- } else {
- mv.visitTryCatchBlock(start,
- end,
- handler,
- readUTF8(items[type], c));
- }
- v += 8;
- }
- // parses the local variable, line number tables, and code
- // attributes
- int varTable = 0;
- int varTypeTable = 0;
- int stackMap = 0;
- int stackMapSize = 0;
- int frameCount = 0;
- int frameMode = 0;
- int frameOffset = 0;
- int frameLocalCount = 0;
- int frameLocalDiff = 0;
- int frameStackCount = 0;
- Object[] frameLocal = null;
- Object[] frameStack = null;
- boolean zip = true;
- cattrs = null;
- j = readUnsignedShort(v);
- v += 2;
- for (; j > 0; --j) {
- attrName = readUTF8(v, c);
- if ("LocalVariableTable".equals(attrName)) {
- if (!skipDebug) {
- varTable = v + 6;
- k = readUnsignedShort(v + 6);
- w = v + 8;
- for (; k > 0; --k) {
- label = readUnsignedShort(w);
- if (labels[label] == null) {
- readLabel(label, labels).status |= Label.DEBUG;
- }
- label += readUnsignedShort(w + 2);
- if (labels[label] == null) {
- readLabel(label, labels).status |= Label.DEBUG;
- }
- w += 10;
- }
- }
- } else if ("LocalVariableTypeTable".equals(attrName)) {
- varTypeTable = v + 6;
- } else if ("LineNumberTable".equals(attrName)) {
- if (!skipDebug) {
- k = readUnsignedShort(v + 6);
- w = v + 8;
- for (; k > 0; --k) {
- label = readUnsignedShort(w);
- if (labels[label] == null) {
- readLabel(label, labels).status |= Label.DEBUG;
- }
- labels[label].line = readUnsignedShort(w + 2);
- w += 4;
- }
- }
- } else if (FRAMES && "StackMapTable".equals(attrName)) {
- if ((flags & SKIP_FRAMES) == 0) {
- stackMap = v + 8;
- stackMapSize = readInt(v + 2);
- frameCount = readUnsignedShort(v + 6);
+ // reads the code attributes
+ int varTable = 0;
+ int varTypeTable = 0;
+ boolean zip = true;
+ boolean unzip = (context.flags & EXPAND_FRAMES) != 0;
+ int stackMap = 0;
+ int stackMapSize = 0;
+ int frameCount = 0;
+ Context frame = null;
+ Attribute attributes = null;
+
+ for (int i = readUnsignedShort(u); i > 0; --i) {
+ String attrName = readUTF8(u + 2, c);
+ if ("LocalVariableTable".equals(attrName)) {
+ if ((context.flags & SKIP_DEBUG) == 0) {
+ varTable = u + 8;
+ for (int j = readUnsignedShort(u + 8), v = u; j > 0; --j) {
+ int label = readUnsignedShort(v + 10);
+ if (labels[label] == null) {
+ readLabel(label, labels).status |= Label.DEBUG;
}
- /*
- * here we do not extract the labels corresponding to
- * the attribute content. This would require a full
- * parsing of the attribute, which would need to be
- * repeated in the second phase (see below). Instead the
- * content of the attribute is read one frame at a time
- * (i.e. after a frame has been visited, the next frame
- * is read), and the labels it contains are also
- * extracted one frame at a time. Thanks to the ordering
- * of frames, having only a "one frame lookahead" is not
- * a problem, i.e. it is not possible to see an offset
- * smaller than the offset of the current insn and for
- * which no Label exist.
- */
- /*
- * This is not true for UNINITIALIZED type offsets. We
- * solve this by parsing the stack map table without a
- * full decoding (see below).
- */
- } else if (FRAMES && "StackMap".equals(attrName)) {
- if ((flags & SKIP_FRAMES) == 0) {
- stackMap = v + 8;
- stackMapSize = readInt(v + 2);
- frameCount = readUnsignedShort(v + 6);
- zip = false;
- }
- /*
- * IMPORTANT! here we assume that the frames are
- * ordered, as in the StackMapTable attribute, although
- * this is not guaranteed by the attribute format.
- */
- } else {
- for (k = 0; k < attrs.length; ++k) {
- if (attrs[k].type.equals(attrName)) {
- attr = attrs[k].read(this,
- v + 6,
- readInt(v + 2),
- c,
- codeStart - 8,
- labels);
- if (attr != null) {
- attr.next = cattrs;
- cattrs = attr;
- }
- }
+ label += readUnsignedShort(v + 12);
+ if (labels[label] == null) {
+ readLabel(label, labels).status |= Label.DEBUG;
}
+ v += 10;
}
- v += 6 + readInt(v + 2);
}
-
- // 2nd phase: visits each instruction
- if (FRAMES && stackMap != 0) {
- // creates the very first (implicit) frame from the method
- // descriptor
- frameLocal = new Object[maxLocals];
- frameStack = new Object[maxStack];
- if (unzip) {
- int local = 0;
- if ((access & Opcodes.ACC_STATIC) == 0) {
- if ("<init>".equals(name)) {
- frameLocal[local++] = Opcodes.UNINITIALIZED_THIS;
- } else {
- frameLocal[local++] = readClass(header + 2, c);
- }
- }
- j = 1;
- loop: while (true) {
- k = j;
- switch (desc.charAt(j++)) {
- case 'Z':
- case 'C':
- case 'B':
- case 'S':
- case 'I':
- frameLocal[local++] = Opcodes.INTEGER;
- break;
- case 'F':
- frameLocal[local++] = Opcodes.FLOAT;
- break;
- case 'J':
- frameLocal[local++] = Opcodes.LONG;
- break;
- case 'D':
- frameLocal[local++] = Opcodes.DOUBLE;
- break;
- case '[':
- while (desc.charAt(j) == '[') {
- ++j;
- }
- if (desc.charAt(j) == 'L') {
- ++j;
- while (desc.charAt(j) != ';') {
- ++j;
- }
- }
- frameLocal[local++] = desc.substring(k, ++j);
- break;
- case 'L':
- while (desc.charAt(j) != ';') {
- ++j;
- }
- frameLocal[local++] = desc.substring(k + 1,
- j++);
- break;
- default:
- break loop;
- }
+ } else if ("LocalVariableTypeTable".equals(attrName)) {
+ varTypeTable = u + 8;
+ } else if ("LineNumberTable".equals(attrName)) {
+ if ((context.flags & SKIP_DEBUG) == 0) {
+ for (int j = readUnsignedShort(u + 8), v = u; j > 0; --j) {
+ int label = readUnsignedShort(v + 10);
+ if (labels[label] == null) {
+ readLabel(label, labels).status |= Label.DEBUG;
}
- frameLocalCount = local;
+ labels[label].line = readUnsignedShort(v + 12);
+ v += 4;
}
- /*
- * for the first explicit frame the offset is not
- * offset_delta + 1 but only offset_delta; setting the
- * implicit frame offset to -1 allow the use of the
- * "offset_delta + 1" rule in all cases
- */
- frameOffset = -1;
- /*
- * Finds labels for UNINITIALIZED frame types. Instead of
- * decoding each element of the stack map table, we look
- * for 3 consecutive bytes that "look like" an UNINITIALIZED
- * type (tag 8, offset within code bounds, NEW instruction
- * at this offset). We may find false positives (i.e. not
- * real UNINITIALIZED types), but this should be rare, and
- * the only consequence will be the creation of an unneeded
- * label. This is better than creating a label for each NEW
- * instruction, and faster than fully decoding the whole
- * stack map table.
- */
- for (j = stackMap; j < stackMap + stackMapSize - 2; ++j) {
- if (b[j] == 8) { // UNINITIALIZED FRAME TYPE
- k = readUnsignedShort(j + 1);
- if (k >= 0 && k < codeLength) { // potential offset
- if ((b[codeStart + k] & 0xFF) == Opcodes.NEW) { // NEW at this offset
- readLabel(k, labels);
- }
- }
+ }
+ } else if (FRAMES && "StackMapTable".equals(attrName)) {
+ if ((context.flags & SKIP_FRAMES) == 0) {
+ stackMap = u + 10;
+ stackMapSize = readInt(u + 4);
+ frameCount = readUnsignedShort(u + 8);
+ }
+ /*
+ * here we do not extract the labels corresponding to the
+ * attribute content. This would require a full parsing of the
+ * attribute, which would need to be repeated in the second
+ * phase (see below). Instead the content of the attribute is
+ * read one frame at a time (i.e. after a frame has been
+ * visited, the next frame is read), and the labels it contains
+ * are also extracted one frame at a time. Thanks to the
+ * ordering of frames, having only a "one frame lookahead" is
+ * not a problem, i.e. it is not possible to see an offset
+ * smaller than the offset of the current insn and for which no
+                 * Label exists.
+ */
+ /*
+ * This is not true for UNINITIALIZED type offsets. We solve
+ * this by parsing the stack map table without a full decoding
+ * (see below).
+ */
+ } else if (FRAMES && "StackMap".equals(attrName)) {
+ if ((context.flags & SKIP_FRAMES) == 0) {
+ zip = false;
+ stackMap = u + 10;
+ stackMapSize = readInt(u + 4);
+ frameCount = readUnsignedShort(u + 8);
+ }
+ /*
+ * IMPORTANT! here we assume that the frames are ordered, as in
+ * the StackMapTable attribute, although this is not guaranteed
+ * by the attribute format.
+ */
+ } else {
+ for (int j = 0; j < context.attrs.length; ++j) {
+ if (context.attrs[j].type.equals(attrName)) {
+ Attribute attr = context.attrs[j].read(this, u + 8,
+ readInt(u + 4), c, codeStart - 8, labels);
+ if (attr != null) {
+ attr.next = attributes;
+ attributes = attr;
}
}
}
- v = codeStart;
- Label l;
- while (v < codeEnd) {
- w = v - codeStart;
-
- l = labels[w];
- if (l != null) {
- mv.visitLabel(l);
- if (!skipDebug && l.line > 0) {
- mv.visitLineNumber(l.line, l);
+ }
+ u += 6 + readInt(u + 4);
+ }
+ u += 2;
+
+ // generates the first (implicit) stack map frame
+ if (FRAMES && stackMap != 0) {
+ /*
+ * for the first explicit frame the offset is not offset_delta + 1
+ * but only offset_delta; setting the implicit frame offset to -1
+             * allows the use of the "offset_delta + 1" rule in all cases
+ */
+ frame = context;
+ frame.offset = -1;
+ frame.mode = 0;
+ frame.localCount = 0;
+ frame.localDiff = 0;
+ frame.stackCount = 0;
+ frame.local = new Object[maxLocals];
+ frame.stack = new Object[maxStack];
+ if (unzip) {
+ getImplicitFrame(context);
+ }
+ /*
+ * Finds labels for UNINITIALIZED frame types. Instead of decoding
+ * each element of the stack map table, we look for 3 consecutive
+ * bytes that "look like" an UNINITIALIZED type (tag 8, offset
+ * within code bounds, NEW instruction at this offset). We may find
+ * false positives (i.e. not real UNINITIALIZED types), but this
+ * should be rare, and the only consequence will be the creation of
+ * an unneeded label. This is better than creating a label for each
+ * NEW instruction, and faster than fully decoding the whole stack
+ * map table.
+ */
+ for (int i = stackMap; i < stackMap + stackMapSize - 2; ++i) {
+ if (b[i] == 8) { // UNINITIALIZED FRAME TYPE
+ int v = readUnsignedShort(i + 1);
+ if (v >= 0 && v < codeLength) {
+ if ((b[codeStart + v] & 0xFF) == Opcodes.NEW) {
+ readLabel(v, labels);
}
}
+ }
+ }
+ }
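        // Worked illustration of the scan above (a sketch, relying on the standard JVM
        // opcode value NEW = 0xBB): a byte sequence such as { 8, 0x00, 0x2A } is treated
        // as a potential Uninitialized_variable_info when offset 0x2A lies inside the
        // code array and b[codeStart + 0x2A] == (byte) 0xBB. A false positive, i.e. three
        // unrelated bytes that happen to match, only creates one extra, unused Label.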
- while (FRAMES && frameLocal != null
- && (frameOffset == w || frameOffset == -1))
- {
- // if there is a frame for this offset,
- // makes the visitor visit it,
- // and reads the next frame if there is one.
- if (!zip || unzip) {
- mv.visitFrame(Opcodes.F_NEW,
- frameLocalCount,
- frameLocal,
- frameStackCount,
- frameStack);
- } else if (frameOffset != -1) {
- mv.visitFrame(frameMode,
- frameLocalDiff,
- frameLocal,
- frameStackCount,
- frameStack);
- }
+ // visits the instructions
+ u = codeStart;
+ while (u < codeEnd) {
+ int offset = u - codeStart;
+
+ // visits the label and line number for this offset, if any
+ Label l = labels[offset];
+ if (l != null) {
+ mv.visitLabel(l);
+ if ((context.flags & SKIP_DEBUG) == 0 && l.line > 0) {
+ mv.visitLineNumber(l.line, l);
+ }
+ }
- if (frameCount > 0) {
- int tag, delta, n;
- if (zip) {
- tag = b[stackMap++] & 0xFF;
- } else {
- tag = MethodWriter.FULL_FRAME;
- frameOffset = -1;
- }
- frameLocalDiff = 0;
- if (tag < MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME)
- {
- delta = tag;
- frameMode = Opcodes.F_SAME;
- frameStackCount = 0;
- } else if (tag < MethodWriter.RESERVED) {
- delta = tag
- - MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME;
- stackMap = readFrameType(frameStack,
- 0,
- stackMap,
- c,
- labels);
- frameMode = Opcodes.F_SAME1;
- frameStackCount = 1;
- } else {
- delta = readUnsignedShort(stackMap);
- stackMap += 2;
- if (tag == MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED)
- {
- stackMap = readFrameType(frameStack,
- 0,
- stackMap,
- c,
- labels);
- frameMode = Opcodes.F_SAME1;
- frameStackCount = 1;
- } else if (tag >= MethodWriter.CHOP_FRAME
- && tag < MethodWriter.SAME_FRAME_EXTENDED)
- {
- frameMode = Opcodes.F_CHOP;
- frameLocalDiff = MethodWriter.SAME_FRAME_EXTENDED
- - tag;
- frameLocalCount -= frameLocalDiff;
- frameStackCount = 0;
- } else if (tag == MethodWriter.SAME_FRAME_EXTENDED)
- {
- frameMode = Opcodes.F_SAME;
- frameStackCount = 0;
- } else if (tag < MethodWriter.FULL_FRAME) {
- j = unzip ? frameLocalCount : 0;
- for (k = tag
- - MethodWriter.SAME_FRAME_EXTENDED; k > 0; k--)
- {
- stackMap = readFrameType(frameLocal,
- j++,
- stackMap,
- c,
- labels);
- }
- frameMode = Opcodes.F_APPEND;
- frameLocalDiff = tag
- - MethodWriter.SAME_FRAME_EXTENDED;
- frameLocalCount += frameLocalDiff;
- frameStackCount = 0;
- } else { // if (tag == FULL_FRAME) {
- frameMode = Opcodes.F_FULL;
- n = frameLocalDiff = frameLocalCount = readUnsignedShort(stackMap);
- stackMap += 2;
- for (j = 0; n > 0; n--) {
- stackMap = readFrameType(frameLocal,
- j++,
- stackMap,
- c,
- labels);
- }
- n = frameStackCount = readUnsignedShort(stackMap);
- stackMap += 2;
- for (j = 0; n > 0; n--) {
- stackMap = readFrameType(frameStack,
- j++,
- stackMap,
- c,
- labels);
- }
- }
- }
- frameOffset += delta + 1;
- readLabel(frameOffset, labels);
-
- --frameCount;
- } else {
- frameLocal = null;
- }
+ // visits the frame for this offset, if any
+ while (FRAMES && frame != null
+ && (frame.offset == offset || frame.offset == -1)) {
+ // if there is a frame for this offset, makes the visitor visit
+ // it, and reads the next frame if there is one.
+ if (frame.offset != -1) {
+ if (!zip || unzip) {
+ mv.visitFrame(Opcodes.F_NEW, frame.localCount,
+ frame.local, frame.stackCount, frame.stack);
+ } else {
+ mv.visitFrame(frame.mode, frame.localDiff, frame.local,
+ frame.stackCount, frame.stack);
}
+ }
+ if (frameCount > 0) {
+ stackMap = readFrame(stackMap, zip, unzip, labels, frame);
+ --frameCount;
+ } else {
+ frame = null;
+ }
+ }
- int opcode = b[v] & 0xFF;
- switch (ClassWriter.TYPE[opcode]) {
- case ClassWriter.NOARG_INSN:
- mv.visitInsn(opcode);
- v += 1;
- break;
- case ClassWriter.IMPLVAR_INSN:
- if (opcode > Opcodes.ISTORE) {
- opcode -= 59; // ISTORE_0
- mv.visitVarInsn(Opcodes.ISTORE + (opcode >> 2),
- opcode & 0x3);
- } else {
- opcode -= 26; // ILOAD_0
- mv.visitVarInsn(Opcodes.ILOAD + (opcode >> 2),
- opcode & 0x3);
- }
- v += 1;
- break;
- case ClassWriter.LABEL_INSN:
- mv.visitJumpInsn(opcode, labels[w
- + readShort(v + 1)]);
- v += 3;
- break;
- case ClassWriter.LABELW_INSN:
- mv.visitJumpInsn(opcode - 33, labels[w
- + readInt(v + 1)]);
- v += 5;
- break;
- case ClassWriter.WIDE_INSN:
- opcode = b[v + 1] & 0xFF;
- if (opcode == Opcodes.IINC) {
- mv.visitIincInsn(readUnsignedShort(v + 2),
- readShort(v + 4));
- v += 6;
- } else {
- mv.visitVarInsn(opcode,
- readUnsignedShort(v + 2));
- v += 4;
- }
- break;
- case ClassWriter.TABL_INSN:
- // skips 0 to 3 padding bytes
- v = v + 4 - (w & 3);
- // reads instruction
- label = w + readInt(v);
- int min = readInt(v + 4);
- int max = readInt(v + 8);
- v += 12;
- Label[] table = new Label[max - min + 1];
- for (j = 0; j < table.length; ++j) {
- table[j] = labels[w + readInt(v)];
- v += 4;
- }
- mv.visitTableSwitchInsn(min,
- max,
- labels[label],
- table);
- break;
- case ClassWriter.LOOK_INSN:
- // skips 0 to 3 padding bytes
- v = v + 4 - (w & 3);
- // reads instruction
- label = w + readInt(v);
- j = readInt(v + 4);
- v += 8;
- int[] keys = new int[j];
- Label[] values = new Label[j];
- for (j = 0; j < keys.length; ++j) {
- keys[j] = readInt(v);
- values[j] = labels[w + readInt(v + 4)];
- v += 8;
- }
- mv.visitLookupSwitchInsn(labels[label],
- keys,
- values);
- break;
- case ClassWriter.VAR_INSN:
- mv.visitVarInsn(opcode, b[v + 1] & 0xFF);
- v += 2;
- break;
- case ClassWriter.SBYTE_INSN:
- mv.visitIntInsn(opcode, b[v + 1]);
- v += 2;
- break;
- case ClassWriter.SHORT_INSN:
- mv.visitIntInsn(opcode, readShort(v + 1));
- v += 3;
- break;
- case ClassWriter.LDC_INSN:
- mv.visitLdcInsn(readConst(b[v + 1] & 0xFF, c));
- v += 2;
- break;
- case ClassWriter.LDCW_INSN:
- mv.visitLdcInsn(readConst(readUnsignedShort(v + 1),
- c));
- v += 3;
- break;
- case ClassWriter.FIELDORMETH_INSN:
- case ClassWriter.ITFMETH_INSN: {
- int cpIndex = items[readUnsignedShort(v + 1)];
- String iowner = readClass(cpIndex, c);
- cpIndex = items[readUnsignedShort(cpIndex + 2)];
- String iname = readUTF8(cpIndex, c);
- String idesc = readUTF8(cpIndex + 2, c);
- if (opcode < Opcodes.INVOKEVIRTUAL) {
- mv.visitFieldInsn(opcode, iowner, iname, idesc);
- } else {
- mv.visitMethodInsn(opcode, iowner, iname, idesc);
- }
- if (opcode == Opcodes.INVOKEINTERFACE) {
- v += 5;
- } else {
- v += 3;
- }
- break;
- }
- case ClassWriter.INDYMETH_INSN: {
- int cpIndex = items[readUnsignedShort(v + 1)];
- int bsmIndex = bootstrapMethods[readUnsignedShort(cpIndex)];
- cpIndex = items[readUnsignedShort(cpIndex + 2)];
- String iname = readUTF8(cpIndex, c);
- String idesc = readUTF8(cpIndex + 2, c);
-
- int mhIndex = readUnsignedShort(bsmIndex);
- Handle bsm = (Handle) readConst(mhIndex, c);
- int bsmArgCount = readUnsignedShort(bsmIndex + 2);
- Object[] bsmArgs = new Object[bsmArgCount];
- bsmIndex += 4;
- for(int a = 0; a < bsmArgCount; a++) {
- int argIndex = readUnsignedShort(bsmIndex);
- bsmArgs[a] = readConst(argIndex, c);
- bsmIndex += 2;
- }
- mv.visitInvokeDynamicInsn(iname, idesc, bsm, bsmArgs);
-
- v += 5;
- break;
- }
- case ClassWriter.TYPE_INSN:
- mv.visitTypeInsn(opcode, readClass(v + 1, c));
- v += 3;
- break;
- case ClassWriter.IINC_INSN:
- mv.visitIincInsn(b[v + 1] & 0xFF, b[v + 2]);
- v += 3;
- break;
- // case MANA_INSN:
- default:
- mv.visitMultiANewArrayInsn(readClass(v + 1, c),
- b[v + 3] & 0xFF);
- v += 4;
- break;
- }
+ // visits the instruction at this offset
+ int opcode = b[u] & 0xFF;
+ switch (ClassWriter.TYPE[opcode]) {
+ case ClassWriter.NOARG_INSN:
+ mv.visitInsn(opcode);
+ u += 1;
+ break;
+ case ClassWriter.IMPLVAR_INSN:
+ if (opcode > Opcodes.ISTORE) {
+ opcode -= 59; // ISTORE_0
+ mv.visitVarInsn(Opcodes.ISTORE + (opcode >> 2),
+ opcode & 0x3);
+ } else {
+ opcode -= 26; // ILOAD_0
+ mv.visitVarInsn(Opcodes.ILOAD + (opcode >> 2), opcode & 0x3);
}
- l = labels[codeEnd - codeStart];
- if (l != null) {
- mv.visitLabel(l);
+ u += 1;
+ break;
+ case ClassWriter.LABEL_INSN:
+ mv.visitJumpInsn(opcode, labels[offset + readShort(u + 1)]);
+ u += 3;
+ break;
+ case ClassWriter.LABELW_INSN:
+ mv.visitJumpInsn(opcode - 33, labels[offset + readInt(u + 1)]);
+ u += 5;
+ break;
+ case ClassWriter.WIDE_INSN:
+ opcode = b[u + 1] & 0xFF;
+ if (opcode == Opcodes.IINC) {
+ mv.visitIincInsn(readUnsignedShort(u + 2), readShort(u + 4));
+ u += 6;
+ } else {
+ mv.visitVarInsn(opcode, readUnsignedShort(u + 2));
+ u += 4;
}
- // visits the local variable tables
- if (!skipDebug && varTable != 0) {
- int[] typeTable = null;
- if (varTypeTable != 0) {
- k = readUnsignedShort(varTypeTable) * 3;
- w = varTypeTable + 2;
- typeTable = new int[k];
- while (k > 0) {
- typeTable[--k] = w + 6; // signature
- typeTable[--k] = readUnsignedShort(w + 8); // index
- typeTable[--k] = readUnsignedShort(w); // start
- w += 10;
- }
- }
- k = readUnsignedShort(varTable);
- w = varTable + 2;
- for (; k > 0; --k) {
- int start = readUnsignedShort(w);
- int length = readUnsignedShort(w + 2);
- int index = readUnsignedShort(w + 8);
- String vsignature = null;
- if (typeTable != null) {
- for (int a = 0; a < typeTable.length; a += 3) {
- if (typeTable[a] == start
- && typeTable[a + 1] == index)
- {
- vsignature = readUTF8(typeTable[a + 2], c);
- break;
- }
- }
- }
- mv.visitLocalVariable(readUTF8(w + 4, c),
- readUTF8(w + 6, c),
- vsignature,
- labels[start],
- labels[start + length],
- index);
- w += 10;
- }
+ break;
+ case ClassWriter.TABL_INSN: {
+ // skips 0 to 3 padding bytes
+ u = u + 4 - (offset & 3);
+ // reads instruction
+ int label = offset + readInt(u);
+ int min = readInt(u + 4);
+ int max = readInt(u + 8);
+ Label[] table = new Label[max - min + 1];
+ u += 12;
+ for (int i = 0; i < table.length; ++i) {
+ table[i] = labels[offset + readInt(u)];
+ u += 4;
+ }
+ mv.visitTableSwitchInsn(min, max, labels[label], table);
+ break;
+ }
+ case ClassWriter.LOOK_INSN: {
+ // skips 0 to 3 padding bytes
+ u = u + 4 - (offset & 3);
+ // reads instruction
+ int label = offset + readInt(u);
+ int len = readInt(u + 4);
+ int[] keys = new int[len];
+ Label[] values = new Label[len];
+ u += 8;
+ for (int i = 0; i < len; ++i) {
+ keys[i] = readInt(u);
+ values[i] = labels[offset + readInt(u + 4)];
+ u += 8;
+ }
+ mv.visitLookupSwitchInsn(labels[label], keys, values);
+ break;
+ }
+ case ClassWriter.VAR_INSN:
+ mv.visitVarInsn(opcode, b[u + 1] & 0xFF);
+ u += 2;
+ break;
+ case ClassWriter.SBYTE_INSN:
+ mv.visitIntInsn(opcode, b[u + 1]);
+ u += 2;
+ break;
+ case ClassWriter.SHORT_INSN:
+ mv.visitIntInsn(opcode, readShort(u + 1));
+ u += 3;
+ break;
+ case ClassWriter.LDC_INSN:
+ mv.visitLdcInsn(readConst(b[u + 1] & 0xFF, c));
+ u += 2;
+ break;
+ case ClassWriter.LDCW_INSN:
+ mv.visitLdcInsn(readConst(readUnsignedShort(u + 1), c));
+ u += 3;
+ break;
+ case ClassWriter.FIELDORMETH_INSN:
+ case ClassWriter.ITFMETH_INSN: {
+ int cpIndex = items[readUnsignedShort(u + 1)];
+ String iowner = readClass(cpIndex, c);
+ cpIndex = items[readUnsignedShort(cpIndex + 2)];
+ String iname = readUTF8(cpIndex, c);
+ String idesc = readUTF8(cpIndex + 2, c);
+ if (opcode < Opcodes.INVOKEVIRTUAL) {
+ mv.visitFieldInsn(opcode, iowner, iname, idesc);
+ } else {
+ mv.visitMethodInsn(opcode, iowner, iname, idesc);
}
- // visits the other attributes
- while (cattrs != null) {
- attr = cattrs.next;
- cattrs.next = null;
- mv.visitAttribute(cattrs);
- cattrs = attr;
+ if (opcode == Opcodes.INVOKEINTERFACE) {
+ u += 5;
+ } else {
+ u += 3;
}
- // visits the max stack and max locals values
- mv.visitMaxs(maxStack, maxLocals);
+ break;
}
+ case ClassWriter.INDYMETH_INSN: {
+ int cpIndex = items[readUnsignedShort(u + 1)];
+ int bsmIndex = context.bootstrapMethods[readUnsignedShort(cpIndex)];
+ Handle bsm = (Handle) readConst(readUnsignedShort(bsmIndex), c);
+ int bsmArgCount = readUnsignedShort(bsmIndex + 2);
+ Object[] bsmArgs = new Object[bsmArgCount];
+ bsmIndex += 4;
+ for (int i = 0; i < bsmArgCount; i++) {
+ bsmArgs[i] = readConst(readUnsignedShort(bsmIndex), c);
+ bsmIndex += 2;
+ }
+ cpIndex = items[readUnsignedShort(cpIndex + 2)];
+ String iname = readUTF8(cpIndex, c);
+ String idesc = readUTF8(cpIndex + 2, c);
+ mv.visitInvokeDynamicInsn(iname, idesc, bsm, bsmArgs);
+ u += 5;
+ break;
+ }
+ case ClassWriter.TYPE_INSN:
+ mv.visitTypeInsn(opcode, readClass(u + 1, c));
+ u += 3;
+ break;
+ case ClassWriter.IINC_INSN:
+ mv.visitIincInsn(b[u + 1] & 0xFF, b[u + 2]);
+ u += 3;
+ break;
+ // case MANA_INSN:
+ default:
+ mv.visitMultiANewArrayInsn(readClass(u + 1, c), b[u + 3] & 0xFF);
+ u += 4;
+ break;
+ }
+ }
+ if (labels[codeLength] != null) {
+ mv.visitLabel(labels[codeLength]);
+ }
- if (mv != null) {
- mv.visitEnd();
+ // visits the local variable tables
+ if ((context.flags & SKIP_DEBUG) == 0 && varTable != 0) {
+ int[] typeTable = null;
+ if (varTypeTable != 0) {
+ u = varTypeTable + 2;
+ typeTable = new int[readUnsignedShort(varTypeTable) * 3];
+ for (int i = typeTable.length; i > 0;) {
+ typeTable[--i] = u + 6; // signature
+ typeTable[--i] = readUnsignedShort(u + 8); // index
+ typeTable[--i] = readUnsignedShort(u); // start
+ u += 10;
+ }
+ }
+ u = varTable + 2;
+ for (int i = readUnsignedShort(varTable); i > 0; --i) {
+ int start = readUnsignedShort(u);
+ int length = readUnsignedShort(u + 2);
+ int index = readUnsignedShort(u + 8);
+ String vsignature = null;
+ if (typeTable != null) {
+ for (int j = 0; j < typeTable.length; j += 3) {
+ if (typeTable[j] == start && typeTable[j + 1] == index) {
+ vsignature = readUTF8(typeTable[j + 2], c);
+ break;
+ }
+ }
+ }
+ mv.visitLocalVariable(readUTF8(u + 4, c), readUTF8(u + 6, c),
+ vsignature, labels[start], labels[start + length],
+ index);
+ u += 10;
}
}
- // visits the end of the class
- classVisitor.visitEnd();
+ // visits the code attributes
+ while (attributes != null) {
+ Attribute attr = attributes.next;
+ attributes.next = null;
+ mv.visitAttribute(attributes);
+ attributes = attr;
+ }
+
+ // visits the max stack and max locals values
+ mv.visitMaxs(maxStack, maxLocals);
}
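For readers tracing this method from the outside, here is a minimal, self-contained sketch of how the events it emits (labels, line numbers, frames) reach user code. The class name FrameDump and the command-line path are illustrative only; the shaded package name scala.tools.asm is assumed from the file paths in this patch, and EXPAND_FRAMES is the flag that drives the "unzip" path above, so every frame is delivered uncompressed as F_NEW.

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.ClassVisitor;
    import scala.tools.asm.Label;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    public class FrameDump {
        public static void main(String[] args) throws Exception {
            // args[0]: path to a .class file (illustrative)
            new ClassReader(new java.io.FileInputStream(args[0])).accept(
                    new ClassVisitor(Opcodes.ASM4) {
                        @Override
                        public MethodVisitor visitMethod(int access, String name, String desc,
                                String signature, String[] exceptions) {
                            System.out.println("method " + name + desc);
                            return new MethodVisitor(Opcodes.ASM4) {
                                @Override
                                public void visitFrame(int type, int nLocal, Object[] local,
                                        int nStack, Object[] stack) {
                                    // with EXPAND_FRAMES, type is always Opcodes.F_NEW
                                    System.out.println("  frame: " + nLocal + " locals, "
                                            + nStack + " stack");
                                }
                                @Override
                                public void visitLineNumber(int line, Label start) {
                                    System.out.println("  line " + line);
                                }
                            };
                        }
                    }, ClassReader.EXPAND_FRAMES);
        }
    }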
/**
* Reads parameter annotations and makes the given visitor visit them.
*
- * @param v start offset in {@link #b b} of the annotations to be read.
- * @param desc the method descriptor.
- * @param buf buffer to be used to call {@link #readUTF8 readUTF8},
- * {@link #readClass(int,char[]) readClass} or
- * {@link #readConst readConst}.
- * @param visible <tt>true</tt> if the annotations to be read are visible
- * at runtime.
- * @param mv the visitor that must visit the annotations.
+ * @param v
+ * start offset in {@link #b b} of the annotations to be read.
+ * @param desc
+ * the method descriptor.
+ * @param buf
+ * buffer to be used to call {@link #readUTF8 readUTF8},
+ * {@link #readClass(int,char[]) readClass} or {@link #readConst
+ * readConst}.
+ * @param visible
+ * <tt>true</tt> if the annotations to be read are visible at
+ * runtime.
+ * @param mv
+ * the visitor that must visit the annotations.
*/
- private void readParameterAnnotations(
- int v,
- final String desc,
- final char[] buf,
- final boolean visible,
- final MethodVisitor mv)
- {
+ private void readParameterAnnotations(int v, final String desc,
+ final char[] buf, final boolean visible, final MethodVisitor mv) {
int i;
int n = b[v++] & 0xFF;
// workaround for a bug in javac (javac compiler generates a parameter
@@ -1679,21 +1458,22 @@ public class ClassReader {
/**
* Reads the values of an annotation and makes the given visitor visit them.
*
- * @param v the start offset in {@link #b b} of the values to be read
- * (including the unsigned short that gives the number of values).
- * @param buf buffer to be used to call {@link #readUTF8 readUTF8},
- * {@link #readClass(int,char[]) readClass} or
- * {@link #readConst readConst}.
- * @param named if the annotation values are named or not.
- * @param av the visitor that must visit the values.
+ * @param v
+ * the start offset in {@link #b b} of the values to be read
+ * (including the unsigned short that gives the number of
+ * values).
+ * @param buf
+ * buffer to be used to call {@link #readUTF8 readUTF8},
+ * {@link #readClass(int,char[]) readClass} or {@link #readConst
+ * readConst}.
+ * @param named
+ * if the annotation values are named or not.
+ * @param av
+ * the visitor that must visit the values.
* @return the end offset of the annotation values.
*/
- private int readAnnotationValues(
- int v,
- final char[] buf,
- final boolean named,
- final AnnotationVisitor av)
- {
+ private int readAnnotationValues(int v, final char[] buf,
+ final boolean named, final AnnotationVisitor av) {
int i = readUnsignedShort(v);
v += 2;
if (named) {
@@ -1714,210 +1494,371 @@ public class ClassReader {
/**
* Reads a value of an annotation and makes the given visitor visit it.
*
- * @param v the start offset in {@link #b b} of the value to be read (<i>not
- * including the value name constant pool index</i>).
- * @param buf buffer to be used to call {@link #readUTF8 readUTF8},
- * {@link #readClass(int,char[]) readClass} or
- * {@link #readConst readConst}.
- * @param name the name of the value to be read.
- * @param av the visitor that must visit the value.
+ * @param v
+ * the start offset in {@link #b b} of the value to be read
+ * (<i>not including the value name constant pool index</i>).
+ * @param buf
+ * buffer to be used to call {@link #readUTF8 readUTF8},
+ * {@link #readClass(int,char[]) readClass} or {@link #readConst
+ * readConst}.
+ * @param name
+ * the name of the value to be read.
+ * @param av
+ * the visitor that must visit the value.
* @return the end offset of the annotation value.
*/
- private int readAnnotationValue(
- int v,
- final char[] buf,
- final String name,
- final AnnotationVisitor av)
- {
+ private int readAnnotationValue(int v, final char[] buf, final String name,
+ final AnnotationVisitor av) {
int i;
if (av == null) {
switch (b[v] & 0xFF) {
- case 'e': // enum_const_value
- return v + 5;
- case '@': // annotation_value
- return readAnnotationValues(v + 3, buf, true, null);
- case '[': // array_value
- return readAnnotationValues(v + 1, buf, false, null);
- default:
- return v + 3;
+ case 'e': // enum_const_value
+ return v + 5;
+ case '@': // annotation_value
+ return readAnnotationValues(v + 3, buf, true, null);
+ case '[': // array_value
+ return readAnnotationValues(v + 1, buf, false, null);
+ default:
+ return v + 3;
}
}
switch (b[v++] & 0xFF) {
- case 'I': // pointer to CONSTANT_Integer
- case 'J': // pointer to CONSTANT_Long
- case 'F': // pointer to CONSTANT_Float
- case 'D': // pointer to CONSTANT_Double
- av.visit(name, readConst(readUnsignedShort(v), buf));
- v += 2;
- break;
- case 'B': // pointer to CONSTANT_Byte
- av.visit(name,
- new Byte((byte) readInt(items[readUnsignedShort(v)])));
- v += 2;
- break;
- case 'Z': // pointer to CONSTANT_Boolean
- av.visit(name, readInt(items[readUnsignedShort(v)]) == 0
- ? Boolean.FALSE
- : Boolean.TRUE);
- v += 2;
- break;
- case 'S': // pointer to CONSTANT_Short
- av.visit(name,
- new Short((short) readInt(items[readUnsignedShort(v)])));
- v += 2;
+ case 'I': // pointer to CONSTANT_Integer
+ case 'J': // pointer to CONSTANT_Long
+ case 'F': // pointer to CONSTANT_Float
+ case 'D': // pointer to CONSTANT_Double
+ av.visit(name, readConst(readUnsignedShort(v), buf));
+ v += 2;
+ break;
+ case 'B': // pointer to CONSTANT_Byte
+ av.visit(name,
+ new Byte((byte) readInt(items[readUnsignedShort(v)])));
+ v += 2;
+ break;
+ case 'Z': // pointer to CONSTANT_Boolean
+ av.visit(name,
+ readInt(items[readUnsignedShort(v)]) == 0 ? Boolean.FALSE
+ : Boolean.TRUE);
+ v += 2;
+ break;
+ case 'S': // pointer to CONSTANT_Short
+ av.visit(name, new Short(
+ (short) readInt(items[readUnsignedShort(v)])));
+ v += 2;
+ break;
+ case 'C': // pointer to CONSTANT_Char
+ av.visit(name, new Character(
+ (char) readInt(items[readUnsignedShort(v)])));
+ v += 2;
+ break;
+ case 's': // pointer to CONSTANT_Utf8
+ av.visit(name, readUTF8(v, buf));
+ v += 2;
+ break;
+ case 'e': // enum_const_value
+ av.visitEnum(name, readUTF8(v, buf), readUTF8(v + 2, buf));
+ v += 4;
+ break;
+ case 'c': // class_info
+ av.visit(name, Type.getType(readUTF8(v, buf)));
+ v += 2;
+ break;
+ case '@': // annotation_value
+ v = readAnnotationValues(v + 2, buf, true,
+ av.visitAnnotation(name, readUTF8(v, buf)));
+ break;
+ case '[': // array_value
+ int size = readUnsignedShort(v);
+ v += 2;
+ if (size == 0) {
+ return readAnnotationValues(v - 2, buf, false,
+ av.visitArray(name));
+ }
+ switch (this.b[v++] & 0xFF) {
+ case 'B':
+ byte[] bv = new byte[size];
+ for (i = 0; i < size; i++) {
+ bv[i] = (byte) readInt(items[readUnsignedShort(v)]);
+ v += 3;
+ }
+ av.visit(name, bv);
+ --v;
break;
- case 'C': // pointer to CONSTANT_Char
- av.visit(name,
- new Character((char) readInt(items[readUnsignedShort(v)])));
- v += 2;
+ case 'Z':
+ boolean[] zv = new boolean[size];
+ for (i = 0; i < size; i++) {
+ zv[i] = readInt(items[readUnsignedShort(v)]) != 0;
+ v += 3;
+ }
+ av.visit(name, zv);
+ --v;
break;
- case 's': // pointer to CONSTANT_Utf8
- av.visit(name, readUTF8(v, buf));
- v += 2;
+ case 'S':
+ short[] sv = new short[size];
+ for (i = 0; i < size; i++) {
+ sv[i] = (short) readInt(items[readUnsignedShort(v)]);
+ v += 3;
+ }
+ av.visit(name, sv);
+ --v;
break;
- case 'e': // enum_const_value
- av.visitEnum(name, readUTF8(v, buf), readUTF8(v + 2, buf));
- v += 4;
+ case 'C':
+ char[] cv = new char[size];
+ for (i = 0; i < size; i++) {
+ cv[i] = (char) readInt(items[readUnsignedShort(v)]);
+ v += 3;
+ }
+ av.visit(name, cv);
+ --v;
break;
- case 'c': // class_info
- av.visit(name, Type.getType(readUTF8(v, buf)));
- v += 2;
+ case 'I':
+ int[] iv = new int[size];
+ for (i = 0; i < size; i++) {
+ iv[i] = readInt(items[readUnsignedShort(v)]);
+ v += 3;
+ }
+ av.visit(name, iv);
+ --v;
break;
- case '@': // annotation_value
- v = readAnnotationValues(v + 2,
- buf,
- true,
- av.visitAnnotation(name, readUTF8(v, buf)));
+ case 'J':
+ long[] lv = new long[size];
+ for (i = 0; i < size; i++) {
+ lv[i] = readLong(items[readUnsignedShort(v)]);
+ v += 3;
+ }
+ av.visit(name, lv);
+ --v;
break;
- case '[': // array_value
- int size = readUnsignedShort(v);
- v += 2;
- if (size == 0) {
- return readAnnotationValues(v - 2,
- buf,
- false,
- av.visitArray(name));
+ case 'F':
+ float[] fv = new float[size];
+ for (i = 0; i < size; i++) {
+ fv[i] = Float
+ .intBitsToFloat(readInt(items[readUnsignedShort(v)]));
+ v += 3;
}
- switch (this.b[v++] & 0xFF) {
- case 'B':
- byte[] bv = new byte[size];
- for (i = 0; i < size; i++) {
- bv[i] = (byte) readInt(items[readUnsignedShort(v)]);
- v += 3;
- }
- av.visit(name, bv);
- --v;
- break;
- case 'Z':
- boolean[] zv = new boolean[size];
- for (i = 0; i < size; i++) {
- zv[i] = readInt(items[readUnsignedShort(v)]) != 0;
- v += 3;
- }
- av.visit(name, zv);
- --v;
- break;
- case 'S':
- short[] sv = new short[size];
- for (i = 0; i < size; i++) {
- sv[i] = (short) readInt(items[readUnsignedShort(v)]);
- v += 3;
- }
- av.visit(name, sv);
- --v;
- break;
- case 'C':
- char[] cv = new char[size];
- for (i = 0; i < size; i++) {
- cv[i] = (char) readInt(items[readUnsignedShort(v)]);
- v += 3;
- }
- av.visit(name, cv);
- --v;
- break;
- case 'I':
- int[] iv = new int[size];
- for (i = 0; i < size; i++) {
- iv[i] = readInt(items[readUnsignedShort(v)]);
- v += 3;
- }
- av.visit(name, iv);
- --v;
- break;
- case 'J':
- long[] lv = new long[size];
- for (i = 0; i < size; i++) {
- lv[i] = readLong(items[readUnsignedShort(v)]);
- v += 3;
- }
- av.visit(name, lv);
- --v;
- break;
- case 'F':
- float[] fv = new float[size];
- for (i = 0; i < size; i++) {
- fv[i] = Float.intBitsToFloat(readInt(items[readUnsignedShort(v)]));
- v += 3;
- }
- av.visit(name, fv);
- --v;
- break;
- case 'D':
- double[] dv = new double[size];
- for (i = 0; i < size; i++) {
- dv[i] = Double.longBitsToDouble(readLong(items[readUnsignedShort(v)]));
- v += 3;
- }
- av.visit(name, dv);
- --v;
- break;
- default:
- v = readAnnotationValues(v - 3,
- buf,
- false,
- av.visitArray(name));
+ av.visit(name, fv);
+ --v;
+ break;
+ case 'D':
+ double[] dv = new double[size];
+ for (i = 0; i < size; i++) {
+ dv[i] = Double
+ .longBitsToDouble(readLong(items[readUnsignedShort(v)]));
+ v += 3;
}
+ av.visit(name, dv);
+ --v;
+ break;
+ default:
+ v = readAnnotationValues(v - 3, buf, false, av.visitArray(name));
+ }
}
return v;
}
- private int readFrameType(
- final Object[] frame,
- final int index,
- int v,
- final char[] buf,
- final Label[] labels)
- {
- int type = b[v++] & 0xFF;
- switch (type) {
- case 0:
- frame[index] = Opcodes.TOP;
- break;
- case 1:
- frame[index] = Opcodes.INTEGER;
- break;
- case 2:
- frame[index] = Opcodes.FLOAT;
+ /**
+ * Computes the implicit frame of the method currently being parsed (as
+ * defined in the given {@link Context}) and stores it in the given context.
+ *
+ * @param frame
+ * information about the class being parsed.
+ */
+ private void getImplicitFrame(final Context frame) {
+ String desc = frame.desc;
+ Object[] locals = frame.local;
+ int local = 0;
+ if ((frame.access & Opcodes.ACC_STATIC) == 0) {
+ if ("<init>".equals(frame.name)) {
+ locals[local++] = Opcodes.UNINITIALIZED_THIS;
+ } else {
+ locals[local++] = readClass(header + 2, frame.buffer);
+ }
+ }
+ int i = 1;
+ loop: while (true) {
+ int j = i;
+ switch (desc.charAt(i++)) {
+ case 'Z':
+ case 'C':
+ case 'B':
+ case 'S':
+ case 'I':
+ locals[local++] = Opcodes.INTEGER;
break;
- case 3:
- frame[index] = Opcodes.DOUBLE;
+ case 'F':
+ locals[local++] = Opcodes.FLOAT;
break;
- case 4:
- frame[index] = Opcodes.LONG;
+ case 'J':
+ locals[local++] = Opcodes.LONG;
break;
- case 5:
- frame[index] = Opcodes.NULL;
+ case 'D':
+ locals[local++] = Opcodes.DOUBLE;
break;
- case 6:
- frame[index] = Opcodes.UNINITIALIZED_THIS;
+ case '[':
+ while (desc.charAt(i) == '[') {
+ ++i;
+ }
+ if (desc.charAt(i) == 'L') {
+ ++i;
+ while (desc.charAt(i) != ';') {
+ ++i;
+ }
+ }
+ locals[local++] = desc.substring(j, ++i);
break;
- case 7: // Object
- frame[index] = readClass(v, buf);
- v += 2;
+ case 'L':
+ while (desc.charAt(i) != ';') {
+ ++i;
+ }
+ locals[local++] = desc.substring(j + 1, i++);
break;
- default: // Uninitialized
- frame[index] = readLabel(readUnsignedShort(v), labels);
- v += 2;
+ default:
+ break loop;
+ }
+ }
+ frame.localCount = local;
+ }
+
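    // A worked example of the descriptor walk above: for a hypothetical instance method of
    // com/example/C with descriptor "(IJ[Ljava/lang/String;D)V" it produces
    //   frame.local = { "com/example/C",        // 'this' (UNINITIALIZED_THIS in <init>)
    //                   Opcodes.INTEGER,        // I
    //                   Opcodes.LONG,           // J (a single verification type, not two slots)
    //                   "[Ljava/lang/String;",  // array types keep their full descriptor
    //                   Opcodes.DOUBLE };       // D
    // and frame.localCount = 5.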
+ /**
+ * Reads a stack map frame and stores the result in the given
+ * {@link Context} object.
+ *
+ * @param stackMap
+ * the start offset of a stack map frame in the class file.
+ * @param zip
+ * if the stack map frame at stackMap is compressed or not.
+ * @param unzip
+ * if the stack map frame must be uncompressed.
+ * @param labels
+ * the labels of the method currently being parsed, indexed by
+ * their offset. A new label for the parsed stack map frame is
+ * stored in this array if it does not already exist.
+ * @param frame
+ * where the parsed stack map frame must be stored.
+ * @return the offset of the first byte following the parsed frame.
+ */
+ private int readFrame(int stackMap, boolean zip, boolean unzip,
+ Label[] labels, Context frame) {
+ char[] c = frame.buffer;
+ int tag;
+ int delta;
+ if (zip) {
+ tag = b[stackMap++] & 0xFF;
+ } else {
+ tag = MethodWriter.FULL_FRAME;
+ frame.offset = -1;
+ }
+ frame.localDiff = 0;
+ if (tag < MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME) {
+ delta = tag;
+ frame.mode = Opcodes.F_SAME;
+ frame.stackCount = 0;
+ } else if (tag < MethodWriter.RESERVED) {
+ delta = tag - MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME;
+ stackMap = readFrameType(frame.stack, 0, stackMap, c, labels);
+ frame.mode = Opcodes.F_SAME1;
+ frame.stackCount = 1;
+ } else {
+ delta = readUnsignedShort(stackMap);
+ stackMap += 2;
+ if (tag == MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED) {
+ stackMap = readFrameType(frame.stack, 0, stackMap, c, labels);
+ frame.mode = Opcodes.F_SAME1;
+ frame.stackCount = 1;
+ } else if (tag >= MethodWriter.CHOP_FRAME
+ && tag < MethodWriter.SAME_FRAME_EXTENDED) {
+ frame.mode = Opcodes.F_CHOP;
+ frame.localDiff = MethodWriter.SAME_FRAME_EXTENDED - tag;
+ frame.localCount -= frame.localDiff;
+ frame.stackCount = 0;
+ } else if (tag == MethodWriter.SAME_FRAME_EXTENDED) {
+ frame.mode = Opcodes.F_SAME;
+ frame.stackCount = 0;
+ } else if (tag < MethodWriter.FULL_FRAME) {
+ int local = unzip ? frame.localCount : 0;
+ for (int i = tag - MethodWriter.SAME_FRAME_EXTENDED; i > 0; i--) {
+ stackMap = readFrameType(frame.local, local++, stackMap, c,
+ labels);
+ }
+ frame.mode = Opcodes.F_APPEND;
+ frame.localDiff = tag - MethodWriter.SAME_FRAME_EXTENDED;
+ frame.localCount += frame.localDiff;
+ frame.stackCount = 0;
+ } else { // if (tag == FULL_FRAME) {
+ frame.mode = Opcodes.F_FULL;
+ int n = readUnsignedShort(stackMap);
+ stackMap += 2;
+ frame.localDiff = n;
+ frame.localCount = n;
+ for (int local = 0; n > 0; n--) {
+ stackMap = readFrameType(frame.local, local++, stackMap, c,
+ labels);
+ }
+ n = readUnsignedShort(stackMap);
+ stackMap += 2;
+ frame.stackCount = n;
+ for (int stack = 0; n > 0; n--) {
+ stackMap = readFrameType(frame.stack, stack++, stackMap, c,
+ labels);
+ }
+ }
+ }
+ frame.offset += delta + 1;
+ readLabel(frame.offset, labels);
+ return stackMap;
+ }
+
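    // Worked decode for the logic above, assuming the MethodWriter tag constants mirror the
    // class file format (SAME_LOCALS_1_STACK_ITEM_FRAME = 64, RESERVED = 128, FULL_FRAME = 255):
    // a compressed entry whose first byte is 0x42 (66) is a same_locals_1_stack_item_frame, so
    //   delta = 66 - 64 = 2, frame.mode = Opcodes.F_SAME1, frame.stackCount = 1 (one type read),
    //   frame.offset += delta + 1.
    // Because the implicit initial frame was given offset -1, the first explicit frame lands at
    // plain offset_delta (here 2), and every later frame at previous offset + offset_delta + 1.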
+ /**
+ * Reads a stack map frame type and stores it at the given index in the
+ * given array.
+ *
+ * @param frame
+ * the array where the parsed type must be stored.
+ * @param index
+ * the index in 'frame' where the parsed type must be stored.
+ * @param v
+ * the start offset of the stack map frame type to read.
+ * @param buf
+ * a buffer to read strings.
+ * @param labels
+ * the labels of the method currently being parsed, indexed by
+ * their offset. If the parsed type is an Uninitialized type, a
+ * new label for the corresponding NEW instruction is stored in
+ * this array if it does not already exist.
+ * @return the offset of the first byte after the parsed type.
+ */
+ private int readFrameType(final Object[] frame, final int index, int v,
+ final char[] buf, final Label[] labels) {
+ int type = b[v++] & 0xFF;
+ switch (type) {
+ case 0:
+ frame[index] = Opcodes.TOP;
+ break;
+ case 1:
+ frame[index] = Opcodes.INTEGER;
+ break;
+ case 2:
+ frame[index] = Opcodes.FLOAT;
+ break;
+ case 3:
+ frame[index] = Opcodes.DOUBLE;
+ break;
+ case 4:
+ frame[index] = Opcodes.LONG;
+ break;
+ case 5:
+ frame[index] = Opcodes.NULL;
+ break;
+ case 6:
+ frame[index] = Opcodes.UNINITIALIZED_THIS;
+ break;
+ case 7: // Object
+ frame[index] = readClass(v, buf);
+ v += 2;
+ break;
+ default: // Uninitialized
+ frame[index] = readLabel(readUnsignedShort(v), labels);
+ v += 2;
}
return v;
}
@@ -1927,10 +1868,12 @@ public class ClassReader {
* implementation of this method creates a label for the given offset if it
* has not been already created.
*
- * @param offset a bytecode offset in a method.
- * @param labels the already created labels, indexed by their offset. If a
- * label already exists for offset this method must not create a new
- * one. Otherwise it must store the new label in this array.
+ * @param offset
+ * a bytecode offset in a method.
+ * @param labels
+ * the already created labels, indexed by their offset. If a
+ * label already exists for offset this method must not create a
+ * new one. Otherwise it must store the new label in this array.
* @return a non null Label, which must be equal to labels[offset].
*/
protected Label readLabel(int offset, Label[] labels) {
@@ -1941,39 +1884,67 @@ public class ClassReader {
}
/**
+ * Returns the start index of the attribute_info structure of this class.
+ *
+ * @return the start index of the attribute_info structure of this class.
+ */
+ private int getAttributes() {
+ // skips the header
+ int u = header + 8 + readUnsignedShort(header + 6) * 2;
+ // skips fields and methods
+ for (int i = readUnsignedShort(u); i > 0; --i) {
+ for (int j = readUnsignedShort(u + 8); j > 0; --j) {
+ u += 6 + readInt(u + 12);
+ }
+ u += 8;
+ }
+ u += 2;
+ for (int i = readUnsignedShort(u); i > 0; --i) {
+ for (int j = readUnsignedShort(u + 8); j > 0; --j) {
+ u += 6 + readInt(u + 12);
+ }
+ u += 8;
+ }
+ // the attribute_info structure starts just after the methods
+ return u + 2;
+ }
+
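    // Byte-layout sketch for the arithmetic above: u starts at fields_count, two bytes before
    // the first field_info, so u + 8 is that entry's attributes_count and u + 12 the current
    // attribute's length (each attribute = 2-byte name index + 4-byte length + 'length' info
    // bytes, hence the "u += 6 + readInt(u + 12)" stride). The same walk is then repeated for
    // the methods, leaving the returned offset just past them, at the class-level attributes.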
+ /**
* Reads an attribute in {@link #b b}.
*
- * @param attrs prototypes of the attributes that must be parsed during the
- * visit of the class. Any attribute whose type is not equal to the
- * type of one the prototypes is ignored (i.e. an empty
- * {@link Attribute} instance is returned).
- * @param type the type of the attribute.
- * @param off index of the first byte of the attribute's content in
- * {@link #b b}. The 6 attribute header bytes, containing the type
- * and the length of the attribute, are not taken into account here
- * (they have already been read).
- * @param len the length of the attribute's content.
- * @param buf buffer to be used to call {@link #readUTF8 readUTF8},
- * {@link #readClass(int,char[]) readClass} or
- * {@link #readConst readConst}.
- * @param codeOff index of the first byte of code's attribute content in
- * {@link #b b}, or -1 if the attribute to be read is not a code
- * attribute. The 6 attribute header bytes, containing the type and
- * the length of the attribute, are not taken into account here.
- * @param labels the labels of the method's code, or <tt>null</tt> if the
- * attribute to be read is not a code attribute.
+ * @param attrs
+ * prototypes of the attributes that must be parsed during the
+ * visit of the class. Any attribute whose type is not equal to
+ * the type of one the prototypes is ignored (i.e. an empty
+     *            the type of one of the prototypes is ignored (i.e. an empty
+ * @param type
+ * the type of the attribute.
+ * @param off
+ * index of the first byte of the attribute's content in
+ * {@link #b b}. The 6 attribute header bytes, containing the
+ * type and the length of the attribute, are not taken into
+ * account here (they have already been read).
+ * @param len
+ * the length of the attribute's content.
+ * @param buf
+ * buffer to be used to call {@link #readUTF8 readUTF8},
+ * {@link #readClass(int,char[]) readClass} or {@link #readConst
+ * readConst}.
+ * @param codeOff
+ * index of the first byte of code's attribute content in
+ * {@link #b b}, or -1 if the attribute to be read is not a code
+ * attribute. The 6 attribute header bytes, containing the type
+ * and the length of the attribute, are not taken into account
+ * here.
+ * @param labels
+ * the labels of the method's code, or <tt>null</tt> if the
+ * attribute to be read is not a code attribute.
* @return the attribute that has been read, or <tt>null</tt> to skip this
* attribute.
*/
- private Attribute readAttribute(
- final Attribute[] attrs,
- final String type,
- final int off,
- final int len,
- final char[] buf,
- final int codeOff,
- final Label[] labels)
- {
+ private Attribute readAttribute(final Attribute[] attrs, final String type,
+ final int off, final int len, final char[] buf, final int codeOff,
+ final Label[] labels) {
for (int i = 0; i < attrs.length; ++i) {
if (attrs[i].type.equals(type)) {
return attrs[i].read(this, off, len, buf, codeOff, labels);
@@ -1987,9 +1958,9 @@ public class ClassReader {
// ------------------------------------------------------------------------
/**
- * Returns the number of constant pool items in {@link #b b}.
+ * Returns the number of constant pool items in {@link #b b}.
*
- * @return the number of constant pool items in {@link #b b}.
+ * @return the number of constant pool items in {@link #b b}.
*/
public int getItemCount() {
return items.length;
@@ -2000,7 +1971,8 @@ public class ClassReader {
* one. <i>This method is intended for {@link Attribute} sub classes, and is
* normally not needed by class generators or adapters.</i>
*
- * @param item the index a constant pool item.
+ * @param item
+ * the index a constant pool item.
* @return the start index of the constant pool item in {@link #b b}, plus
* one.
*/
@@ -2024,7 +1996,8 @@ public class ClassReader {
* {@link Attribute} sub classes, and is normally not needed by class
* generators or adapters.</i>
*
- * @param index the start index of the value to be read in {@link #b b}.
+ * @param index
+ * the start index of the value to be read in {@link #b b}.
* @return the read value.
*/
public int readByte(final int index) {
@@ -2032,11 +2005,12 @@ public class ClassReader {
}
/**
- * Reads an unsigned short value in {@link #b b}. <i>This method is
- * intended for {@link Attribute} sub classes, and is normally not needed by
- * class generators or adapters.</i>
+ * Reads an unsigned short value in {@link #b b}. <i>This method is intended
+ * for {@link Attribute} sub classes, and is normally not needed by class
+ * generators or adapters.</i>
*
- * @param index the start index of the value to be read in {@link #b b}.
+ * @param index
+ * the start index of the value to be read in {@link #b b}.
* @return the read value.
*/
public int readUnsignedShort(final int index) {
@@ -2049,7 +2023,8 @@ public class ClassReader {
* for {@link Attribute} sub classes, and is normally not needed by class
* generators or adapters.</i>
*
- * @param index the start index of the value to be read in {@link #b b}.
+ * @param index
+ * the start index of the value to be read in {@link #b b}.
* @return the read value.
*/
public short readShort(final int index) {
@@ -2062,7 +2037,8 @@ public class ClassReader {
* {@link Attribute} sub classes, and is normally not needed by class
* generators or adapters.</i>
*
- * @param index the start index of the value to be read in {@link #b b}.
+ * @param index
+ * the start index of the value to be read in {@link #b b}.
* @return the read value.
*/
public int readInt(final int index) {
@@ -2072,11 +2048,12 @@ public class ClassReader {
}
/**
- * Reads a signed long value in {@link #b b}. <i>This method is intended
- * for {@link Attribute} sub classes, and is normally not needed by class
+ * Reads a signed long value in {@link #b b}. <i>This method is intended for
+ * {@link Attribute} sub classes, and is normally not needed by class
* generators or adapters.</i>
*
- * @param index the start index of the value to be read in {@link #b b}.
+ * @param index
+ * the start index of the value to be read in {@link #b b}.
* @return the read value.
*/
public long readLong(final int index) {
@@ -2090,14 +2067,19 @@ public class ClassReader {
* is intended for {@link Attribute} sub classes, and is normally not needed
* by class generators or adapters.</i>
*
- * @param index the start index of an unsigned short value in {@link #b b},
- * whose value is the index of an UTF8 constant pool item.
- * @param buf buffer to be used to read the item. This buffer must be
- * sufficiently large. It is not automatically resized.
+ * @param index
+ * the start index of an unsigned short value in {@link #b b},
+ * whose value is the index of an UTF8 constant pool item.
+ * @param buf
+ * buffer to be used to read the item. This buffer must be
+ * sufficiently large. It is not automatically resized.
* @return the String corresponding to the specified UTF8 item.
*/
public String readUTF8(int index, final char[] buf) {
int item = readUnsignedShort(index);
+ if (index == 0 || item == 0) {
+ return null;
+ }
String s = strings[item];
if (s != null) {
return s;
@@ -2109,10 +2091,13 @@ public class ClassReader {
/**
* Reads UTF8 string in {@link #b b}.
*
- * @param index start offset of the UTF8 string to be read.
- * @param utfLen length of the UTF8 string to be read.
- * @param buf buffer to be used to read the string. This buffer must be
- * sufficiently large. It is not automatically resized.
+ * @param index
+ * start offset of the UTF8 string to be read.
+ * @param utfLen
+ * length of the UTF8 string to be read.
+ * @param buf
+ * buffer to be used to read the string. This buffer must be
+ * sufficiently large. It is not automatically resized.
* @return the String corresponding to the specified UTF8 string.
*/
private String readUTF(int index, final int utfLen, final char[] buf) {
@@ -2125,28 +2110,28 @@ public class ClassReader {
while (index < endIndex) {
c = b[index++];
switch (st) {
- case 0:
- c = c & 0xFF;
- if (c < 0x80) { // 0xxxxxxx
- buf[strLen++] = (char) c;
- } else if (c < 0xE0 && c > 0xBF) { // 110x xxxx 10xx xxxx
- cc = (char) (c & 0x1F);
- st = 1;
- } else { // 1110 xxxx 10xx xxxx 10xx xxxx
- cc = (char) (c & 0x0F);
- st = 2;
- }
- break;
+ case 0:
+ c = c & 0xFF;
+ if (c < 0x80) { // 0xxxxxxx
+ buf[strLen++] = (char) c;
+ } else if (c < 0xE0 && c > 0xBF) { // 110x xxxx 10xx xxxx
+ cc = (char) (c & 0x1F);
+ st = 1;
+ } else { // 1110 xxxx 10xx xxxx 10xx xxxx
+ cc = (char) (c & 0x0F);
+ st = 2;
+ }
+ break;
- case 1: // byte 2 of 2-byte char or byte 3 of 3-byte char
- buf[strLen++] = (char) ((cc << 6) | (c & 0x3F));
- st = 0;
- break;
+ case 1: // byte 2 of 2-byte char or byte 3 of 3-byte char
+ buf[strLen++] = (char) ((cc << 6) | (c & 0x3F));
+ st = 0;
+ break;
- case 2: // byte 2 of 3-byte char
- cc = (char) ((cc << 6) | (c & 0x3F));
- st = 1;
- break;
+ case 2: // byte 2 of 3-byte char
+ cc = (char) ((cc << 6) | (c & 0x3F));
+ st = 1;
+ break;
}
}
return new String(buf, 0, strLen);
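    // Worked example for the decoder above (byte values illustrative): the modified UTF-8
    // pair 0xC3 0xA9 decodes to '\u00E9':
    //   state 0: c = 0xC3, and 0xBF < 0xC3 < 0xE0, so cc = 0xC3 & 0x1F = 0x03, st = 1
    //   state 1: c = 0xA9, so buf[strLen++] = (char) ((0x03 << 6) | (0xA9 & 0x3F)) = 0x00E9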
@@ -2157,10 +2142,12 @@ public class ClassReader {
* intended for {@link Attribute} sub classes, and is normally not needed by
* class generators or adapters.</i>
*
- * @param index the start index of an unsigned short value in {@link #b b},
- * whose value is the index of a class constant pool item.
- * @param buf buffer to be used to read the item. This buffer must be
- * sufficiently large. It is not automatically resized.
+ * @param index
+ * the start index of an unsigned short value in {@link #b b},
+ * whose value is the index of a class constant pool item.
+ * @param buf
+ * buffer to be used to read the item. This buffer must be
+ * sufficiently large. It is not automatically resized.
* @return the String corresponding to the specified class item.
*/
public String readClass(final int index, final char[] buf) {
@@ -2175,9 +2162,11 @@ public class ClassReader {
* method is intended for {@link Attribute} sub classes, and is normally not
* needed by class generators or adapters.</i>
*
- * @param item the index of a constant pool item.
- * @param buf buffer to be used to read the item. This buffer must be
- * sufficiently large. It is not automatically resized.
+ * @param item
+ * the index of a constant pool item.
+ * @param buf
+ * buffer to be used to read the item. This buffer must be
+ * sufficiently large. It is not automatically resized.
* @return the {@link Integer}, {@link Float}, {@link Long}, {@link Double},
* {@link String}, {@link Type} or {@link Handle} corresponding to
* the given constant pool item.
@@ -2185,32 +2174,29 @@ public class ClassReader {
public Object readConst(final int item, final char[] buf) {
int index = items[item];
switch (b[index - 1]) {
- case ClassWriter.INT:
- return new Integer(readInt(index));
- case ClassWriter.FLOAT:
- return new Float(Float.intBitsToFloat(readInt(index)));
- case ClassWriter.LONG:
- return new Long(readLong(index));
- case ClassWriter.DOUBLE:
- return new Double(Double.longBitsToDouble(readLong(index)));
- case ClassWriter.CLASS:
- return Type.getObjectType(readUTF8(index, buf));
- case ClassWriter.STR:
- return readUTF8(index, buf);
- case ClassWriter.MTYPE:
- return Type.getMethodType(readUTF8(index, buf));
-
- //case ClassWriter.HANDLE_BASE + [1..9]:
- default: {
- int tag = readByte(index);
- int[] items = this.items;
- int cpIndex = items[readUnsignedShort(index + 1)];
- String owner = readClass(cpIndex, buf);
- cpIndex = items[readUnsignedShort(cpIndex + 2)];
- String name = readUTF8(cpIndex, buf);
- String desc = readUTF8(cpIndex + 2, buf);
- return new Handle(tag, owner, name, desc);
- }
+ case ClassWriter.INT:
+ return new Integer(readInt(index));
+ case ClassWriter.FLOAT:
+ return new Float(Float.intBitsToFloat(readInt(index)));
+ case ClassWriter.LONG:
+ return new Long(readLong(index));
+ case ClassWriter.DOUBLE:
+ return new Double(Double.longBitsToDouble(readLong(index)));
+ case ClassWriter.CLASS:
+ return Type.getObjectType(readUTF8(index, buf));
+ case ClassWriter.STR:
+ return readUTF8(index, buf);
+ case ClassWriter.MTYPE:
+ return Type.getMethodType(readUTF8(index, buf));
+ default: // case ClassWriter.HANDLE_BASE + [1..9]:
+ int tag = readByte(index);
+ int[] items = this.items;
+ int cpIndex = items[readUnsignedShort(index + 1)];
+ String owner = readClass(cpIndex, buf);
+ cpIndex = items[readUnsignedShort(cpIndex + 2)];
+ String name = readUTF8(cpIndex, buf);
+ String desc = readUTF8(cpIndex + 2, buf);
+ return new Handle(tag, owner, name, desc);
}
}
}
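A small worked example of the method-handle branch of readConst above; the constant pool contents are hypothetical:

    // a CONSTANT_MethodHandle with reference_kind 6 (REF_invokeStatic) whose reference
    // resolves to java/lang/Integer.valueOf(I)Ljava/lang/Integer; is returned as
    //   new Handle(Opcodes.H_INVOKESTATIC, "java/lang/Integer", "valueOf",
    //              "(I)Ljava/lang/Integer;")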
diff --git a/src/asm/scala/tools/asm/ClassVisitor.java b/src/asm/scala/tools/asm/ClassVisitor.java
index ae38ae0ab9..3fc364d5e5 100644
--- a/src/asm/scala/tools/asm/ClassVisitor.java
+++ b/src/asm/scala/tools/asm/ClassVisitor.java
@@ -30,11 +30,11 @@
package scala.tools.asm;
/**
- * A visitor to visit a Java class. The methods of this class must be called
- * in the following order: <tt>visit</tt> [ <tt>visitSource</tt> ] [
+ * A visitor to visit a Java class. The methods of this class must be called in
+ * the following order: <tt>visit</tt> [ <tt>visitSource</tt> ] [
* <tt>visitOuterClass</tt> ] ( <tt>visitAnnotation</tt> |
- * <tt>visitAttribute</tt> )* ( <tt>visitInnerClass</tt> |
- * <tt>visitField</tt> | <tt>visitMethod</tt> )* <tt>visitEnd</tt>.
+ * <tt>visitAttribute</tt> )* ( <tt>visitInnerClass</tt> | <tt>visitField</tt> |
+ * <tt>visitMethod</tt> )* <tt>visitEnd</tt>.
*
* @author Eric Bruneton
*/
@@ -55,8 +55,9 @@ public abstract class ClassVisitor {
/**
* Constructs a new {@link ClassVisitor}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
*/
public ClassVisitor(final int api) {
this(api, null);
@@ -65,15 +66,17 @@ public abstract class ClassVisitor {
/**
* Constructs a new {@link ClassVisitor}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
- * @param cv the class visitor to which this visitor must delegate method
- * calls. May be null.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param cv
+ * the class visitor to which this visitor must delegate method
+ * calls. May be null.
*/
public ClassVisitor(final int api, final ClassVisitor cv) {
- /*if (api != Opcodes.ASM4) {
+ if (api != Opcodes.ASM4) {
throw new IllegalArgumentException();
- }*/
+ }
this.api = api;
this.cv = cv;
}
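A minimal sketch of the delegation pattern this constructor supports; the adapter name MethodCounter and its counting behaviour are illustrative, and the shaded package scala.tools.asm is assumed from the file paths in this patch.

    import scala.tools.asm.ClassVisitor;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    // Counts the methods it sees, then forwards every event to the next visitor in the chain.
    class MethodCounter extends ClassVisitor {
        int count;

        MethodCounter(ClassVisitor next) {
            super(Opcodes.ASM4, next); // any other api value is now rejected with IllegalArgumentException
        }

        @Override
        public MethodVisitor visitMethod(int access, String name, String desc,
                String signature, String[] exceptions) {
            count++;
            return super.visitMethod(access, name, desc, signature, exceptions);
        }
    }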
@@ -81,30 +84,30 @@ public abstract class ClassVisitor {
/**
* Visits the header of the class.
*
- * @param version the class version.
- * @param access the class's access flags (see {@link Opcodes}). This
- * parameter also indicates if the class is deprecated.
- * @param name the internal name of the class (see
- * {@link Type#getInternalName() getInternalName}).
- * @param signature the signature of this class. May be <tt>null</tt> if
- * the class is not a generic one, and does not extend or implement
- * generic classes or interfaces.
- * @param superName the internal of name of the super class (see
- * {@link Type#getInternalName() getInternalName}). For interfaces,
- * the super class is {@link Object}. May be <tt>null</tt>, but
- * only for the {@link Object} class.
- * @param interfaces the internal names of the class's interfaces (see
- * {@link Type#getInternalName() getInternalName}). May be
- * <tt>null</tt>.
+ * @param version
+ * the class version.
+ * @param access
+ * the class's access flags (see {@link Opcodes}). This parameter
+ * also indicates if the class is deprecated.
+ * @param name
+ * the internal name of the class (see
+ * {@link Type#getInternalName() getInternalName}).
+ * @param signature
+ * the signature of this class. May be <tt>null</tt> if the class
+ * is not a generic one, and does not extend or implement generic
+ * classes or interfaces.
+ * @param superName
+     *            the internal name of the super class (see
+ * {@link Type#getInternalName() getInternalName}). For
+ * interfaces, the super class is {@link Object}. May be
+ * <tt>null</tt>, but only for the {@link Object} class.
+ * @param interfaces
+ * the internal names of the class's interfaces (see
+ * {@link Type#getInternalName() getInternalName}). May be
+ * <tt>null</tt>.
*/
- public void visit(
- int version,
- int access,
- String name,
- String signature,
- String superName,
- String[] interfaces)
- {
+ public void visit(int version, int access, String name, String signature,
+ String superName, String[] interfaces) {
if (cv != null) {
cv.visit(version, access, name, signature, superName, interfaces);
}
@@ -113,11 +116,13 @@ public abstract class ClassVisitor {
/**
* Visits the source of the class.
*
- * @param source the name of the source file from which the class was
- * compiled. May be <tt>null</tt>.
- * @param debug additional debug information to compute the correspondance
- * between source and compiled elements of the class. May be
- * <tt>null</tt>.
+ * @param source
+ * the name of the source file from which the class was compiled.
+ * May be <tt>null</tt>.
+ * @param debug
+     *            additional debug information to compute the correspondence
+ * between source and compiled elements of the class. May be
+ * <tt>null</tt>.
*/
public void visitSource(String source, String debug) {
if (cv != null) {
@@ -129,16 +134,19 @@ public abstract class ClassVisitor {
* Visits the enclosing class of the class. This method must be called only
* if the class has an enclosing class.
*
- * @param owner internal name of the enclosing class of the class.
- * @param name the name of the method that contains the class, or
- * <tt>null</tt> if the class is not enclosed in a method of its
- * enclosing class.
- * @param desc the descriptor of the method that contains the class, or
- * <tt>null</tt> if the class is not enclosed in a method of its
- * enclosing class.
+ * @param owner
+ * internal name of the enclosing class of the class.
+ * @param name
+ * the name of the method that contains the class, or
+ * <tt>null</tt> if the class is not enclosed in a method of its
+ * enclosing class.
+ * @param desc
+ * the descriptor of the method that contains the class, or
+ * <tt>null</tt> if the class is not enclosed in a method of its
+ * enclosing class.
*/
public void visitOuterClass(String owner, String name, String desc) {
- if (cv != null) {
+ if (cv != null) {
cv.visitOuterClass(owner, name, desc);
}
}
@@ -146,8 +154,10 @@ public abstract class ClassVisitor {
/**
* Visits an annotation of the class.
*
- * @param desc the class descriptor of the annotation class.
- * @param visible <tt>true</tt> if the annotation is visible at runtime.
+ * @param desc
+ * the class descriptor of the annotation class.
+ * @param visible
+ * <tt>true</tt> if the annotation is visible at runtime.
* @return a visitor to visit the annotation values, or <tt>null</tt> if
* this visitor is not interested in visiting this annotation.
*/
@@ -161,7 +171,8 @@ public abstract class ClassVisitor {
/**
* Visits a non standard attribute of the class.
*
- * @param attr an attribute.
+ * @param attr
+ * an attribute.
*/
public void visitAttribute(Attribute attr) {
if (cv != null) {
@@ -173,23 +184,22 @@ public abstract class ClassVisitor {
* Visits information about an inner class. This inner class is not
* necessarily a member of the class being visited.
*
- * @param name the internal name of an inner class (see
- * {@link Type#getInternalName() getInternalName}).
- * @param outerName the internal name of the class to which the inner class
- * belongs (see {@link Type#getInternalName() getInternalName}). May
- * be <tt>null</tt> for not member classes.
- * @param innerName the (simple) name of the inner class inside its
- * enclosing class. May be <tt>null</tt> for anonymous inner
- * classes.
- * @param access the access flags of the inner class as originally declared
- * in the enclosing class.
+ * @param name
+ * the internal name of an inner class (see
+ * {@link Type#getInternalName() getInternalName}).
+ * @param outerName
+ * the internal name of the class to which the inner class
+ * belongs (see {@link Type#getInternalName() getInternalName}).
+     *            May be <tt>null</tt> for non-member classes.
+ * @param innerName
+ * the (simple) name of the inner class inside its enclosing
+ * class. May be <tt>null</tt> for anonymous inner classes.
+ * @param access
+ * the access flags of the inner class as originally declared in
+ * the enclosing class.
*/
- public void visitInnerClass(
- String name,
- String outerName,
- String innerName,
- int access)
- {
+ public void visitInnerClass(String name, String outerName,
+ String innerName, int access) {
if (cv != null) {
cv.visitInnerClass(name, outerName, innerName, access);
}
@@ -198,33 +208,32 @@ public abstract class ClassVisitor {
/**
* Visits a field of the class.
*
- * @param access the field's access flags (see {@link Opcodes}). This
- * parameter also indicates if the field is synthetic and/or
- * deprecated.
- * @param name the field's name.
- * @param desc the field's descriptor (see {@link Type Type}).
- * @param signature the field's signature. May be <tt>null</tt> if the
- * field's type does not use generic types.
- * @param value the field's initial value. This parameter, which may be
- * <tt>null</tt> if the field does not have an initial value, must
- * be an {@link Integer}, a {@link Float}, a {@link Long}, a
- * {@link Double} or a {@link String} (for <tt>int</tt>,
- * <tt>float</tt>, <tt>long</tt> or <tt>String</tt> fields
- * respectively). <i>This parameter is only used for static fields</i>.
- * Its value is ignored for non static fields, which must be
- * initialized through bytecode instructions in constructors or
- * methods.
+ * @param access
+ * the field's access flags (see {@link Opcodes}). This parameter
+ * also indicates if the field is synthetic and/or deprecated.
+ * @param name
+ * the field's name.
+ * @param desc
+ * the field's descriptor (see {@link Type Type}).
+ * @param signature
+ * the field's signature. May be <tt>null</tt> if the field's
+ * type does not use generic types.
+ * @param value
+ * the field's initial value. This parameter, which may be
+ * <tt>null</tt> if the field does not have an initial value,
+ * must be an {@link Integer}, a {@link Float}, a {@link Long}, a
+ * {@link Double} or a {@link String} (for <tt>int</tt>,
+ * <tt>float</tt>, <tt>long</tt> or <tt>String</tt> fields
+ * respectively). <i>This parameter is only used for static
+ * fields</i>. Its value is ignored for non static fields, which
+ * must be initialized through bytecode instructions in
+ * constructors or methods.
* @return a visitor to visit field annotations and attributes, or
- * <tt>null</tt> if this class visitor is not interested in
- * visiting these annotations and attributes.
+ * <tt>null</tt> if this class visitor is not interested in visiting
+ * these annotations and attributes.
*/
- public FieldVisitor visitField(
- int access,
- String name,
- String desc,
- String signature,
- Object value)
- {
+ public FieldVisitor visitField(int access, String name, String desc,
+ String signature, Object value) {
if (cv != null) {
return cv.visitField(access, name, desc, signature, value);
}
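A hedged sketch of the visitField contract above (illustrative only; the field name and constant are invented). As the documentation notes, the initial value is written out only for static fields:

    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.FieldVisitor;
    import scala.tools.asm.Opcodes;

    public class VisitFieldSketch {
        public static void addConstant(ClassWriter cw) {
            // A static final String, so the last argument becomes a
            // ConstantValue attribute; for instance fields it would be ignored.
            FieldVisitor fv = cw.visitField(
                    Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC | Opcodes.ACC_FINAL,
                    "GREETING",            // invented field name
                    "Ljava/lang/String;",  // descriptor
                    null,                  // signature: no generics
                    "hello");              // initial value (static fields only)
            fv.visitEnd();
        }
    }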
@@ -233,31 +242,31 @@ public abstract class ClassVisitor {
/**
* Visits a method of the class. This method <i>must</i> return a new
- * {@link MethodVisitor} instance (or <tt>null</tt>) each time it is
- * called, i.e., it should not return a previously returned visitor.
+ * {@link MethodVisitor} instance (or <tt>null</tt>) each time it is called,
+ * i.e., it should not return a previously returned visitor.
*
- * @param access the method's access flags (see {@link Opcodes}). This
- * parameter also indicates if the method is synthetic and/or
- * deprecated.
- * @param name the method's name.
- * @param desc the method's descriptor (see {@link Type Type}).
- * @param signature the method's signature. May be <tt>null</tt> if the
- * method parameters, return type and exceptions do not use generic
- * types.
- * @param exceptions the internal names of the method's exception classes
- * (see {@link Type#getInternalName() getInternalName}). May be
- * <tt>null</tt>.
+ * @param access
+ * the method's access flags (see {@link Opcodes}). This
+ * parameter also indicates if the method is synthetic and/or
+ * deprecated.
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor (see {@link Type Type}).
+ * @param signature
+ * the method's signature. May be <tt>null</tt> if the method
+ * parameters, return type and exceptions do not use generic
+ * types.
+ * @param exceptions
+ * the internal names of the method's exception classes (see
+ * {@link Type#getInternalName() getInternalName}). May be
+ * <tt>null</tt>.
* @return an object to visit the byte code of the method, or <tt>null</tt>
* if this class visitor is not interested in visiting the code of
* this method.
*/
- public MethodVisitor visitMethod(
- int access,
- String name,
- String desc,
- String signature,
- String[] exceptions)
- {
+ public MethodVisitor visitMethod(int access, String name, String desc,
+ String signature, String[] exceptions) {
if (cv != null) {
return cv.visitMethod(access, name, desc, signature, exceptions);
}
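To make the "fresh visitor per call" rule above concrete, here is an illustrative sketch (not repository code) that emits a default constructor; each call to visitMethod hands back a new MethodVisitor which is then driven from visitCode to visitEnd:

    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    public class VisitMethodSketch {
        public static void addDefaultConstructor(ClassWriter cw) {
            MethodVisitor mv = cw.visitMethod(Opcodes.ACC_PUBLIC, "<init>", "()V",
                    null, null);
            mv.visitCode();
            mv.visitVarInsn(Opcodes.ALOAD, 0);             // push 'this'
            mv.visitMethodInsn(Opcodes.INVOKESPECIAL,      // invoke super()
                    "java/lang/Object", "<init>", "()V");
            mv.visitInsn(Opcodes.RETURN);
            mv.visitMaxs(1, 1);                            // max stack, max locals
            mv.visitEnd();
        }
    }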
diff --git a/src/asm/scala/tools/asm/ClassWriter.java b/src/asm/scala/tools/asm/ClassWriter.java
index c7a0736b51..93ed7313c7 100644
--- a/src/asm/scala/tools/asm/ClassWriter.java
+++ b/src/asm/scala/tools/asm/ClassWriter.java
@@ -66,12 +66,18 @@ public class ClassWriter extends ClassVisitor {
public static final int COMPUTE_FRAMES = 2;
/**
- * Pseudo access flag to distinguish between the synthetic attribute and
- * the synthetic access flag.
+ * Pseudo access flag to distinguish between the synthetic attribute and the
+ * synthetic access flag.
*/
static final int ACC_SYNTHETIC_ATTRIBUTE = 0x40000;
/**
+     * Factor to convert from ACC_SYNTHETIC_ATTRIBUTE to Opcodes.ACC_SYNTHETIC.
+ */
+ static final int TO_ACC_SYNTHETIC = ACC_SYNTHETIC_ATTRIBUTE
+ / Opcodes.ACC_SYNTHETIC;
+
+ /**
* The type of instructions without any argument.
*/
static final int NOARG_INSN = 0;
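A worked example of the arithmetic behind the new TO_ACC_SYNTHETIC constant, assuming the standard JVMS value 0x1000 for ACC_SYNTHETIC (this snippet is illustrative and not part of the patch):

    public class SyntheticFactorSketch {
        public static void main(String[] args) {
            int ACC_SYNTHETIC = 0x1000;             // Opcodes.ACC_SYNTHETIC (JVMS value)
            int ACC_SYNTHETIC_ATTRIBUTE = 0x40000;  // ClassWriter's pseudo flag
            int TO_ACC_SYNTHETIC = ACC_SYNTHETIC_ATTRIBUTE / ACC_SYNTHETIC; // 0x40

            int access = ACC_SYNTHETIC_ATTRIBUTE | 0x0001; // pseudo flag + ACC_PUBLIC
            // Dividing by the factor maps the pseudo flag back onto the real
            // ACC_SYNTHETIC bit, so it can be folded into the mask that strips
            // internal flags before the access value is written out.
            System.out.println(
                    (access & ACC_SYNTHETIC_ATTRIBUTE) / TO_ACC_SYNTHETIC == ACC_SYNTHETIC); // true
        }
    }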
@@ -238,8 +244,8 @@ public class ClassWriter extends ClassVisitor {
/**
* The base value for all CONSTANT_MethodHandle constant pool items.
- * Internally, ASM store the 9 variations of CONSTANT_MethodHandle into
- * 9 different items.
+     * Internally, ASM stores the 9 variations of CONSTANT_MethodHandle into 9
+ * different items.
*/
static final int HANDLE_BASE = 20;
@@ -266,9 +272,8 @@ public class ClassWriter extends ClassVisitor {
static final int TYPE_MERGED = 32;
/**
- * The type of BootstrapMethods items. These items are stored in a
- * special class attribute named BootstrapMethods and
- * not in the constant pool.
+ * The type of BootstrapMethods items. These items are stored in a special
+ * class attribute named BootstrapMethods and not in the constant pool.
*/
static final int BSM = 33;
@@ -327,10 +332,10 @@ public class ClassWriter extends ClassVisitor {
* necessarily be stored in the constant pool. This type table is used by
* the control flow and data flow analysis algorithm used to compute stack
* map frames from scratch. This array associates to each index <tt>i</tt>
- * the Item whose index is <tt>i</tt>. All Item objects stored in this
- * array are also stored in the {@link #items} hash table. These two arrays
- * allow to retrieve an Item from its index or, conversely, to get the index
- * of an Item from its value. Each Item stores an internal name in its
+ * the Item whose index is <tt>i</tt>. All Item objects stored in this array
+ * are also stored in the {@link #items} hash table. These two arrays allow
+ * to retrieve an Item from its index or, conversely, to get the index of an
+ * Item from its value. Each Item stores an internal name in its
* {@link Item#strVal1} field.
*/
Item[] typeTable;
@@ -439,16 +444,16 @@ public class ClassWriter extends ClassVisitor {
/**
* The fields of this class. These fields are stored in a linked list of
* {@link FieldWriter} objects, linked to each other by their
- * {@link FieldWriter#fv} field. This field stores the first element of
- * this list.
+ * {@link FieldWriter#fv} field. This field stores the first element of this
+ * list.
*/
FieldWriter firstField;
/**
* The fields of this class. These fields are stored in a linked list of
* {@link FieldWriter} objects, linked to each other by their
- * {@link FieldWriter#fv} field. This field stores the last element of
- * this list.
+ * {@link FieldWriter#fv} field. This field stores the last element of this
+ * list.
*/
FieldWriter lastField;
@@ -463,8 +468,8 @@ public class ClassWriter extends ClassVisitor {
/**
* The methods of this class. These methods are stored in a linked list of
* {@link MethodWriter} objects, linked to each other by their
- * {@link MethodWriter#mv} field. This field stores the last element of
- * this list.
+ * {@link MethodWriter#mv} field. This field stores the last element of this
+ * list.
*/
MethodWriter lastMethod;
@@ -584,8 +589,10 @@ public class ClassWriter extends ClassVisitor {
/**
* Constructs a new {@link ClassWriter} object.
*
- * @param flags option flags that can be used to modify the default behavior
- * of this class. See {@link #COMPUTE_MAXS}, {@link #COMPUTE_FRAMES}.
+ * @param flags
+ * option flags that can be used to modify the default behavior
+ * of this class. See {@link #COMPUTE_MAXS},
+ * {@link #COMPUTE_FRAMES}.
*/
public ClassWriter(final int flags) {
super(Opcodes.ASM4);
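As a brief illustration of the flags described above (a sketch, not repository code): COMPUTE_MAXS has ASM compute the maximum stack size and local count, while COMPUTE_FRAMES additionally recomputes stack map frames from scratch:

    import scala.tools.asm.ClassWriter;

    public class WriterFlagsSketch {
        public static void main(String[] args) {
            // Nothing computed: the caller must pass correct visitMaxs
            // arguments and visit frames itself.
            ClassWriter manual = new ClassWriter(0);

            // Maximum stack size and locals computed from the visited code.
            ClassWriter maxs = new ClassWriter(ClassWriter.COMPUTE_MAXS);

            // Stack map frames recomputed as well, which is what a transformer
            // typically wants when its changes invalidate the existing frames.
            ClassWriter frames = new ClassWriter(ClassWriter.COMPUTE_FRAMES);
        }
    }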
@@ -606,26 +613,32 @@ public class ClassWriter extends ClassVisitor {
* "mostly add" bytecode transformations. These optimizations are the
* following:
*
- * <ul> <li>The constant pool from the original class is copied as is in the
- * new class, which saves time. New constant pool entries will be added at
- * the end if necessary, but unused constant pool entries <i>won't be
- * removed</i>.</li> <li>Methods that are not transformed are copied as is
- * in the new class, directly from the original class bytecode (i.e. without
- * emitting visit events for all the method instructions), which saves a
- * <i>lot</i> of time. Untransformed methods are detected by the fact that
- * the {@link ClassReader} receives {@link MethodVisitor} objects that come
- * from a {@link ClassWriter} (and not from any other {@link ClassVisitor}
- * instance).</li> </ul>
+ * <ul>
+ * <li>The constant pool from the original class is copied as is in the new
+ * class, which saves time. New constant pool entries will be added at the
+ * end if necessary, but unused constant pool entries <i>won't be
+ * removed</i>.</li>
+ * <li>Methods that are not transformed are copied as is in the new class,
+ * directly from the original class bytecode (i.e. without emitting visit
+ * events for all the method instructions), which saves a <i>lot</i> of
+ * time. Untransformed methods are detected by the fact that the
+ * {@link ClassReader} receives {@link MethodVisitor} objects that come from
+ * a {@link ClassWriter} (and not from any other {@link ClassVisitor}
+ * instance).</li>
+ * </ul>
*
- * @param classReader the {@link ClassReader} used to read the original
- * class. It will be used to copy the entire constant pool from the
- * original class and also to copy other fragments of original
- * bytecode where applicable.
- * @param flags option flags that can be used to modify the default behavior
- * of this class. <i>These option flags do not affect methods that
- * are copied as is in the new class. This means that the maximum
- * stack size nor the stack frames will be computed for these
- * methods</i>. See {@link #COMPUTE_MAXS}, {@link #COMPUTE_FRAMES}.
+ * @param classReader
+ * the {@link ClassReader} used to read the original class. It
+ * will be used to copy the entire constant pool from the
+ * original class and also to copy other fragments of original
+ * bytecode where applicable.
+ * @param flags
+ * option flags that can be used to modify the default behavior
+ * of this class. <i>These option flags do not affect methods
+     *            that are copied as is in the new class. This means that neither
+     *            the maximum stack size nor the stack frames will be computed for
+ * these methods</i>. See {@link #COMPUTE_MAXS},
+ * {@link #COMPUTE_FRAMES}.
*/
public ClassWriter(final ClassReader classReader, final int flags) {
this(flags);
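A minimal sketch of the copy-optimised round trip this constructor is meant for (illustrative only; the byte array is assumed to contain a well-formed class file):

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.ClassWriter;

    public class CopySketch {
        // Rewrites a class without transforming it: the constant pool and any
        // untouched methods are copied verbatim from the reader, as described
        // above, instead of being re-serialised event by event.
        public static byte[] passThrough(byte[] classFile) {
            ClassReader cr = new ClassReader(classFile);
            ClassWriter cw = new ClassWriter(cr, 0);
            cr.accept(cw, 0);   // forward every visit event straight to the writer
            return cw.toByteArray();
        }
    }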
@@ -638,14 +651,9 @@ public class ClassWriter extends ClassVisitor {
// ------------------------------------------------------------------------
@Override
- public final void visit(
- final int version,
- final int access,
- final String name,
- final String signature,
- final String superName,
- final String[] interfaces)
- {
+ public final void visit(final int version, final int access,
+ final String name, final String signature, final String superName,
+ final String[] interfaces) {
this.version = version;
this.access = access;
this.name = newClass(name);
@@ -674,11 +682,8 @@ public class ClassWriter extends ClassVisitor {
}
@Override
- public final void visitOuterClass(
- final String owner,
- final String name,
- final String desc)
- {
+ public final void visitOuterClass(final String owner, final String name,
+ final String desc) {
enclosingMethodOwner = newClass(owner);
if (name != null && desc != null) {
enclosingMethod = newNameType(name, desc);
@@ -686,10 +691,8 @@ public class ClassWriter extends ClassVisitor {
}
@Override
- public final AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public final AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
if (!ClassReader.ANNOTATIONS) {
return null;
}
@@ -714,12 +717,8 @@ public class ClassWriter extends ClassVisitor {
}
@Override
- public final void visitInnerClass(
- final String name,
- final String outerName,
- final String innerName,
- final int access)
- {
+ public final void visitInnerClass(final String name,
+ final String outerName, final String innerName, final int access) {
if (innerClasses == null) {
innerClasses = new ByteVector();
}
@@ -731,32 +730,16 @@ public class ClassWriter extends ClassVisitor {
}
@Override
- public final FieldVisitor visitField(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final Object value)
- {
+ public final FieldVisitor visitField(final int access, final String name,
+ final String desc, final String signature, final Object value) {
return new FieldWriter(this, access, name, desc, signature, value);
}
@Override
- public final MethodVisitor visitMethod(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final String[] exceptions)
- {
- return new MethodWriter(this,
- access,
- name,
- desc,
- signature,
- exceptions,
- computeMaxs,
- computeFrames);
+ public final MethodVisitor visitMethod(final int access, final String name,
+ final String desc, final String signature, final String[] exceptions) {
+ return new MethodWriter(this, access, name, desc, signature,
+ exceptions, computeMaxs, computeFrames);
}
@Override
@@ -773,7 +756,7 @@ public class ClassWriter extends ClassVisitor {
* @return the bytecode of the class that was build with this class writer.
*/
public byte[] toByteArray() {
- if (index > Short.MAX_VALUE) {
+ if (index > 0xFFFF) {
throw new RuntimeException("Class file too large!");
}
// computes the real size of the bytecode of this class
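A short aside on the changed guard (illustrative, not text from the sources): the constant pool count is an unsigned 16-bit quantity, so the usable range runs to 65535 rather than the 32767 that the old Short.MAX_VALUE test allowed:

    public class PoolLimitSketch {
        public static void main(String[] args) {
            System.out.println(Short.MAX_VALUE); // 32767, the old limit
            System.out.println(0xFFFF);          // 65535, capacity of a u2 count
        }
    }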
@@ -793,8 +776,9 @@ public class ClassWriter extends ClassVisitor {
mb = (MethodWriter) mb.mv;
}
int attributeCount = 0;
- if (bootstrapMethods != null) { // we put it as first argument in order
- // to improve a bit ClassReader.copyBootstrapMethods
+ if (bootstrapMethods != null) {
+ // we put it as first attribute in order to improve a bit
+ // ClassReader.copyBootstrapMethods
++attributeCount;
size += 8 + bootstrapMethods.length;
newUTF8("BootstrapMethods");
@@ -824,12 +808,13 @@ public class ClassWriter extends ClassVisitor {
size += 6;
newUTF8("Deprecated");
}
- if ((access & Opcodes.ACC_SYNTHETIC) != 0
- && ((version & 0xFFFF) < Opcodes.V1_5 || (access & ACC_SYNTHETIC_ATTRIBUTE) != 0))
- {
- ++attributeCount;
- size += 6;
- newUTF8("Synthetic");
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+ if ((version & 0xFFFF) < Opcodes.V1_5
+ || (access & ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+ ++attributeCount;
+ size += 6;
+ newUTF8("Synthetic");
+ }
}
if (innerClasses != null) {
++attributeCount;
@@ -856,9 +841,8 @@ public class ClassWriter extends ClassVisitor {
ByteVector out = new ByteVector(size);
out.putInt(0xCAFEBABE).putInt(version);
out.putShort(index).putByteArray(pool.data, 0, pool.length);
- int mask = Opcodes.ACC_DEPRECATED
- | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
- | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / (ClassWriter.ACC_SYNTHETIC_ATTRIBUTE / Opcodes.ACC_SYNTHETIC));
+ int mask = Opcodes.ACC_DEPRECATED | ACC_SYNTHETIC_ATTRIBUTE
+ | ((access & ACC_SYNTHETIC_ATTRIBUTE) / TO_ACC_SYNTHETIC);
out.putShort(access & ~mask).putShort(name).putShort(superName);
out.putShort(interfaceCount);
for (int i = 0; i < interfaceCount; ++i) {
@@ -877,9 +861,10 @@ public class ClassWriter extends ClassVisitor {
mb = (MethodWriter) mb.mv;
}
out.putShort(attributeCount);
- if (bootstrapMethods != null) { // should be the first class attribute ?
+ if (bootstrapMethods != null) {
out.putShort(newUTF8("BootstrapMethods"));
- out.putInt(bootstrapMethods.length + 2).putShort(bootstrapMethodsCount);
+ out.putInt(bootstrapMethods.length + 2).putShort(
+ bootstrapMethodsCount);
out.putByteArray(bootstrapMethods.data, 0, bootstrapMethods.length);
}
if (ClassReader.SIGNATURES && signature != 0) {
@@ -900,10 +885,11 @@ public class ClassWriter extends ClassVisitor {
if ((access & Opcodes.ACC_DEPRECATED) != 0) {
out.putShort(newUTF8("Deprecated")).putInt(0);
}
- if ((access & Opcodes.ACC_SYNTHETIC) != 0
- && ((version & 0xFFFF) < Opcodes.V1_5 || (access & ACC_SYNTHETIC_ATTRIBUTE) != 0))
- {
- out.putShort(newUTF8("Synthetic")).putInt(0);
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+ if ((version & 0xFFFF) < Opcodes.V1_5
+ || (access & ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+ out.putShort(newUTF8("Synthetic")).putInt(0);
+ }
}
if (innerClasses != null) {
out.putShort(newUTF8("InnerClasses"));
@@ -937,10 +923,11 @@ public class ClassWriter extends ClassVisitor {
* Adds a number or string constant to the constant pool of the class being
* build. Does nothing if the constant pool already contains a similar item.
*
- * @param cst the value of the constant to be added to the constant pool.
- * This parameter must be an {@link Integer}, a {@link Float}, a
- * {@link Long}, a {@link Double}, a {@link String} or a
- * {@link Type}.
+ * @param cst
+ * the value of the constant to be added to the constant pool.
+ * This parameter must be an {@link Integer}, a {@link Float}, a
+ * {@link Long}, a {@link Double}, a {@link String} or a
+ * {@link Type}.
* @return a new or already existing constant item with the given value.
*/
Item newConstItem(final Object cst) {
@@ -973,12 +960,12 @@ public class ClassWriter extends ClassVisitor {
} else if (cst instanceof Type) {
Type t = (Type) cst;
int s = t.getSort();
- if (s == Type.ARRAY) {
- return newClassItem(t.getDescriptor());
- } else if (s == Type.OBJECT) {
+ if (s == Type.OBJECT) {
return newClassItem(t.getInternalName());
- } else { // s == Type.METHOD
+ } else if (s == Type.METHOD) {
return newMethodTypeItem(t.getDescriptor());
+ } else { // s == primitive type or array
+ return newClassItem(t.getDescriptor());
}
} else if (cst instanceof Handle) {
Handle h = (Handle) cst;
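An illustrative use of the reordered Type handling above via the public newConst wrapper (sketch only; the descriptors are arbitrary examples):

    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.Type;

    public class NewConstSketch {
        public static void main(String[] args) {
            ClassWriter cw = new ClassWriter(0);
            // OBJECT sort: stored as a class reference by internal name.
            int obj = cw.newConst(Type.getObjectType("java/lang/String"));
            // ARRAY sort: now falls through to the descriptor-based branch.
            int arr = cw.newConst(Type.getType("[I"));
            // METHOD sort: stored as a CONSTANT_MethodType item.
            int mt  = cw.newConst(Type.getMethodType("(I)V"));
            System.out.println(obj + " " + arr + " " + mt);
        }
    }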
@@ -994,9 +981,10 @@ public class ClassWriter extends ClassVisitor {
* <i>This method is intended for {@link Attribute} sub classes, and is
* normally not needed by class generators or adapters.</i>
*
- * @param cst the value of the constant to be added to the constant pool.
- * This parameter must be an {@link Integer}, a {@link Float}, a
- * {@link Long}, a {@link Double} or a {@link String}.
+ * @param cst
+ * the value of the constant to be added to the constant pool.
+ * This parameter must be an {@link Integer}, a {@link Float}, a
+ * {@link Long}, a {@link Double} or a {@link String}.
* @return the index of a new or already existing constant item with the
* given value.
*/
@@ -1010,7 +998,8 @@ public class ClassWriter extends ClassVisitor {
* method is intended for {@link Attribute} sub classes, and is normally not
* needed by class generators or adapters.</i>
*
- * @param value the String value.
+ * @param value
+ * the String value.
* @return the index of a new or already existing UTF8 item.
*/
public int newUTF8(final String value) {
@@ -1030,7 +1019,8 @@ public class ClassWriter extends ClassVisitor {
* <i>This method is intended for {@link Attribute} sub classes, and is
* normally not needed by class generators or adapters.</i>
*
- * @param value the internal name of the class.
+ * @param value
+ * the internal name of the class.
* @return a new or already existing class reference item.
*/
Item newClassItem(final String value) {
@@ -1050,7 +1040,8 @@ public class ClassWriter extends ClassVisitor {
* <i>This method is intended for {@link Attribute} sub classes, and is
* normally not needed by class generators or adapters.</i>
*
- * @param value the internal name of the class.
+ * @param value
+ * the internal name of the class.
* @return the index of a new or already existing class reference item.
*/
public int newClass(final String value) {
@@ -1063,7 +1054,8 @@ public class ClassWriter extends ClassVisitor {
* <i>This method is intended for {@link Attribute} sub classes, and is
* normally not needed by class generators or adapters.</i>
*
- * @param methodDesc method descriptor of the method type.
+ * @param methodDesc
+ * method descriptor of the method type.
* @return a new or already existing method type reference item.
*/
Item newMethodTypeItem(final String methodDesc) {
@@ -1083,7 +1075,8 @@ public class ClassWriter extends ClassVisitor {
* <i>This method is intended for {@link Attribute} sub classes, and is
* normally not needed by class generators or adapters.</i>
*
- * @param methodDesc method descriptor of the method type.
+ * @param methodDesc
+ * method descriptor of the method type.
* @return the index of a new or already existing method type reference
* item.
*/
@@ -1097,33 +1090,34 @@ public class ClassWriter extends ClassVisitor {
* intended for {@link Attribute} sub classes, and is normally not needed by
* class generators or adapters.</i>
*
- * @param tag the kind of this handle. Must be {@link Opcodes#H_GETFIELD},
- * {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD},
- * {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL},
- * {@link Opcodes#H_INVOKESTATIC}, {@link Opcodes#H_INVOKESPECIAL},
- * {@link Opcodes#H_NEWINVOKESPECIAL} or
- * {@link Opcodes#H_INVOKEINTERFACE}.
- * @param owner the internal name of the field or method owner class.
- * @param name the name of the field or method.
- * @param desc the descriptor of the field or method.
+ * @param tag
+ * the kind of this handle. Must be {@link Opcodes#H_GETFIELD},
+ * {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD},
+ * {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL},
+ * {@link Opcodes#H_INVOKESTATIC},
+ * {@link Opcodes#H_INVOKESPECIAL},
+ * {@link Opcodes#H_NEWINVOKESPECIAL} or
+ * {@link Opcodes#H_INVOKEINTERFACE}.
+ * @param owner
+ * the internal name of the field or method owner class.
+ * @param name
+ * the name of the field or method.
+ * @param desc
+ * the descriptor of the field or method.
* @return a new or an already existing method type reference item.
*/
- Item newHandleItem(
- final int tag,
- final String owner,
- final String name,
- final String desc)
- {
+ Item newHandleItem(final int tag, final String owner, final String name,
+ final String desc) {
key4.set(HANDLE_BASE + tag, owner, name, desc);
Item result = get(key4);
if (result == null) {
if (tag <= Opcodes.H_PUTSTATIC) {
put112(HANDLE, tag, newField(owner, name, desc));
} else {
- put112(HANDLE, tag, newMethod(owner,
- name,
- desc,
- tag == Opcodes.H_INVOKEINTERFACE));
+ put112(HANDLE,
+ tag,
+ newMethod(owner, name, desc,
+ tag == Opcodes.H_INVOKEINTERFACE));
}
result = new Item(index++, key4);
put(result);
@@ -1132,29 +1126,30 @@ public class ClassWriter extends ClassVisitor {
}
/**
- * Adds a handle to the constant pool of the class being
- * build. Does nothing if the constant pool already contains a similar item.
- * <i>This method is intended for {@link Attribute} sub classes, and is
- * normally not needed by class generators or adapters.</i>
+     * Adds a handle to the constant pool of the class being built. Does nothing
+ * if the constant pool already contains a similar item. <i>This method is
+ * intended for {@link Attribute} sub classes, and is normally not needed by
+ * class generators or adapters.</i>
*
- * @param tag the kind of this handle. Must be {@link Opcodes#H_GETFIELD},
- * {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD},
- * {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL},
- * {@link Opcodes#H_INVOKESTATIC}, {@link Opcodes#H_INVOKESPECIAL},
- * {@link Opcodes#H_NEWINVOKESPECIAL} or
- * {@link Opcodes#H_INVOKEINTERFACE}.
- * @param owner the internal name of the field or method owner class.
- * @param name the name of the field or method.
- * @param desc the descriptor of the field or method.
+ * @param tag
+ * the kind of this handle. Must be {@link Opcodes#H_GETFIELD},
+ * {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD},
+ * {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL},
+ * {@link Opcodes#H_INVOKESTATIC},
+ * {@link Opcodes#H_INVOKESPECIAL},
+ * {@link Opcodes#H_NEWINVOKESPECIAL} or
+ * {@link Opcodes#H_INVOKEINTERFACE}.
+ * @param owner
+ * the internal name of the field or method owner class.
+ * @param name
+ * the name of the field or method.
+ * @param desc
+ * the descriptor of the field or method.
* @return the index of a new or already existing method type reference
* item.
*/
- public int newHandle(
- final int tag,
- final String owner,
- final String name,
- final String desc)
- {
+ public int newHandle(final int tag, final String owner, final String name,
+ final String desc) {
return newHandleItem(tag, owner, name, desc).index;
}
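A hedged example of newHandle as documented above; the target method, java.lang.Integer.valueOf(int), is chosen purely for illustration:

    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.Opcodes;

    public class NewHandleSketch {
        public static void main(String[] args) {
            ClassWriter cw = new ClassWriter(0);
            // A CONSTANT_MethodHandle item pointing at a static method.
            int h = cw.newHandle(Opcodes.H_INVOKESTATIC,
                    "java/lang/Integer", "valueOf", "(I)Ljava/lang/Integer;");
            System.out.println("constant pool index: " + h);
        }
    }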
@@ -1164,19 +1159,19 @@ public class ClassWriter extends ClassVisitor {
* <i>This method is intended for {@link Attribute} sub classes, and is
* normally not needed by class generators or adapters.</i>
*
- * @param name name of the invoked method.
- * @param desc descriptor of the invoke method.
- * @param bsm the bootstrap method.
- * @param bsmArgs the bootstrap method constant arguments.
+ * @param name
+ * name of the invoked method.
+ * @param desc
+     *            descriptor of the invoked method.
+ * @param bsm
+ * the bootstrap method.
+ * @param bsmArgs
+ * the bootstrap method constant arguments.
*
* @return a new or an already existing invokedynamic type reference item.
*/
- Item newInvokeDynamicItem(
- final String name,
- final String desc,
- final Handle bsm,
- final Object... bsmArgs)
- {
+ Item newInvokeDynamicItem(final String name, final String desc,
+ final Handle bsm, final Object... bsmArgs) {
// cache for performance
ByteVector bootstrapMethods = this.bootstrapMethods;
if (bootstrapMethods == null) {
@@ -1186,9 +1181,7 @@ public class ClassWriter extends ClassVisitor {
int position = bootstrapMethods.length; // record current position
int hashCode = bsm.hashCode();
- bootstrapMethods.putShort(newHandle(bsm.tag,
- bsm.owner,
- bsm.name,
+ bootstrapMethods.putShort(newHandle(bsm.tag, bsm.owner, bsm.name,
bsm.desc));
int argsLength = bsmArgs.length;
@@ -1250,20 +1243,20 @@ public class ClassWriter extends ClassVisitor {
* <i>This method is intended for {@link Attribute} sub classes, and is
* normally not needed by class generators or adapters.</i>
*
- * @param name name of the invoked method.
- * @param desc descriptor of the invoke method.
- * @param bsm the bootstrap method.
- * @param bsmArgs the bootstrap method constant arguments.
+ * @param name
+ * name of the invoked method.
+ * @param desc
+     *            descriptor of the invoked method.
+ * @param bsm
+ * the bootstrap method.
+ * @param bsmArgs
+ * the bootstrap method constant arguments.
*
- * @return the index of a new or already existing invokedynamic
- * reference item.
- */
- public int newInvokeDynamic(
- final String name,
- final String desc,
- final Handle bsm,
- final Object... bsmArgs)
- {
+ * @return the index of a new or already existing invokedynamic reference
+ * item.
+ */
+ public int newInvokeDynamic(final String name, final String desc,
+ final Handle bsm, final Object... bsmArgs) {
return newInvokeDynamicItem(name, desc, bsm, bsmArgs).index;
}
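An illustrative sketch of newInvokeDynamic; the bootstrap owner demo/Bootstraps and the call name are invented, and the bootstrap descriptor is the usual Lookup/String/MethodType to CallSite shape:

    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.Handle;
    import scala.tools.asm.Opcodes;

    public class InvokeDynamicSketch {
        public static void main(String[] args) {
            ClassWriter cw = new ClassWriter(0);
            Handle bsm = new Handle(Opcodes.H_INVOKESTATIC,
                    "demo/Bootstraps",   // invented bootstrap owner
                    "bootstrap",
                    "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;"
                            + "Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;");
            // Registers the BootstrapMethods entry and the invokedynamic
            // constant, returning the latter's constant pool index.
            int id = cw.newInvokeDynamic("run", "()Ljava/lang/Runnable;", bsm);
            System.out.println("constant pool index: " + id);
        }
    }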
@@ -1271,13 +1264,15 @@ public class ClassWriter extends ClassVisitor {
* Adds a field reference to the constant pool of the class being build.
* Does nothing if the constant pool already contains a similar item.
*
- * @param owner the internal name of the field's owner class.
- * @param name the field's name.
- * @param desc the field's descriptor.
+ * @param owner
+ * the internal name of the field's owner class.
+ * @param name
+ * the field's name.
+ * @param desc
+ * the field's descriptor.
* @return a new or already existing field reference item.
*/
- Item newFieldItem(final String owner, final String name, final String desc)
- {
+ Item newFieldItem(final String owner, final String name, final String desc) {
key3.set(FIELD, owner, name, desc);
Item result = get(key3);
if (result == null) {
@@ -1294,13 +1289,15 @@ public class ClassWriter extends ClassVisitor {
* <i>This method is intended for {@link Attribute} sub classes, and is
* normally not needed by class generators or adapters.</i>
*
- * @param owner the internal name of the field's owner class.
- * @param name the field's name.
- * @param desc the field's descriptor.
+ * @param owner
+ * the internal name of the field's owner class.
+ * @param name
+ * the field's name.
+ * @param desc
+ * the field's descriptor.
* @return the index of a new or already existing field reference item.
*/
- public int newField(final String owner, final String name, final String desc)
- {
+ public int newField(final String owner, final String name, final String desc) {
return newFieldItem(owner, name, desc).index;
}
@@ -1308,18 +1305,18 @@ public class ClassWriter extends ClassVisitor {
* Adds a method reference to the constant pool of the class being build.
* Does nothing if the constant pool already contains a similar item.
*
- * @param owner the internal name of the method's owner class.
- * @param name the method's name.
- * @param desc the method's descriptor.
- * @param itf <tt>true</tt> if <tt>owner</tt> is an interface.
+ * @param owner
+ * the internal name of the method's owner class.
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor.
+ * @param itf
+ * <tt>true</tt> if <tt>owner</tt> is an interface.
* @return a new or already existing method reference item.
*/
- Item newMethodItem(
- final String owner,
- final String name,
- final String desc,
- final boolean itf)
- {
+ Item newMethodItem(final String owner, final String name,
+ final String desc, final boolean itf) {
int type = itf ? IMETH : METH;
key3.set(type, owner, name, desc);
Item result = get(key3);
@@ -1337,18 +1334,18 @@ public class ClassWriter extends ClassVisitor {
* <i>This method is intended for {@link Attribute} sub classes, and is
* normally not needed by class generators or adapters.</i>
*
- * @param owner the internal name of the method's owner class.
- * @param name the method's name.
- * @param desc the method's descriptor.
- * @param itf <tt>true</tt> if <tt>owner</tt> is an interface.
+ * @param owner
+ * the internal name of the method's owner class.
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor.
+ * @param itf
+ * <tt>true</tt> if <tt>owner</tt> is an interface.
* @return the index of a new or already existing method reference item.
*/
- public int newMethod(
- final String owner,
- final String name,
- final String desc,
- final boolean itf)
- {
+ public int newMethod(final String owner, final String name,
+ final String desc, final boolean itf) {
return newMethodItem(owner, name, desc, itf).index;
}
@@ -1356,7 +1353,8 @@ public class ClassWriter extends ClassVisitor {
* Adds an integer to the constant pool of the class being build. Does
* nothing if the constant pool already contains a similar item.
*
- * @param value the int value.
+ * @param value
+ * the int value.
* @return a new or already existing int item.
*/
Item newInteger(final int value) {
@@ -1374,7 +1372,8 @@ public class ClassWriter extends ClassVisitor {
* Adds a float to the constant pool of the class being build. Does nothing
* if the constant pool already contains a similar item.
*
- * @param value the float value.
+ * @param value
+ * the float value.
* @return a new or already existing float item.
*/
Item newFloat(final float value) {
@@ -1392,7 +1391,8 @@ public class ClassWriter extends ClassVisitor {
* Adds a long to the constant pool of the class being build. Does nothing
* if the constant pool already contains a similar item.
*
- * @param value the long value.
+ * @param value
+ * the long value.
* @return a new or already existing long item.
*/
Item newLong(final long value) {
@@ -1411,7 +1411,8 @@ public class ClassWriter extends ClassVisitor {
* Adds a double to the constant pool of the class being build. Does nothing
* if the constant pool already contains a similar item.
*
- * @param value the double value.
+ * @param value
+ * the double value.
* @return a new or already existing double item.
*/
Item newDouble(final double value) {
@@ -1430,7 +1431,8 @@ public class ClassWriter extends ClassVisitor {
* Adds a string to the constant pool of the class being build. Does nothing
* if the constant pool already contains a similar item.
*
- * @param value the String value.
+ * @param value
+ * the String value.
* @return a new or already existing string item.
*/
private Item newString(final String value) {
@@ -1450,8 +1452,10 @@ public class ClassWriter extends ClassVisitor {
* method is intended for {@link Attribute} sub classes, and is normally not
* needed by class generators or adapters.</i>
*
- * @param name a name.
- * @param desc a type descriptor.
+ * @param name
+ * a name.
+ * @param desc
+ * a type descriptor.
* @return the index of a new or already existing name and type item.
*/
public int newNameType(final String name, final String desc) {
@@ -1462,8 +1466,10 @@ public class ClassWriter extends ClassVisitor {
* Adds a name and type to the constant pool of the class being build. Does
* nothing if the constant pool already contains a similar item.
*
- * @param name a name.
- * @param desc a type descriptor.
+ * @param name
+ * a name.
+ * @param desc
+ * a type descriptor.
* @return a new or already existing name and type item.
*/
Item newNameTypeItem(final String name, final String desc) {
@@ -1481,7 +1487,8 @@ public class ClassWriter extends ClassVisitor {
* Adds the given internal name to {@link #typeTable} and returns its index.
* Does nothing if the type table already contains this internal name.
*
- * @param type the internal name to be added to the type table.
+ * @param type
+ * the internal name to be added to the type table.
* @return the index of this internal name in the type table.
*/
int addType(final String type) {
@@ -1498,9 +1505,11 @@ public class ClassWriter extends ClassVisitor {
* index. This method is used for UNINITIALIZED types, made of an internal
* name and a bytecode offset.
*
- * @param type the internal name to be added to the type table.
- * @param offset the bytecode offset of the NEW instruction that created
- * this UNINITIALIZED type value.
+ * @param type
+ * the internal name to be added to the type table.
+ * @param offset
+ * the bytecode offset of the NEW instruction that created this
+ * UNINITIALIZED type value.
* @return the index of this internal name in the type table.
*/
int addUninitializedType(final String type, final int offset) {
@@ -1518,7 +1527,8 @@ public class ClassWriter extends ClassVisitor {
/**
* Adds the given Item to {@link #typeTable}.
*
- * @param item the value to be added to the type table.
+ * @param item
+ * the value to be added to the type table.
* @return the added Item, which a new Item instance with the same value as
* the given Item.
*/
@@ -1544,8 +1554,10 @@ public class ClassWriter extends ClassVisitor {
* {@link #items} hash table to speedup future calls with the same
* parameters.
*
- * @param type1 index of an internal name in {@link #typeTable}.
- * @param type2 index of an internal name in {@link #typeTable}.
+ * @param type1
+ * index of an internal name in {@link #typeTable}.
+ * @param type2
+ * index of an internal name in {@link #typeTable}.
* @return the index of the common super type of the two given types.
*/
int getMergedType(final int type1, final int type2) {
@@ -1572,13 +1584,14 @@ public class ClassWriter extends ClassVisitor {
* that is currently being generated by this ClassWriter, which can of
* course not be loaded since it is under construction.
*
- * @param type1 the internal name of a class.
- * @param type2 the internal name of another class.
+ * @param type1
+ * the internal name of a class.
+ * @param type2
+ * the internal name of another class.
* @return the internal name of the common super class of the two given
* classes.
*/
- protected String getCommonSuperClass(final String type1, final String type2)
- {
+ protected String getCommonSuperClass(final String type1, final String type2) {
Class<?> c, d;
ClassLoader classLoader = getClass().getClassLoader();
try {
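A common way to sidestep the reflective class loading described above is to override the hook with a conservative answer; this is an illustrative pattern, not something introduced by this patch:

    import scala.tools.asm.ClassWriter;

    // Returns java/lang/Object for every merge instead of loading classes
    // through the writer's class loader. The resulting frames are coarser;
    // tools that need precise merges usually answer from their own symbol
    // table instead.
    public class LoaderFreeClassWriter extends ClassWriter {
        public LoaderFreeClassWriter(int flags) {
            super(flags);
        }

        @Override
        protected String getCommonSuperClass(String type1, String type2) {
            return "java/lang/Object";
        }
    }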
@@ -1607,7 +1620,8 @@ public class ClassWriter extends ClassVisitor {
* Returns the constant pool's hash table item which is equal to the given
* item.
*
- * @param key a constant pool item.
+ * @param key
+ * a constant pool item.
* @return the constant pool's hash table item which is equal to the given
* item, or <tt>null</tt> if there is no such item.
*/
@@ -1623,7 +1637,8 @@ public class ClassWriter extends ClassVisitor {
* Puts the given item in the constant pool's hash table. The hash table
* <i>must</i> not already contains this item.
*
- * @param i the item to be added to the constant pool's hash table.
+ * @param i
+ * the item to be added to the constant pool's hash table.
*/
private void put(final Item i) {
if (index + typeCount > threshold) {
@@ -1651,9 +1666,12 @@ public class ClassWriter extends ClassVisitor {
/**
* Puts one byte and two shorts into the constant pool.
*
- * @param b a byte.
- * @param s1 a short.
- * @param s2 another short.
+ * @param b
+ * a byte.
+ * @param s1
+ * a short.
+ * @param s2
+ * another short.
*/
private void put122(final int b, final int s1, final int s2) {
pool.put12(b, s1).putShort(s2);
@@ -1662,9 +1680,12 @@ public class ClassWriter extends ClassVisitor {
/**
* Puts two bytes and one short into the constant pool.
*
- * @param b1 a byte.
- * @param b2 another byte.
- * @param s a short.
+ * @param b1
+ * a byte.
+ * @param b2
+ * another byte.
+ * @param s
+ * a short.
*/
private void put112(final int b1, final int b2, final int s) {
pool.put11(b1, b2).putShort(s);
diff --git a/src/asm/scala/tools/asm/Context.java b/src/asm/scala/tools/asm/Context.java
new file mode 100644
index 0000000000..7b3a2ad9dd
--- /dev/null
+++ b/src/asm/scala/tools/asm/Context.java
@@ -0,0 +1,110 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package scala.tools.asm;
+
+/**
+ * Information about a class being parsed in a {@link ClassReader}.
+ *
+ * @author Eric Bruneton
+ */
+class Context {
+
+ /**
+ * Prototypes of the attributes that must be parsed for this class.
+ */
+ Attribute[] attrs;
+
+ /**
+ * The {@link ClassReader} option flags for the parsing of this class.
+ */
+ int flags;
+
+ /**
+ * The buffer used to read strings.
+ */
+ char[] buffer;
+
+ /**
+ * The start index of each bootstrap method.
+ */
+ int[] bootstrapMethods;
+
+ /**
+ * The access flags of the method currently being parsed.
+ */
+ int access;
+
+ /**
+ * The name of the method currently being parsed.
+ */
+ String name;
+
+ /**
+ * The descriptor of the method currently being parsed.
+ */
+ String desc;
+
+ /**
+ * The offset of the latest stack map frame that has been parsed.
+ */
+ int offset;
+
+ /**
+ * The encoding of the latest stack map frame that has been parsed.
+ */
+ int mode;
+
+ /**
+ * The number of locals in the latest stack map frame that has been parsed.
+ */
+ int localCount;
+
+ /**
+     * The number of locals in the latest stack map frame that has been parsed,
+ * minus the number of locals in the previous frame.
+ */
+ int localDiff;
+
+ /**
+ * The local values of the latest stack map frame that has been parsed.
+ */
+ Object[] local;
+
+ /**
+ * The stack size of the latest stack map frame that has been parsed.
+ */
+ int stackCount;
+
+ /**
+ * The stack values of the latest stack map frame that has been parsed.
+ */
+ Object[] stack;
+}
diff --git a/src/asm/scala/tools/asm/FieldVisitor.java b/src/asm/scala/tools/asm/FieldVisitor.java
index 9ac0f6236f..9171f331e5 100644
--- a/src/asm/scala/tools/asm/FieldVisitor.java
+++ b/src/asm/scala/tools/asm/FieldVisitor.java
@@ -30,9 +30,9 @@
package scala.tools.asm;
/**
- * A visitor to visit a Java field. The methods of this class must be called
- * in the following order: ( <tt>visitAnnotation</tt> |
- * <tt>visitAttribute</tt> )* <tt>visitEnd</tt>.
+ * A visitor to visit a Java field. The methods of this class must be called in
+ * the following order: ( <tt>visitAnnotation</tt> | <tt>visitAttribute</tt> )*
+ * <tt>visitEnd</tt>.
*
* @author Eric Bruneton
*/
@@ -53,8 +53,9 @@ public abstract class FieldVisitor {
/**
* Constructs a new {@link FieldVisitor}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
*/
public FieldVisitor(final int api) {
this(api, null);
@@ -63,15 +64,17 @@ public abstract class FieldVisitor {
/**
* Constructs a new {@link FieldVisitor}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
- * @param fv the field visitor to which this visitor must delegate method
- * calls. May be null.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param fv
+ * the field visitor to which this visitor must delegate method
+ * calls. May be null.
*/
public FieldVisitor(final int api, final FieldVisitor fv) {
- /*if (api != Opcodes.ASM4) {
+ if (api != Opcodes.ASM4) {
throw new IllegalArgumentException();
- }*/
+ }
this.api = api;
this.fv = fv;
}
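With the version check above restored, a delegating FieldVisitor has to be constructed with Opcodes.ASM4; a small sketch (the logging is purely illustrative):

    import scala.tools.asm.AnnotationVisitor;
    import scala.tools.asm.FieldVisitor;
    import scala.tools.asm.Opcodes;

    public class FieldVisitorSketch {
        public static FieldVisitor logging(FieldVisitor next) {
            // Any other api value now throws IllegalArgumentException.
            return new FieldVisitor(Opcodes.ASM4, next) {
                @Override
                public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
                    System.out.println("field annotation: " + desc);
                    return super.visitAnnotation(desc, visible);
                }
            };
        }
    }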
@@ -79,8 +82,10 @@ public abstract class FieldVisitor {
/**
* Visits an annotation of the field.
*
- * @param desc the class descriptor of the annotation class.
- * @param visible <tt>true</tt> if the annotation is visible at runtime.
+ * @param desc
+ * the class descriptor of the annotation class.
+ * @param visible
+ * <tt>true</tt> if the annotation is visible at runtime.
* @return a visitor to visit the annotation values, or <tt>null</tt> if
* this visitor is not interested in visiting this annotation.
*/
@@ -94,7 +99,8 @@ public abstract class FieldVisitor {
/**
* Visits a non standard attribute of the field.
*
- * @param attr an attribute.
+ * @param attr
+ * an attribute.
*/
public void visitAttribute(Attribute attr) {
if (fv != null) {
diff --git a/src/asm/scala/tools/asm/FieldWriter.java b/src/asm/scala/tools/asm/FieldWriter.java
index 45ef6d0df3..02c6059b91 100644
--- a/src/asm/scala/tools/asm/FieldWriter.java
+++ b/src/asm/scala/tools/asm/FieldWriter.java
@@ -92,21 +92,21 @@ final class FieldWriter extends FieldVisitor {
/**
* Constructs a new {@link FieldWriter}.
*
- * @param cw the class writer to which this field must be added.
- * @param access the field's access flags (see {@link Opcodes}).
- * @param name the field's name.
- * @param desc the field's descriptor (see {@link Type}).
- * @param signature the field's signature. May be <tt>null</tt>.
- * @param value the field's constant value. May be <tt>null</tt>.
+ * @param cw
+ * the class writer to which this field must be added.
+ * @param access
+ * the field's access flags (see {@link Opcodes}).
+ * @param name
+ * the field's name.
+ * @param desc
+ * the field's descriptor (see {@link Type}).
+ * @param signature
+ * the field's signature. May be <tt>null</tt>.
+ * @param value
+ * the field's constant value. May be <tt>null</tt>.
*/
- FieldWriter(
- final ClassWriter cw,
- final int access,
- final String name,
- final String desc,
- final String signature,
- final Object value)
- {
+ FieldWriter(final ClassWriter cw, final int access, final String name,
+ final String desc, final String signature, final Object value) {
super(Opcodes.ASM4);
if (cw.firstField == null) {
cw.firstField = this;
@@ -131,10 +131,8 @@ final class FieldWriter extends FieldVisitor {
// ------------------------------------------------------------------------
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
if (!ClassReader.ANNOTATIONS) {
return null;
}
@@ -177,11 +175,12 @@ final class FieldWriter extends FieldVisitor {
cw.newUTF8("ConstantValue");
size += 8;
}
- if ((access & Opcodes.ACC_SYNTHETIC) != 0
- && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
- {
- cw.newUTF8("Synthetic");
- size += 6;
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+ if ((cw.version & 0xFFFF) < Opcodes.V1_5
+ || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+ cw.newUTF8("Synthetic");
+ size += 6;
+ }
}
if ((access & Opcodes.ACC_DEPRECATED) != 0) {
cw.newUTF8("Deprecated");
@@ -208,21 +207,23 @@ final class FieldWriter extends FieldVisitor {
/**
* Puts the content of this field into the given byte vector.
*
- * @param out where the content of this field must be put.
+ * @param out
+ * where the content of this field must be put.
*/
void put(final ByteVector out) {
- int mask = Opcodes.ACC_DEPRECATED
- | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
- | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / (ClassWriter.ACC_SYNTHETIC_ATTRIBUTE / Opcodes.ACC_SYNTHETIC));
+ final int FACTOR = ClassWriter.TO_ACC_SYNTHETIC;
+ int mask = Opcodes.ACC_DEPRECATED | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+ | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / FACTOR);
out.putShort(access & ~mask).putShort(name).putShort(desc);
int attributeCount = 0;
if (value != 0) {
++attributeCount;
}
- if ((access & Opcodes.ACC_SYNTHETIC) != 0
- && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
- {
- ++attributeCount;
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+ if ((cw.version & 0xFFFF) < Opcodes.V1_5
+ || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+ ++attributeCount;
+ }
}
if ((access & Opcodes.ACC_DEPRECATED) != 0) {
++attributeCount;
@@ -244,10 +245,11 @@ final class FieldWriter extends FieldVisitor {
out.putShort(cw.newUTF8("ConstantValue"));
out.putInt(2).putShort(value);
}
- if ((access & Opcodes.ACC_SYNTHETIC) != 0
- && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
- {
- out.putShort(cw.newUTF8("Synthetic")).putInt(0);
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+ if ((cw.version & 0xFFFF) < Opcodes.V1_5
+ || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+ out.putShort(cw.newUTF8("Synthetic")).putInt(0);
+ }
}
if ((access & Opcodes.ACC_DEPRECATED) != 0) {
out.putShort(cw.newUTF8("Deprecated")).putInt(0);
diff --git a/src/asm/scala/tools/asm/Frame.java b/src/asm/scala/tools/asm/Frame.java
index 387b56796d..bcc3e8450b 100644
--- a/src/asm/scala/tools/asm/Frame.java
+++ b/src/asm/scala/tools/asm/Frame.java
@@ -80,13 +80,13 @@ final class Frame {
* table contains only internal type names (array type descriptors are
* forbidden - dimensions must be represented through the DIM field).
*
- * The LONG and DOUBLE types are always represented by using two slots (LONG +
- * TOP or DOUBLE + TOP), for local variable types as well as in the operand
- * stack. This is necessary to be able to simulate DUPx_y instructions,
- * whose effect would be dependent on the actual type values if types were
- * always represented by a single slot in the stack (and this is not
- * possible, since actual type values are not always known - cf LOCAL and
- * STACK type kinds).
+ * The LONG and DOUBLE types are always represented by using two slots (LONG
+ * + TOP or DOUBLE + TOP), for local variable types as well as in the
+ * operand stack. This is necessary to be able to simulate DUPx_y
+ * instructions, whose effect would be dependent on the actual type values
+ * if types were always represented by a single slot in the stack (and this
+ * is not possible, since actual type values are not always known - cf LOCAL
+ * and STACK type kinds).
*/
/**
@@ -117,9 +117,9 @@ final class Frame {
/**
* Flag used for LOCAL and STACK types. Indicates that if this type happens
* to be a long or double type (during the computations of input frames),
- * then it must be set to TOP because the second word of this value has
- * been reused to store other data in the basic block. Hence the first word
- * no longer stores a valid long or double value.
+ * then it must be set to TOP because the second word of this value has been
+ * reused to store other data in the basic block. Hence the first word no
+ * longer stores a valid long or double value.
*/
static final int TOP_IF_LONG_OR_DOUBLE = 0x800000;
@@ -523,7 +523,8 @@ final class Frame {
/**
* Returns the output frame local variable type at the given index.
*
- * @param local the index of the local that must be returned.
+ * @param local
+ * the index of the local that must be returned.
* @return the output frame local variable type at the given index.
*/
private int get(final int local) {
@@ -545,8 +546,10 @@ final class Frame {
/**
* Sets the output frame local variable type at the given index.
*
- * @param local the index of the local that must be set.
- * @param type the value of the local that must be set.
+ * @param local
+ * the index of the local that must be set.
+ * @param type
+ * the value of the local that must be set.
*/
private void set(final int local, final int type) {
// creates and/or resizes the output local variables array if necessary
@@ -566,7 +569,8 @@ final class Frame {
/**
* Pushes a new type onto the output frame stack.
*
- * @param type the type that must be pushed.
+ * @param type
+ * the type that must be pushed.
*/
private void push(final int type) {
// creates and/or resizes the output stack array if necessary
@@ -591,10 +595,12 @@ final class Frame {
/**
* Pushes a new type onto the output frame stack.
*
- * @param cw the ClassWriter to which this label belongs.
- * @param desc the descriptor of the type to be pushed. Can also be a method
- * descriptor (in this case this method pushes its return type onto
- * the output frame stack).
+ * @param cw
+ * the ClassWriter to which this label belongs.
+ * @param desc
+ * the descriptor of the type to be pushed. Can also be a method
+ * descriptor (in this case this method pushes its return type
+ * onto the output frame stack).
*/
private void push(final ClassWriter cw, final String desc) {
int type = type(cw, desc);
@@ -609,72 +615,74 @@ final class Frame {
/**
* Returns the int encoding of the given type.
*
- * @param cw the ClassWriter to which this label belongs.
- * @param desc a type descriptor.
+ * @param cw
+ * the ClassWriter to which this label belongs.
+ * @param desc
+ * a type descriptor.
* @return the int encoding of the given type.
*/
private static int type(final ClassWriter cw, final String desc) {
String t;
int index = desc.charAt(0) == '(' ? desc.indexOf(')') + 1 : 0;
switch (desc.charAt(index)) {
- case 'V':
- return 0;
+ case 'V':
+ return 0;
+ case 'Z':
+ case 'C':
+ case 'B':
+ case 'S':
+ case 'I':
+ return INTEGER;
+ case 'F':
+ return FLOAT;
+ case 'J':
+ return LONG;
+ case 'D':
+ return DOUBLE;
+ case 'L':
+ // stores the internal name, not the descriptor!
+ t = desc.substring(index + 1, desc.length() - 1);
+ return OBJECT | cw.addType(t);
+ // case '[':
+ default:
+ // extracts the dimensions and the element type
+ int data;
+ int dims = index + 1;
+ while (desc.charAt(dims) == '[') {
+ ++dims;
+ }
+ switch (desc.charAt(dims)) {
case 'Z':
+ data = BOOLEAN;
+ break;
case 'C':
+ data = CHAR;
+ break;
case 'B':
+ data = BYTE;
+ break;
case 'S':
+ data = SHORT;
+ break;
case 'I':
- return INTEGER;
+ data = INTEGER;
+ break;
case 'F':
- return FLOAT;
+ data = FLOAT;
+ break;
case 'J':
- return LONG;
+ data = LONG;
+ break;
case 'D':
- return DOUBLE;
- case 'L':
- // stores the internal name, not the descriptor!
- t = desc.substring(index + 1, desc.length() - 1);
- return OBJECT | cw.addType(t);
- // case '[':
+ data = DOUBLE;
+ break;
+ // case 'L':
default:
- // extracts the dimensions and the element type
- int data;
- int dims = index + 1;
- while (desc.charAt(dims) == '[') {
- ++dims;
- }
- switch (desc.charAt(dims)) {
- case 'Z':
- data = BOOLEAN;
- break;
- case 'C':
- data = CHAR;
- break;
- case 'B':
- data = BYTE;
- break;
- case 'S':
- data = SHORT;
- break;
- case 'I':
- data = INTEGER;
- break;
- case 'F':
- data = FLOAT;
- break;
- case 'J':
- data = LONG;
- break;
- case 'D':
- data = DOUBLE;
- break;
- // case 'L':
- default:
- // stores the internal name, not the descriptor
- t = desc.substring(dims + 1, desc.length() - 1);
- data = OBJECT | cw.addType(t);
- }
- return (dims - index) << 28 | data;
+ // stores the internal name, not the descriptor
+ t = desc.substring(dims + 1, desc.length() - 1);
+ data = OBJECT | cw.addType(t);
+ }
+ return (dims - index) << 28 | data;
}
}
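Editor's note: the reformatted type() hunk above packs a descriptor into an int as "array dimensions << 28 | base kind". Below is a minimal standalone sketch of that idea, using made-up constant values rather than Frame's real private encodings (in the real code OBJECT also carries an index into the ClassWriter type table).

    // Standalone sketch only: mirrors the descriptor parsing in the hunk above,
    // but the base-kind constants below are placeholders, not Frame's encodings.
    public class DescriptorSketch {
        static final int INTEGER = 1, FLOAT = 2, LONG = 3, DOUBLE = 4, OBJECT = 5;

        static int encode(String desc) {
            int index = desc.charAt(0) == '(' ? desc.indexOf(')') + 1 : 0;
            int dims = index;
            while (desc.charAt(dims) == '[') {
                ++dims;                              // count array dimensions
            }
            int data;
            switch (desc.charAt(dims)) {
            case 'Z': case 'C': case 'B': case 'S': case 'I':
                data = INTEGER; break;
            case 'F': data = FLOAT; break;
            case 'J': data = LONG; break;
            case 'D': data = DOUBLE; break;
            default:  data = OBJECT; break;          // 'L...;' reference types
            }
            return (dims - index) << 28 | data;      // top 4 bits = array dimensions
        }

        public static void main(String[] args) {
            System.out.println(Integer.toHexString(encode("[[I")));   // 20000001
            System.out.println(Integer.toHexString(encode("()D")));   // 4
        }
    }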
@@ -695,7 +703,8 @@ final class Frame {
/**
* Pops the given number of types from the output frame stack.
*
- * @param elements the number of types that must be popped.
+ * @param elements
+ * the number of types that must be popped.
*/
private void pop(final int elements) {
if (outputStackTop >= elements) {
@@ -712,9 +721,10 @@ final class Frame {
/**
* Pops a type from the output frame stack.
*
- * @param desc the descriptor of the type to be popped. Can also be a method
- * descriptor (in this case this method pops the types corresponding
- * to the method arguments).
+ * @param desc
+ * the descriptor of the type to be popped. Can also be a method
+ * descriptor (in this case this method pops the types
+ * corresponding to the method arguments).
*/
private void pop(final String desc) {
char c = desc.charAt(0);
@@ -731,7 +741,8 @@ final class Frame {
* Adds a new type to the list of types on which a constructor is invoked in
* the basic block.
*
- * @param var a type on a which a constructor is invoked.
+ * @param var
+ * a type on which a constructor is invoked.
*/
private void init(final int var) {
// creates and/or resizes the initializations array if necessary
@@ -752,8 +763,10 @@ final class Frame {
* Replaces the given type with the appropriate type if it is one of the
* types on which a constructor is invoked in the basic block.
*
- * @param cw the ClassWriter to which this label belongs.
- * @param t a type
+ * @param cw
+ * the ClassWriter to which this label belongs.
+ * @param t
+ * a type
* @return t or, if t is one of the types on which a constructor is invoked
* in the basic block, the type corresponding to this constructor.
*/
@@ -787,17 +800,17 @@ final class Frame {
* Initializes the input frame of the first basic block from the method
* descriptor.
*
- * @param cw the ClassWriter to which this label belongs.
- * @param access the access flags of the method to which this label belongs.
- * @param args the formal parameter types of this method.
- * @param maxLocals the maximum number of local variables of this method.
+ * @param cw
+ * the ClassWriter to which this label belongs.
+ * @param access
+ * the access flags of the method to which this label belongs.
+ * @param args
+ * the formal parameter types of this method.
+ * @param maxLocals
+ * the maximum number of local variables of this method.
*/
- void initInputFrame(
- final ClassWriter cw,
- final int access,
- final Type[] args,
- final int maxLocals)
- {
+ void initInputFrame(final ClassWriter cw, final int access,
+ final Type[] args, final int maxLocals) {
inputLocals = new int[maxLocals];
inputStack = new int[0];
int i = 0;
@@ -823,435 +836,435 @@ final class Frame {
/**
* Simulates the action of the given instruction on the output stack frame.
*
- * @param opcode the opcode of the instruction.
- * @param arg the operand of the instruction, if any.
- * @param cw the class writer to which this label belongs.
- * @param item the operand of the instructions, if any.
+ * @param opcode
+ * the opcode of the instruction.
+ * @param arg
+ * the operand of the instruction, if any.
+ * @param cw
+ * the class writer to which this label belongs.
+ * @param item
+ * the operand of the instructions, if any.
*/
- void execute(
- final int opcode,
- final int arg,
- final ClassWriter cw,
- final Item item)
- {
+ void execute(final int opcode, final int arg, final ClassWriter cw,
+ final Item item) {
int t1, t2, t3, t4;
switch (opcode) {
- case Opcodes.NOP:
- case Opcodes.INEG:
- case Opcodes.LNEG:
- case Opcodes.FNEG:
- case Opcodes.DNEG:
- case Opcodes.I2B:
- case Opcodes.I2C:
- case Opcodes.I2S:
- case Opcodes.GOTO:
- case Opcodes.RETURN:
- break;
- case Opcodes.ACONST_NULL:
- push(NULL);
- break;
- case Opcodes.ICONST_M1:
- case Opcodes.ICONST_0:
- case Opcodes.ICONST_1:
- case Opcodes.ICONST_2:
- case Opcodes.ICONST_3:
- case Opcodes.ICONST_4:
- case Opcodes.ICONST_5:
- case Opcodes.BIPUSH:
- case Opcodes.SIPUSH:
- case Opcodes.ILOAD:
+ case Opcodes.NOP:
+ case Opcodes.INEG:
+ case Opcodes.LNEG:
+ case Opcodes.FNEG:
+ case Opcodes.DNEG:
+ case Opcodes.I2B:
+ case Opcodes.I2C:
+ case Opcodes.I2S:
+ case Opcodes.GOTO:
+ case Opcodes.RETURN:
+ break;
+ case Opcodes.ACONST_NULL:
+ push(NULL);
+ break;
+ case Opcodes.ICONST_M1:
+ case Opcodes.ICONST_0:
+ case Opcodes.ICONST_1:
+ case Opcodes.ICONST_2:
+ case Opcodes.ICONST_3:
+ case Opcodes.ICONST_4:
+ case Opcodes.ICONST_5:
+ case Opcodes.BIPUSH:
+ case Opcodes.SIPUSH:
+ case Opcodes.ILOAD:
+ push(INTEGER);
+ break;
+ case Opcodes.LCONST_0:
+ case Opcodes.LCONST_1:
+ case Opcodes.LLOAD:
+ push(LONG);
+ push(TOP);
+ break;
+ case Opcodes.FCONST_0:
+ case Opcodes.FCONST_1:
+ case Opcodes.FCONST_2:
+ case Opcodes.FLOAD:
+ push(FLOAT);
+ break;
+ case Opcodes.DCONST_0:
+ case Opcodes.DCONST_1:
+ case Opcodes.DLOAD:
+ push(DOUBLE);
+ push(TOP);
+ break;
+ case Opcodes.LDC:
+ switch (item.type) {
+ case ClassWriter.INT:
push(INTEGER);
break;
- case Opcodes.LCONST_0:
- case Opcodes.LCONST_1:
- case Opcodes.LLOAD:
+ case ClassWriter.LONG:
push(LONG);
push(TOP);
break;
- case Opcodes.FCONST_0:
- case Opcodes.FCONST_1:
- case Opcodes.FCONST_2:
- case Opcodes.FLOAD:
+ case ClassWriter.FLOAT:
push(FLOAT);
break;
- case Opcodes.DCONST_0:
- case Opcodes.DCONST_1:
- case Opcodes.DLOAD:
+ case ClassWriter.DOUBLE:
push(DOUBLE);
push(TOP);
break;
- case Opcodes.LDC:
- switch (item.type) {
- case ClassWriter.INT:
- push(INTEGER);
- break;
- case ClassWriter.LONG:
- push(LONG);
- push(TOP);
- break;
- case ClassWriter.FLOAT:
- push(FLOAT);
- break;
- case ClassWriter.DOUBLE:
- push(DOUBLE);
- push(TOP);
- break;
- case ClassWriter.CLASS:
- push(OBJECT | cw.addType("java/lang/Class"));
- break;
- case ClassWriter.STR:
- push(OBJECT | cw.addType("java/lang/String"));
- break;
- case ClassWriter.MTYPE:
- push(OBJECT | cw.addType("java/lang/invoke/MethodType"));
- break;
- // case ClassWriter.HANDLE_BASE + [1..9]:
- default:
- push(OBJECT | cw.addType("java/lang/invoke/MethodHandle"));
- }
- break;
- case Opcodes.ALOAD:
- push(get(arg));
- break;
- case Opcodes.IALOAD:
- case Opcodes.BALOAD:
- case Opcodes.CALOAD:
- case Opcodes.SALOAD:
- pop(2);
- push(INTEGER);
- break;
- case Opcodes.LALOAD:
- case Opcodes.D2L:
- pop(2);
- push(LONG);
- push(TOP);
+ case ClassWriter.CLASS:
+ push(OBJECT | cw.addType("java/lang/Class"));
break;
- case Opcodes.FALOAD:
- pop(2);
- push(FLOAT);
+ case ClassWriter.STR:
+ push(OBJECT | cw.addType("java/lang/String"));
break;
- case Opcodes.DALOAD:
- case Opcodes.L2D:
- pop(2);
- push(DOUBLE);
- push(TOP);
+ case ClassWriter.MTYPE:
+ push(OBJECT | cw.addType("java/lang/invoke/MethodType"));
break;
- case Opcodes.AALOAD:
- pop(1);
- t1 = pop();
- push(ELEMENT_OF + t1);
- break;
- case Opcodes.ISTORE:
- case Opcodes.FSTORE:
- case Opcodes.ASTORE:
- t1 = pop();
- set(arg, t1);
- if (arg > 0) {
- t2 = get(arg - 1);
- // if t2 is of kind STACK or LOCAL we cannot know its size!
- if (t2 == LONG || t2 == DOUBLE) {
- set(arg - 1, TOP);
- } else if ((t2 & KIND) != BASE) {
- set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE);
- }
+ // case ClassWriter.HANDLE_BASE + [1..9]:
+ default:
+ push(OBJECT | cw.addType("java/lang/invoke/MethodHandle"));
+ }
+ break;
+ case Opcodes.ALOAD:
+ push(get(arg));
+ break;
+ case Opcodes.IALOAD:
+ case Opcodes.BALOAD:
+ case Opcodes.CALOAD:
+ case Opcodes.SALOAD:
+ pop(2);
+ push(INTEGER);
+ break;
+ case Opcodes.LALOAD:
+ case Opcodes.D2L:
+ pop(2);
+ push(LONG);
+ push(TOP);
+ break;
+ case Opcodes.FALOAD:
+ pop(2);
+ push(FLOAT);
+ break;
+ case Opcodes.DALOAD:
+ case Opcodes.L2D:
+ pop(2);
+ push(DOUBLE);
+ push(TOP);
+ break;
+ case Opcodes.AALOAD:
+ pop(1);
+ t1 = pop();
+ push(ELEMENT_OF + t1);
+ break;
+ case Opcodes.ISTORE:
+ case Opcodes.FSTORE:
+ case Opcodes.ASTORE:
+ t1 = pop();
+ set(arg, t1);
+ if (arg > 0) {
+ t2 = get(arg - 1);
+ // if t2 is of kind STACK or LOCAL we cannot know its size!
+ if (t2 == LONG || t2 == DOUBLE) {
+ set(arg - 1, TOP);
+ } else if ((t2 & KIND) != BASE) {
+ set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE);
}
- break;
- case Opcodes.LSTORE:
- case Opcodes.DSTORE:
- pop(1);
- t1 = pop();
- set(arg, t1);
- set(arg + 1, TOP);
- if (arg > 0) {
- t2 = get(arg - 1);
- // if t2 is of kind STACK or LOCAL we cannot know its size!
- if (t2 == LONG || t2 == DOUBLE) {
- set(arg - 1, TOP);
- } else if ((t2 & KIND) != BASE) {
- set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE);
- }
+ }
+ break;
+ case Opcodes.LSTORE:
+ case Opcodes.DSTORE:
+ pop(1);
+ t1 = pop();
+ set(arg, t1);
+ set(arg + 1, TOP);
+ if (arg > 0) {
+ t2 = get(arg - 1);
+ // if t2 is of kind STACK or LOCAL we cannot know its size!
+ if (t2 == LONG || t2 == DOUBLE) {
+ set(arg - 1, TOP);
+ } else if ((t2 & KIND) != BASE) {
+ set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE);
}
- break;
- case Opcodes.IASTORE:
- case Opcodes.BASTORE:
- case Opcodes.CASTORE:
- case Opcodes.SASTORE:
- case Opcodes.FASTORE:
- case Opcodes.AASTORE:
- pop(3);
- break;
- case Opcodes.LASTORE:
- case Opcodes.DASTORE:
- pop(4);
- break;
- case Opcodes.POP:
- case Opcodes.IFEQ:
- case Opcodes.IFNE:
- case Opcodes.IFLT:
- case Opcodes.IFGE:
- case Opcodes.IFGT:
- case Opcodes.IFLE:
- case Opcodes.IRETURN:
- case Opcodes.FRETURN:
- case Opcodes.ARETURN:
- case Opcodes.TABLESWITCH:
- case Opcodes.LOOKUPSWITCH:
- case Opcodes.ATHROW:
- case Opcodes.MONITORENTER:
- case Opcodes.MONITOREXIT:
- case Opcodes.IFNULL:
- case Opcodes.IFNONNULL:
- pop(1);
- break;
- case Opcodes.POP2:
- case Opcodes.IF_ICMPEQ:
- case Opcodes.IF_ICMPNE:
- case Opcodes.IF_ICMPLT:
- case Opcodes.IF_ICMPGE:
- case Opcodes.IF_ICMPGT:
- case Opcodes.IF_ICMPLE:
- case Opcodes.IF_ACMPEQ:
- case Opcodes.IF_ACMPNE:
- case Opcodes.LRETURN:
- case Opcodes.DRETURN:
- pop(2);
- break;
- case Opcodes.DUP:
- t1 = pop();
- push(t1);
- push(t1);
- break;
- case Opcodes.DUP_X1:
- t1 = pop();
- t2 = pop();
- push(t1);
- push(t2);
- push(t1);
- break;
- case Opcodes.DUP_X2:
- t1 = pop();
- t2 = pop();
- t3 = pop();
- push(t1);
- push(t3);
- push(t2);
- push(t1);
- break;
- case Opcodes.DUP2:
- t1 = pop();
- t2 = pop();
- push(t2);
- push(t1);
- push(t2);
- push(t1);
- break;
- case Opcodes.DUP2_X1:
- t1 = pop();
- t2 = pop();
- t3 = pop();
- push(t2);
- push(t1);
- push(t3);
- push(t2);
- push(t1);
- break;
- case Opcodes.DUP2_X2:
- t1 = pop();
- t2 = pop();
- t3 = pop();
- t4 = pop();
- push(t2);
- push(t1);
- push(t4);
- push(t3);
- push(t2);
- push(t1);
- break;
- case Opcodes.SWAP:
+ }
+ break;
+ case Opcodes.IASTORE:
+ case Opcodes.BASTORE:
+ case Opcodes.CASTORE:
+ case Opcodes.SASTORE:
+ case Opcodes.FASTORE:
+ case Opcodes.AASTORE:
+ pop(3);
+ break;
+ case Opcodes.LASTORE:
+ case Opcodes.DASTORE:
+ pop(4);
+ break;
+ case Opcodes.POP:
+ case Opcodes.IFEQ:
+ case Opcodes.IFNE:
+ case Opcodes.IFLT:
+ case Opcodes.IFGE:
+ case Opcodes.IFGT:
+ case Opcodes.IFLE:
+ case Opcodes.IRETURN:
+ case Opcodes.FRETURN:
+ case Opcodes.ARETURN:
+ case Opcodes.TABLESWITCH:
+ case Opcodes.LOOKUPSWITCH:
+ case Opcodes.ATHROW:
+ case Opcodes.MONITORENTER:
+ case Opcodes.MONITOREXIT:
+ case Opcodes.IFNULL:
+ case Opcodes.IFNONNULL:
+ pop(1);
+ break;
+ case Opcodes.POP2:
+ case Opcodes.IF_ICMPEQ:
+ case Opcodes.IF_ICMPNE:
+ case Opcodes.IF_ICMPLT:
+ case Opcodes.IF_ICMPGE:
+ case Opcodes.IF_ICMPGT:
+ case Opcodes.IF_ICMPLE:
+ case Opcodes.IF_ACMPEQ:
+ case Opcodes.IF_ACMPNE:
+ case Opcodes.LRETURN:
+ case Opcodes.DRETURN:
+ pop(2);
+ break;
+ case Opcodes.DUP:
+ t1 = pop();
+ push(t1);
+ push(t1);
+ break;
+ case Opcodes.DUP_X1:
+ t1 = pop();
+ t2 = pop();
+ push(t1);
+ push(t2);
+ push(t1);
+ break;
+ case Opcodes.DUP_X2:
+ t1 = pop();
+ t2 = pop();
+ t3 = pop();
+ push(t1);
+ push(t3);
+ push(t2);
+ push(t1);
+ break;
+ case Opcodes.DUP2:
+ t1 = pop();
+ t2 = pop();
+ push(t2);
+ push(t1);
+ push(t2);
+ push(t1);
+ break;
+ case Opcodes.DUP2_X1:
+ t1 = pop();
+ t2 = pop();
+ t3 = pop();
+ push(t2);
+ push(t1);
+ push(t3);
+ push(t2);
+ push(t1);
+ break;
+ case Opcodes.DUP2_X2:
+ t1 = pop();
+ t2 = pop();
+ t3 = pop();
+ t4 = pop();
+ push(t2);
+ push(t1);
+ push(t4);
+ push(t3);
+ push(t2);
+ push(t1);
+ break;
+ case Opcodes.SWAP:
+ t1 = pop();
+ t2 = pop();
+ push(t1);
+ push(t2);
+ break;
+ case Opcodes.IADD:
+ case Opcodes.ISUB:
+ case Opcodes.IMUL:
+ case Opcodes.IDIV:
+ case Opcodes.IREM:
+ case Opcodes.IAND:
+ case Opcodes.IOR:
+ case Opcodes.IXOR:
+ case Opcodes.ISHL:
+ case Opcodes.ISHR:
+ case Opcodes.IUSHR:
+ case Opcodes.L2I:
+ case Opcodes.D2I:
+ case Opcodes.FCMPL:
+ case Opcodes.FCMPG:
+ pop(2);
+ push(INTEGER);
+ break;
+ case Opcodes.LADD:
+ case Opcodes.LSUB:
+ case Opcodes.LMUL:
+ case Opcodes.LDIV:
+ case Opcodes.LREM:
+ case Opcodes.LAND:
+ case Opcodes.LOR:
+ case Opcodes.LXOR:
+ pop(4);
+ push(LONG);
+ push(TOP);
+ break;
+ case Opcodes.FADD:
+ case Opcodes.FSUB:
+ case Opcodes.FMUL:
+ case Opcodes.FDIV:
+ case Opcodes.FREM:
+ case Opcodes.L2F:
+ case Opcodes.D2F:
+ pop(2);
+ push(FLOAT);
+ break;
+ case Opcodes.DADD:
+ case Opcodes.DSUB:
+ case Opcodes.DMUL:
+ case Opcodes.DDIV:
+ case Opcodes.DREM:
+ pop(4);
+ push(DOUBLE);
+ push(TOP);
+ break;
+ case Opcodes.LSHL:
+ case Opcodes.LSHR:
+ case Opcodes.LUSHR:
+ pop(3);
+ push(LONG);
+ push(TOP);
+ break;
+ case Opcodes.IINC:
+ set(arg, INTEGER);
+ break;
+ case Opcodes.I2L:
+ case Opcodes.F2L:
+ pop(1);
+ push(LONG);
+ push(TOP);
+ break;
+ case Opcodes.I2F:
+ pop(1);
+ push(FLOAT);
+ break;
+ case Opcodes.I2D:
+ case Opcodes.F2D:
+ pop(1);
+ push(DOUBLE);
+ push(TOP);
+ break;
+ case Opcodes.F2I:
+ case Opcodes.ARRAYLENGTH:
+ case Opcodes.INSTANCEOF:
+ pop(1);
+ push(INTEGER);
+ break;
+ case Opcodes.LCMP:
+ case Opcodes.DCMPL:
+ case Opcodes.DCMPG:
+ pop(4);
+ push(INTEGER);
+ break;
+ case Opcodes.JSR:
+ case Opcodes.RET:
+ throw new RuntimeException(
+ "JSR/RET are not supported with computeFrames option");
+ case Opcodes.GETSTATIC:
+ push(cw, item.strVal3);
+ break;
+ case Opcodes.PUTSTATIC:
+ pop(item.strVal3);
+ break;
+ case Opcodes.GETFIELD:
+ pop(1);
+ push(cw, item.strVal3);
+ break;
+ case Opcodes.PUTFIELD:
+ pop(item.strVal3);
+ pop();
+ break;
+ case Opcodes.INVOKEVIRTUAL:
+ case Opcodes.INVOKESPECIAL:
+ case Opcodes.INVOKESTATIC:
+ case Opcodes.INVOKEINTERFACE:
+ pop(item.strVal3);
+ if (opcode != Opcodes.INVOKESTATIC) {
t1 = pop();
- t2 = pop();
- push(t1);
- push(t2);
- break;
- case Opcodes.IADD:
- case Opcodes.ISUB:
- case Opcodes.IMUL:
- case Opcodes.IDIV:
- case Opcodes.IREM:
- case Opcodes.IAND:
- case Opcodes.IOR:
- case Opcodes.IXOR:
- case Opcodes.ISHL:
- case Opcodes.ISHR:
- case Opcodes.IUSHR:
- case Opcodes.L2I:
- case Opcodes.D2I:
- case Opcodes.FCMPL:
- case Opcodes.FCMPG:
- pop(2);
- push(INTEGER);
- break;
- case Opcodes.LADD:
- case Opcodes.LSUB:
- case Opcodes.LMUL:
- case Opcodes.LDIV:
- case Opcodes.LREM:
- case Opcodes.LAND:
- case Opcodes.LOR:
- case Opcodes.LXOR:
- pop(4);
- push(LONG);
- push(TOP);
- break;
- case Opcodes.FADD:
- case Opcodes.FSUB:
- case Opcodes.FMUL:
- case Opcodes.FDIV:
- case Opcodes.FREM:
- case Opcodes.L2F:
- case Opcodes.D2F:
- pop(2);
- push(FLOAT);
- break;
- case Opcodes.DADD:
- case Opcodes.DSUB:
- case Opcodes.DMUL:
- case Opcodes.DDIV:
- case Opcodes.DREM:
- pop(4);
- push(DOUBLE);
- push(TOP);
- break;
- case Opcodes.LSHL:
- case Opcodes.LSHR:
- case Opcodes.LUSHR:
- pop(3);
- push(LONG);
- push(TOP);
- break;
- case Opcodes.IINC:
- set(arg, INTEGER);
- break;
- case Opcodes.I2L:
- case Opcodes.F2L:
- pop(1);
- push(LONG);
- push(TOP);
- break;
- case Opcodes.I2F:
- pop(1);
- push(FLOAT);
- break;
- case Opcodes.I2D:
- case Opcodes.F2D:
- pop(1);
- push(DOUBLE);
- push(TOP);
- break;
- case Opcodes.F2I:
- case Opcodes.ARRAYLENGTH:
- case Opcodes.INSTANCEOF:
- pop(1);
- push(INTEGER);
- break;
- case Opcodes.LCMP:
- case Opcodes.DCMPL:
- case Opcodes.DCMPG:
- pop(4);
- push(INTEGER);
- break;
- case Opcodes.JSR:
- case Opcodes.RET:
- throw new RuntimeException("JSR/RET are not supported with computeFrames option");
- case Opcodes.GETSTATIC:
- push(cw, item.strVal3);
- break;
- case Opcodes.PUTSTATIC:
- pop(item.strVal3);
- break;
- case Opcodes.GETFIELD:
- pop(1);
- push(cw, item.strVal3);
- break;
- case Opcodes.PUTFIELD:
- pop(item.strVal3);
- pop();
- break;
- case Opcodes.INVOKEVIRTUAL:
- case Opcodes.INVOKESPECIAL:
- case Opcodes.INVOKESTATIC:
- case Opcodes.INVOKEINTERFACE:
- pop(item.strVal3);
- if (opcode != Opcodes.INVOKESTATIC) {
- t1 = pop();
- if (opcode == Opcodes.INVOKESPECIAL
- && item.strVal2.charAt(0) == '<')
- {
- init(t1);
- }
+ if (opcode == Opcodes.INVOKESPECIAL
+ && item.strVal2.charAt(0) == '<') {
+ init(t1);
}
- push(cw, item.strVal3);
+ }
+ push(cw, item.strVal3);
+ break;
+ case Opcodes.INVOKEDYNAMIC:
+ pop(item.strVal2);
+ push(cw, item.strVal2);
+ break;
+ case Opcodes.NEW:
+ push(UNINITIALIZED | cw.addUninitializedType(item.strVal1, arg));
+ break;
+ case Opcodes.NEWARRAY:
+ pop();
+ switch (arg) {
+ case Opcodes.T_BOOLEAN:
+ push(ARRAY_OF | BOOLEAN);
break;
- case Opcodes.INVOKEDYNAMIC:
- pop(item.strVal2);
- push(cw, item.strVal2);
+ case Opcodes.T_CHAR:
+ push(ARRAY_OF | CHAR);
break;
- case Opcodes.NEW:
- push(UNINITIALIZED | cw.addUninitializedType(item.strVal1, arg));
+ case Opcodes.T_BYTE:
+ push(ARRAY_OF | BYTE);
break;
- case Opcodes.NEWARRAY:
- pop();
- switch (arg) {
- case Opcodes.T_BOOLEAN:
- push(ARRAY_OF | BOOLEAN);
- break;
- case Opcodes.T_CHAR:
- push(ARRAY_OF | CHAR);
- break;
- case Opcodes.T_BYTE:
- push(ARRAY_OF | BYTE);
- break;
- case Opcodes.T_SHORT:
- push(ARRAY_OF | SHORT);
- break;
- case Opcodes.T_INT:
- push(ARRAY_OF | INTEGER);
- break;
- case Opcodes.T_FLOAT:
- push(ARRAY_OF | FLOAT);
- break;
- case Opcodes.T_DOUBLE:
- push(ARRAY_OF | DOUBLE);
- break;
- // case Opcodes.T_LONG:
- default:
- push(ARRAY_OF | LONG);
- break;
- }
+ case Opcodes.T_SHORT:
+ push(ARRAY_OF | SHORT);
break;
- case Opcodes.ANEWARRAY:
- String s = item.strVal1;
- pop();
- if (s.charAt(0) == '[') {
- push(cw, '[' + s);
- } else {
- push(ARRAY_OF | OBJECT | cw.addType(s));
- }
+ case Opcodes.T_INT:
+ push(ARRAY_OF | INTEGER);
break;
- case Opcodes.CHECKCAST:
- s = item.strVal1;
- pop();
- if (s.charAt(0) == '[') {
- push(cw, s);
- } else {
- push(OBJECT | cw.addType(s));
- }
+ case Opcodes.T_FLOAT:
+ push(ARRAY_OF | FLOAT);
break;
- // case Opcodes.MULTIANEWARRAY:
+ case Opcodes.T_DOUBLE:
+ push(ARRAY_OF | DOUBLE);
+ break;
+ // case Opcodes.T_LONG:
default:
- pop(arg);
- push(cw, item.strVal1);
+ push(ARRAY_OF | LONG);
break;
+ }
+ break;
+ case Opcodes.ANEWARRAY:
+ String s = item.strVal1;
+ pop();
+ if (s.charAt(0) == '[') {
+ push(cw, '[' + s);
+ } else {
+ push(ARRAY_OF | OBJECT | cw.addType(s));
+ }
+ break;
+ case Opcodes.CHECKCAST:
+ s = item.strVal1;
+ pop();
+ if (s.charAt(0) == '[') {
+ push(cw, s);
+ } else {
+ push(OBJECT | cw.addType(s));
+ }
+ break;
+ // case Opcodes.MULTIANEWARRAY:
+ default:
+ pop(arg);
+ push(cw, item.strVal1);
+ break;
}
}
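Editor's note: the large execute() hunk above only re-indents the per-opcode stack bookkeeping. As a reading aid, here is a toy trace (not ASM code) of the push/pop effects it encodes for the sequence ILOAD, ILOAD, IADD, IRETURN; the strings stand in for Frame's int-encoded types.

    import java.util.ArrayDeque;
    import java.util.Deque;

    // Toy illustration of the stack effects in the switch above.
    public class StackEffectSketch {
        public static void main(String[] args) {
            Deque<String> stack = new ArrayDeque<>();
            stack.push("INTEGER");                   // ILOAD   -> push(INTEGER)
            stack.push("INTEGER");                   // ILOAD   -> push(INTEGER)
            stack.pop(); stack.pop();                // IADD    -> pop(2) ...
            stack.push("INTEGER");                   //         ... push(INTEGER)
            stack.pop();                             // IRETURN -> pop(1)
            System.out.println("depth after IRETURN: " + stack.size());   // 0
        }
    }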
@@ -1260,10 +1273,13 @@ final class Frame {
* frames of this basic block. Returns <tt>true</tt> if the input frame of
* the given label has been changed by this operation.
*
- * @param cw the ClassWriter to which this label belongs.
- * @param frame the basic block whose input frame must be updated.
- * @param edge the kind of the {@link Edge} between this label and 'label'.
- * See {@link Edge#info}.
+ * @param cw
+ * the ClassWriter to which this label belongs.
+ * @param frame
+ * the basic block whose input frame must be updated.
+ * @param edge
+ * the kind of the {@link Edge} between this label and 'label'.
+ * See {@link Edge#info}.
* @return <tt>true</tt> if the input frame of the given label has been
* changed by this operation.
*/
@@ -1294,7 +1310,8 @@ final class Frame {
} else {
t = dim + inputStack[nStack - (s & VALUE)];
}
- if ((s & TOP_IF_LONG_OR_DOUBLE) != 0 && (t == LONG || t == DOUBLE)) {
+ if ((s & TOP_IF_LONG_OR_DOUBLE) != 0
+ && (t == LONG || t == DOUBLE)) {
t = TOP;
}
}
@@ -1346,7 +1363,8 @@ final class Frame {
} else {
t = dim + inputStack[nStack - (s & VALUE)];
}
- if ((s & TOP_IF_LONG_OR_DOUBLE) != 0 && (t == LONG || t == DOUBLE)) {
+ if ((s & TOP_IF_LONG_OR_DOUBLE) != 0
+ && (t == LONG || t == DOUBLE)) {
t = TOP;
}
}
@@ -1363,19 +1381,19 @@ final class Frame {
* type. Returns <tt>true</tt> if the type array has been modified by this
* operation.
*
- * @param cw the ClassWriter to which this label belongs.
- * @param t the type with which the type array element must be merged.
- * @param types an array of types.
- * @param index the index of the type that must be merged in 'types'.
+ * @param cw
+ * the ClassWriter to which this label belongs.
+ * @param t
+ * the type with which the type array element must be merged.
+ * @param types
+ * an array of types.
+ * @param index
+ * the index of the type that must be merged in 'types'.
* @return <tt>true</tt> if the type array has been modified by this
* operation.
*/
- private static boolean merge(
- final ClassWriter cw,
- int t,
- final int[] types,
- final int index)
- {
+ private static boolean merge(final ClassWriter cw, int t,
+ final int[] types, final int index) {
int u = types[index];
if (u == t) {
// if the types are equal, merge(u,t)=u, so there is no change
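Editor's note: the final Frame.java hunk only reflows the merge(...) signature. A toy version of its boolean contract (return true only when the slot actually changes) is sketched below; the real method computes a common supertype instead of simply overwriting the slot.

    // Toy sketch of the "did anything change?" contract of Frame.merge above.
    public class MergeSketch {
        static boolean merge(int t, int[] types, int index) {
            int u = types[index];
            if (u == t) {
                return false;             // equal types: merge(u, t) = u, no change
            }
            types[index] = t;             // placeholder; the real code merges types
            return true;
        }

        public static void main(String[] args) {
            int[] types = {1, 2, 3};
            System.out.println(merge(2, types, 1));   // false
            System.out.println(merge(9, types, 1));   // true
        }
    }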
diff --git a/src/asm/scala/tools/asm/Handle.java b/src/asm/scala/tools/asm/Handle.java
index be8f334192..5dd06a54b9 100644
--- a/src/asm/scala/tools/asm/Handle.java
+++ b/src/asm/scala/tools/asm/Handle.java
@@ -66,18 +66,23 @@ public final class Handle {
/**
* Constructs a new field or method handle.
*
- * @param tag the kind of field or method designated by this Handle. Must be
- * {@link Opcodes#H_GETFIELD}, {@link Opcodes#H_GETSTATIC},
- * {@link Opcodes#H_PUTFIELD}, {@link Opcodes#H_PUTSTATIC},
- * {@link Opcodes#H_INVOKEVIRTUAL}, {@link Opcodes#H_INVOKESTATIC},
- * {@link Opcodes#H_INVOKESPECIAL},
- * {@link Opcodes#H_NEWINVOKESPECIAL} or
- * {@link Opcodes#H_INVOKEINTERFACE}.
- * @param owner the internal name of the field or method designed by this
- * handle.
- * @param name the name of the field or method designated by this handle.
- * @param desc the descriptor of the field or method designated by this
- * handle.
+ * @param tag
+ * the kind of field or method designated by this Handle. Must be
+ * {@link Opcodes#H_GETFIELD}, {@link Opcodes#H_GETSTATIC},
+ * {@link Opcodes#H_PUTFIELD}, {@link Opcodes#H_PUTSTATIC},
+ * {@link Opcodes#H_INVOKEVIRTUAL},
+ * {@link Opcodes#H_INVOKESTATIC},
+ * {@link Opcodes#H_INVOKESPECIAL},
+ * {@link Opcodes#H_NEWINVOKESPECIAL} or
+ * {@link Opcodes#H_INVOKEINTERFACE}.
+ * @param owner
+ * the internal name of the field or method designed by this
+ * handle.
+ * @param name
+ * the name of the field or method designated by this handle.
+ * @param desc
+ * the descriptor of the field or method designated by this
+ * handle.
*/
public Handle(int tag, String owner, String name, String desc) {
this.tag = tag;
@@ -101,11 +106,9 @@ public final class Handle {
}
/**
- * Returns the internal name of the field or method designed by this
- * handle.
+ * Returns the internal name of the field or method designed by this handle.
*
- * @return the internal name of the field or method designed by this
- * handle.
+ * @return the internal name of the field or method designed by this handle.
*/
public String getOwner() {
return owner;
@@ -138,8 +141,8 @@ public final class Handle {
return false;
}
Handle h = (Handle) obj;
- return tag == h.tag && owner.equals(h.owner)
- && name.equals(h.name) && desc.equals(h.desc);
+ return tag == h.tag && owner.equals(h.owner) && name.equals(h.name)
+ && desc.equals(h.desc);
}
@Override
@@ -149,8 +152,13 @@ public final class Handle {
/**
* Returns the textual representation of this handle. The textual
- * representation is: <pre>owner '.' name desc ' ' '(' tag ')'</pre>. As
- * this format is unambiguous, it can be parsed if necessary.
+ * representation is:
+ *
+ * <pre>
+ * owner '.' name desc ' ' '(' tag ')'
+ * </pre>
+ *
+ * . As this format is unambiguous, it can be parsed if necessary.
*/
@Override
public String toString() {
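Editor's note: a hedged usage sketch of the four-argument constructor and the toString() format documented in the Handle.java hunks above; the owner, name and descriptor values are made up for illustration.

    import scala.tools.asm.Handle;
    import scala.tools.asm.Opcodes;

    public class HandleExample {
        public static void main(String[] args) {
            // Hypothetical bootstrap method handle for an invokedynamic call site.
            Handle bsm = new Handle(
                    Opcodes.H_INVOKESTATIC,          // tag
                    "com/example/Bootstrap",         // owner (internal name)
                    "bootstrap",                     // name
                    "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;"
                            + "Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;");
            // Prints: owner '.' name desc ' ' '(' tag ')', as documented above.
            System.out.println(bsm);
        }
    }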
diff --git a/src/asm/scala/tools/asm/Handler.java b/src/asm/scala/tools/asm/Handler.java
index 9e92bb98be..a06cb8152a 100644
--- a/src/asm/scala/tools/asm/Handler.java
+++ b/src/asm/scala/tools/asm/Handler.java
@@ -72,9 +72,12 @@ class Handler {
* Removes the range between start and end from the given exception
* handlers.
*
- * @param h an exception handler list.
- * @param start the start of the range to be removed.
- * @param end the end of the range to be removed. Maybe null.
+ * @param h
+ * an exception handler list.
+ * @param start
+ * the start of the range to be removed.
+ * @param end
+ * the end of the range to be removed. May be null.
* @return the exception handler list with the start-end range removed.
*/
static Handler remove(Handler h, Label start, Label end) {
diff --git a/src/asm/scala/tools/asm/Item.java b/src/asm/scala/tools/asm/Item.java
index 021a0b11d3..94195a1082 100644
--- a/src/asm/scala/tools/asm/Item.java
+++ b/src/asm/scala/tools/asm/Item.java
@@ -53,8 +53,8 @@ final class Item {
* {@link ClassWriter#METH}, {@link ClassWriter#IMETH},
* {@link ClassWriter#MTYPE}, {@link ClassWriter#INDY}.
*
- * MethodHandle constant 9 variations are stored using a range
- * of 9 values from {@link ClassWriter#HANDLE_BASE} + 1 to
+ * MethodHandle constant 9 variations are stored using a range of 9 values
+ * from {@link ClassWriter#HANDLE_BASE} + 1 to
* {@link ClassWriter#HANDLE_BASE} + 9.
*
* Special Item types are used for Items that are stored in the ClassWriter
@@ -115,7 +115,8 @@ final class Item {
* Constructs an uninitialized {@link Item} for constant pool element at
* given position.
*
- * @param index index of the item to be constructed.
+ * @param index
+ * index of the item to be constructed.
*/
Item(final int index) {
this.index = index;
@@ -124,8 +125,10 @@ final class Item {
/**
* Constructs a copy of the given item.
*
- * @param index index of the item to be constructed.
- * @param i the item that must be copied into the item to be constructed.
+ * @param index
+ * index of the item to be constructed.
+ * @param i
+ * the item that must be copied into the item to be constructed.
*/
Item(final int index, final Item i) {
this.index = index;
@@ -141,7 +144,8 @@ final class Item {
/**
* Sets this item to an integer item.
*
- * @param intVal the value of this item.
+ * @param intVal
+ * the value of this item.
*/
void set(final int intVal) {
this.type = ClassWriter.INT;
@@ -152,7 +156,8 @@ final class Item {
/**
* Sets this item to a long item.
*
- * @param longVal the value of this item.
+ * @param longVal
+ * the value of this item.
*/
void set(final long longVal) {
this.type = ClassWriter.LONG;
@@ -163,7 +168,8 @@ final class Item {
/**
* Sets this item to a float item.
*
- * @param floatVal the value of this item.
+ * @param floatVal
+ * the value of this item.
*/
void set(final float floatVal) {
this.type = ClassWriter.FLOAT;
@@ -174,7 +180,8 @@ final class Item {
/**
* Sets this item to a double item.
*
- * @param doubleVal the value of this item.
+ * @param doubleVal
+ * the value of this item.
*/
void set(final double doubleVal) {
this.type = ClassWriter.DOUBLE;
@@ -185,49 +192,53 @@ final class Item {
/**
* Sets this item to an item that do not hold a primitive value.
*
- * @param type the type of this item.
- * @param strVal1 first part of the value of this item.
- * @param strVal2 second part of the value of this item.
- * @param strVal3 third part of the value of this item.
+ * @param type
+ * the type of this item.
+ * @param strVal1
+ * first part of the value of this item.
+ * @param strVal2
+ * second part of the value of this item.
+ * @param strVal3
+ * third part of the value of this item.
*/
- void set(
- final int type,
- final String strVal1,
- final String strVal2,
- final String strVal3)
- {
+ void set(final int type, final String strVal1, final String strVal2,
+ final String strVal3) {
this.type = type;
this.strVal1 = strVal1;
this.strVal2 = strVal2;
this.strVal3 = strVal3;
switch (type) {
- case ClassWriter.UTF8:
- case ClassWriter.STR:
- case ClassWriter.CLASS:
- case ClassWriter.MTYPE:
- case ClassWriter.TYPE_NORMAL:
- hashCode = 0x7FFFFFFF & (type + strVal1.hashCode());
- return;
- case ClassWriter.NAME_TYPE:
- hashCode = 0x7FFFFFFF & (type + strVal1.hashCode()
- * strVal2.hashCode());
- return;
- // ClassWriter.FIELD:
- // ClassWriter.METH:
- // ClassWriter.IMETH:
- // ClassWriter.HANDLE_BASE + 1..9
- default:
- hashCode = 0x7FFFFFFF & (type + strVal1.hashCode()
- * strVal2.hashCode() * strVal3.hashCode());
+ case ClassWriter.UTF8:
+ case ClassWriter.STR:
+ case ClassWriter.CLASS:
+ case ClassWriter.MTYPE:
+ case ClassWriter.TYPE_NORMAL:
+ hashCode = 0x7FFFFFFF & (type + strVal1.hashCode());
+ return;
+ case ClassWriter.NAME_TYPE: {
+ hashCode = 0x7FFFFFFF & (type + strVal1.hashCode()
+ * strVal2.hashCode());
+ return;
+ }
+ // ClassWriter.FIELD:
+ // ClassWriter.METH:
+ // ClassWriter.IMETH:
+ // ClassWriter.HANDLE_BASE + 1..9
+ default:
+ hashCode = 0x7FFFFFFF & (type + strVal1.hashCode()
+ * strVal2.hashCode() * strVal3.hashCode());
}
}
/**
* Sets the item to an InvokeDynamic item.
*
- * @param name invokedynamic's name.
- * @param desc invokedynamic's desc.
- * @param bsmIndex zero based index into the class attribute BootrapMethods.
+ * @param name
+ * invokedynamic's name.
+ * @param desc
+ * invokedynamic's desc.
+ * @param bsmIndex
+ * zero-based index into the class attribute BootstrapMethods.
*/
void set(String name, String desc, int bsmIndex) {
this.type = ClassWriter.INDY;
@@ -241,10 +252,12 @@ final class Item {
/**
* Sets the item to a BootstrapMethod item.
*
- * @param position position in byte in the class attribute BootrapMethods.
- * @param hashCode hashcode of the item. This hashcode is processed from
- * the hashcode of the bootstrap method and the hashcode of
- * all bootstrap arguments.
+ * @param position
+ * position in bytes in the class attribute BootstrapMethods.
+ * @param hashCode
+ * hashcode of the item. This hashcode is processed from the
+ * hashcode of the bootstrap method and the hashcode of all
+ * bootstrap arguments.
*/
void set(int position, int hashCode) {
this.type = ClassWriter.BSM;
@@ -256,41 +269,42 @@ final class Item {
* Indicates if the given item is equal to this one. <i>This method assumes
* that the two items have the same {@link #type}</i>.
*
- * @param i the item to be compared to this one. Both items must have the
- * same {@link #type}.
+ * @param i
+ * the item to be compared to this one. Both items must have the
+ * same {@link #type}.
* @return <tt>true</tt> if the given item if equal to this one,
* <tt>false</tt> otherwise.
*/
boolean isEqualTo(final Item i) {
switch (type) {
- case ClassWriter.UTF8:
- case ClassWriter.STR:
- case ClassWriter.CLASS:
- case ClassWriter.MTYPE:
- case ClassWriter.TYPE_NORMAL:
- return i.strVal1.equals(strVal1);
- case ClassWriter.TYPE_MERGED:
- case ClassWriter.LONG:
- case ClassWriter.DOUBLE:
- return i.longVal == longVal;
- case ClassWriter.INT:
- case ClassWriter.FLOAT:
- return i.intVal == intVal;
- case ClassWriter.TYPE_UNINIT:
- return i.intVal == intVal && i.strVal1.equals(strVal1);
- case ClassWriter.NAME_TYPE:
- return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2);
- case ClassWriter.INDY:
- return i.longVal == longVal && i.strVal1.equals(strVal1)
- && i.strVal2.equals(strVal2);
-
- // case ClassWriter.FIELD:
- // case ClassWriter.METH:
- // case ClassWriter.IMETH:
- // case ClassWriter.HANDLE_BASE + 1..9
- default:
- return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2)
- && i.strVal3.equals(strVal3);
+ case ClassWriter.UTF8:
+ case ClassWriter.STR:
+ case ClassWriter.CLASS:
+ case ClassWriter.MTYPE:
+ case ClassWriter.TYPE_NORMAL:
+ return i.strVal1.equals(strVal1);
+ case ClassWriter.TYPE_MERGED:
+ case ClassWriter.LONG:
+ case ClassWriter.DOUBLE:
+ return i.longVal == longVal;
+ case ClassWriter.INT:
+ case ClassWriter.FLOAT:
+ return i.intVal == intVal;
+ case ClassWriter.TYPE_UNINIT:
+ return i.intVal == intVal && i.strVal1.equals(strVal1);
+ case ClassWriter.NAME_TYPE:
+ return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2);
+ case ClassWriter.INDY: {
+ return i.longVal == longVal && i.strVal1.equals(strVal1)
+ && i.strVal2.equals(strVal2);
+ }
+ // case ClassWriter.FIELD:
+ // case ClassWriter.METH:
+ // case ClassWriter.IMETH:
+ // case ClassWriter.HANDLE_BASE + 1..9
+ default:
+ return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2)
+ && i.strVal3.equals(strVal3);
}
}
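Editor's note: the set(...) hunks above keep masking hash codes with 0x7FFFFFFF. The small sketch below shows why: the mask clears the sign bit so the value can be used directly as a (modulo) bucket index. The type value here is a stand-in, not a quoted ClassWriter constant.

    public class HashMaskSketch {
        public static void main(String[] args) {
            int type = 12;                           // stand-in for a ClassWriter item type
            String name = "toString";
            String desc = "()Ljava/lang/String;";
            int hashCode = 0x7FFFFFFF & (type + name.hashCode() * desc.hashCode());
            System.out.println(hashCode >= 0);       // always true after the mask
            System.out.println(hashCode % 256);      // safe to use as a table index
        }
    }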
diff --git a/src/asm/scala/tools/asm/Label.java b/src/asm/scala/tools/asm/Label.java
index 712c7f251f..5d5529ce74 100644
--- a/src/asm/scala/tools/asm/Label.java
+++ b/src/asm/scala/tools/asm/Label.java
@@ -32,9 +32,9 @@ package scala.tools.asm;
/**
* A label represents a position in the bytecode of a method. Labels are used
* for jump, goto, and switch instructions, and for try catch blocks. A label
- * designates the <i>instruction</i> that is just after. Note however that
- * there can be other elements between a label and the instruction it
- * designates (such as other labels, stack map frames, line numbers, etc.).
+ * designates the <i>instruction</i> that is just after. Note however that there
+ * can be other elements between a label and the instruction it designates (such
+ * as other labels, stack map frames, line numbers, etc.).
*
* @author Eric Bruneton
*/
@@ -110,8 +110,8 @@ public class Label {
/**
* Field used to associate user information to a label. Warning: this field
* is used by the ASM tree package. In order to use it with the ASM tree
- * package you must override the {@link
- * org.objectweb.asm.tree.MethodNode#getLabelNode} method.
+ * package you must override the
+ * {@link scala.tools.asm.tree.MethodNode#getLabelNode} method.
*/
public Object info;
@@ -154,7 +154,7 @@ public class Label {
* indicates if this reference uses 2 or 4 bytes, and its absolute value
* gives the position of the bytecode instruction. This array is also used
* as a bitset to store the subroutines to which a basic block belongs. This
- * information is needed in {@linked MethodWriter#visitMaxs}, after all
+ * information is needed in {@link MethodWriter#visitMaxs}, after all
* forward references have been resolved. Hence the same array can be used
* for both purposes without problems.
*/
@@ -177,11 +177,11 @@ public class Label {
* state of the local variables and the operand stack at the end of each
* basic block, called the "output frame", <i>relatively</i> to the frame
* state at the beginning of the basic block, which is called the "input
- * frame", and which is <i>unknown</i> during this step. The second step,
- * in {@link MethodWriter#visitMaxs}, is a fix point algorithm that
- * computes information about the input frame of each basic block, from the
- * input state of the first basic block (known from the method signature),
- * and by the using the previously computed relative output frames.
+ * frame", and which is <i>unknown</i> during this step. The second step, in
+ * {@link MethodWriter#visitMaxs}, is a fix point algorithm that computes
+ * information about the input frame of each basic block, from the input
+ * state of the first basic block (known from the method signature), and by
+ * the using the previously computed relative output frames.
*
* The algorithm used to compute the maximum stack size only computes the
* relative output and absolute input stack heights, while the algorithm
@@ -266,11 +266,13 @@ public class Label {
* generators or adapters.</i>
*
* @return the offset corresponding to this label.
- * @throws IllegalStateException if this label is not resolved yet.
+ * @throws IllegalStateException
+ * if this label is not resolved yet.
*/
public int getOffset() {
if ((status & RESOLVED) == 0) {
- throw new IllegalStateException("Label offset position has not been resolved yet");
+ throw new IllegalStateException(
+ "Label offset position has not been resolved yet");
}
return position;
}
@@ -281,21 +283,21 @@ public class Label {
* directly. Otherwise, a null offset is written and a new forward reference
* is declared for this label.
*
- * @param owner the code writer that calls this method.
- * @param out the bytecode of the method.
- * @param source the position of first byte of the bytecode instruction that
- * contains this label.
- * @param wideOffset <tt>true</tt> if the reference must be stored in 4
- * bytes, or <tt>false</tt> if it must be stored with 2 bytes.
- * @throws IllegalArgumentException if this label has not been created by
- * the given code writer.
- */
- void put(
- final MethodWriter owner,
- final ByteVector out,
- final int source,
- final boolean wideOffset)
- {
+ * @param owner
+ * the code writer that calls this method.
+ * @param out
+ * the bytecode of the method.
+ * @param source
+ * the position of first byte of the bytecode instruction that
+ * contains this label.
+ * @param wideOffset
+ * <tt>true</tt> if the reference must be stored in 4 bytes, or
+ * <tt>false</tt> if it must be stored with 2 bytes.
+ * @throws IllegalArgumentException
+ * if this label has not been created by the given code writer.
+ */
+ void put(final MethodWriter owner, final ByteVector out, final int source,
+ final boolean wideOffset) {
if ((status & RESOLVED) == 0) {
if (wideOffset) {
addReference(-1 - source, out.length);
@@ -319,25 +321,21 @@ public class Label {
* yet. For backward references, the offset of the reference can be, and
* must be, computed and stored directly.
*
- * @param sourcePosition the position of the referencing instruction. This
- * position will be used to compute the offset of this forward
- * reference.
- * @param referencePosition the position where the offset for this forward
- * reference must be stored.
- */
- private void addReference(
- final int sourcePosition,
- final int referencePosition)
- {
+ * @param sourcePosition
+ * the position of the referencing instruction. This position
+ * will be used to compute the offset of this forward reference.
+ * @param referencePosition
+ * the position where the offset for this forward reference must
+ * be stored.
+ */
+ private void addReference(final int sourcePosition,
+ final int referencePosition) {
if (srcAndRefPositions == null) {
srcAndRefPositions = new int[6];
}
if (referenceCount >= srcAndRefPositions.length) {
int[] a = new int[srcAndRefPositions.length + 6];
- System.arraycopy(srcAndRefPositions,
- 0,
- a,
- 0,
+ System.arraycopy(srcAndRefPositions, 0, a, 0,
srcAndRefPositions.length);
srcAndRefPositions = a;
}
@@ -351,9 +349,12 @@ public class Label {
* position becomes known. This method fills in the blanks that where left
* in the bytecode by each forward reference previously added to this label.
*
- * @param owner the code writer that calls this method.
- * @param position the position of this label in the bytecode.
- * @param data the bytecode of the method.
+ * @param owner
+ * the code writer that calls this method.
+ * @param position
+ * the position of this label in the bytecode.
+ * @param data
+ * the bytecode of the method.
* @return <tt>true</tt> if a blank that was left for this label was to
* small to store the offset. In such a case the corresponding jump
* instruction is replaced with a pseudo instruction (using unused
@@ -361,14 +362,12 @@ public class Label {
* instructions will need to be replaced with true instructions with
* wider offsets (4 bytes instead of 2). This is done in
* {@link MethodWriter#resizeInstructions}.
- * @throws IllegalArgumentException if this label has already been resolved,
- * or if it has not been created by the given code writer.
- */
- boolean resolve(
- final MethodWriter owner,
- final int position,
- final byte[] data)
- {
+ * @throws IllegalArgumentException
+ * if this label has already been resolved, or if it has not
+ * been created by the given code writer.
+ */
+ boolean resolve(final MethodWriter owner, final int position,
+ final byte[] data) {
boolean needUpdate = false;
this.status |= RESOLVED;
this.position = position;
@@ -431,7 +430,8 @@ public class Label {
/**
* Returns true is this basic block belongs to the given subroutine.
*
- * @param id a subroutine id.
+ * @param id
+ * a subroutine id.
* @return true is this basic block belongs to the given subroutine.
*/
boolean inSubroutine(final long id) {
@@ -445,7 +445,8 @@ public class Label {
* Returns true if this basic block and the given one belong to a common
* subroutine.
*
- * @param block another basic block.
+ * @param block
+ * another basic block.
* @return true if this basic block and the given one belong to a common
* subroutine.
*/
@@ -464,8 +465,10 @@ public class Label {
/**
* Marks this basic block as belonging to the given subroutine.
*
- * @param id a subroutine id.
- * @param nbSubroutines the total number of subroutines in the method.
+ * @param id
+ * a subroutine id.
+ * @param nbSubroutines
+ * the total number of subroutines in the method.
*/
void addToSubroutine(final long id, final int nbSubroutines) {
if ((status & VISITED) == 0) {
@@ -481,14 +484,16 @@ public class Label {
* flow graph to find all the blocks that are reachable from the current
* block WITHOUT following any JSR target.
*
- * @param JSR a JSR block that jumps to this subroutine. If this JSR is not
- * null it is added to the successor of the RET blocks found in the
- * subroutine.
- * @param id the id of this subroutine.
- * @param nbSubroutines the total number of subroutines in the method.
- */
- void visitSubroutine(final Label JSR, final long id, final int nbSubroutines)
- {
+ * @param JSR
+ * a JSR block that jumps to this subroutine. If this JSR is not
+ * null it is added to the successor of the RET blocks found in
+ * the subroutine.
+ * @param id
+ * the id of this subroutine.
+ * @param nbSubroutines
+ * the total number of subroutines in the method.
+ */
+ void visitSubroutine(final Label JSR, final long id, final int nbSubroutines) {
// user managed stack of labels, to avoid using a recursive method
// (recursivity can lead to stack overflow with very large methods)
Label stack = this;
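Editor's note: a minimal sketch of the forward-reference behaviour documented in the Label.java hunks above. A label may be used as a jump target before it is visited; its bytecode offset only becomes available once it has been resolved. The surrounding method generation (where mv comes from) is assumed, not part of this diff.

    import scala.tools.asm.Label;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    public class LabelUsageSketch {
        static void emitSkipIfZero(MethodVisitor mv) {
            Label skip = new Label();                // not resolved yet
            mv.visitVarInsn(Opcodes.ILOAD, 1);
            mv.visitJumpInsn(Opcodes.IFEQ, skip);    // forward reference
            mv.visitInsn(Opcodes.NOP);               // ... code to skip ...
            mv.visitLabel(skip);                     // label resolved here
            mv.visitInsn(Opcodes.RETURN);
            // Calling skip.getOffset() before the label is resolved would throw
            // IllegalStateException, as documented above.
        }
    }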
diff --git a/src/asm/scala/tools/asm/MethodVisitor.java b/src/asm/scala/tools/asm/MethodVisitor.java
index a8a859a6a9..e43ca97823 100644
--- a/src/asm/scala/tools/asm/MethodVisitor.java
+++ b/src/asm/scala/tools/asm/MethodVisitor.java
@@ -30,19 +30,19 @@
package scala.tools.asm;
/**
- * A visitor to visit a Java method. The methods of this class must be
- * called in the following order: [ <tt>visitAnnotationDefault</tt> ] (
+ * A visitor to visit a Java method. The methods of this class must be called in
+ * the following order: [ <tt>visitAnnotationDefault</tt> ] (
* <tt>visitAnnotation</tt> | <tt>visitParameterAnnotation</tt> |
* <tt>visitAttribute</tt> )* [ <tt>visitCode</tt> ( <tt>visitFrame</tt> |
- * <tt>visit</tt><i>X</i>Insn</tt> | <tt>visitLabel</tt> | <tt>visitTryCatchBlock</tt> |
- * <tt>visitLocalVariable</tt> | <tt>visitLineNumber</tt> )* <tt>visitMaxs</tt> ]
- * <tt>visitEnd</tt>. In addition, the <tt>visit</tt><i>X</i>Insn</tt>
- * and <tt>visitLabel</tt> methods must be called in the sequential order of
- * the bytecode instructions of the visited code, <tt>visitTryCatchBlock</tt>
- * must be called <i>before</i> the labels passed as arguments have been
- * visited, and the <tt>visitLocalVariable</tt> and <tt>visitLineNumber</tt>
- * methods must be called <i>after</i> the labels passed as arguments have been
- * visited.
+ * <tt>visit</tt><i>X</i>Insn</tt> | <tt>visitLabel</tt> |
+ * <tt>visitTryCatchBlock</tt> | <tt>visitLocalVariable</tt> |
+ * <tt>visitLineNumber</tt> )* <tt>visitMaxs</tt> ] <tt>visitEnd</tt>. In
+ * addition, the <tt>visit</tt><i>X</i>Insn</tt> and <tt>visitLabel</tt> methods
+ * must be called in the sequential order of the bytecode instructions of the
+ * visited code, <tt>visitTryCatchBlock</tt> must be called <i>before</i> the
+ * labels passed as arguments have been visited, and the
+ * <tt>visitLocalVariable</tt> and <tt>visitLineNumber</tt> methods must be
+ * called <i>after</i> the labels passed as arguments have been visited.
*
* @author Eric Bruneton
*/
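Editor's note: the class comment above spells out the required call order on a MethodVisitor. A minimal conforming sequence for one method body is sketched below; mv is assumed to come from a ClassWriter.visitMethod call, which is outside this diff.

    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    public class CallOrderSketch {
        static void emitReturnZero(MethodVisitor mv) {
            mv.visitCode();                  // start of the instructions
            mv.visitInsn(Opcodes.ICONST_0);
            mv.visitInsn(Opcodes.IRETURN);
            mv.visitMaxs(1, 1);              // max stack, max locals (assumes one local)
            mv.visitEnd();                   // always the last call
        }
    }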
@@ -63,8 +63,9 @@ public abstract class MethodVisitor {
/**
* Constructs a new {@link MethodVisitor}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
*/
public MethodVisitor(final int api) {
this(api, null);
@@ -73,15 +74,17 @@ public abstract class MethodVisitor {
/**
* Constructs a new {@link MethodVisitor}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
- * @param mv the method visitor to which this visitor must delegate method
- * calls. May be null.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param mv
+ * the method visitor to which this visitor must delegate method
+ * calls. May be null.
*/
public MethodVisitor(final int api, final MethodVisitor mv) {
- /*if (api != Opcodes.ASM4) {
+ if (api != Opcodes.ASM4) {
throw new IllegalArgumentException();
- }*/
+ }
this.api = api;
this.mv = mv;
}
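Editor's note: the hunk above un-comments the api check, so the constructor now rejects any value other than Opcodes.ASM4. A hedged adapter sketch using the accepted value:

    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    // Counts NOP instructions while delegating everything to the wrapped visitor.
    public class NopCountingVisitor extends MethodVisitor {
        private int nops;

        public NopCountingVisitor(MethodVisitor mv) {
            super(Opcodes.ASM4, mv);         // any other api value now throws IllegalArgumentException
        }

        @Override
        public void visitInsn(int opcode) {
            if (opcode == Opcodes.NOP) {
                nops++;
            }
            super.visitInsn(opcode);         // mv may be null; the superclass checks for that
        }

        public int nopCount() {
            return nops;
        }
    }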
@@ -94,8 +97,8 @@ public abstract class MethodVisitor {
* Visits the default value of this annotation interface method.
*
* @return a visitor to the visit the actual default value of this
- * annotation interface method, or <tt>null</tt> if this visitor
- * is not interested in visiting this default value. The 'name'
+ * annotation interface method, or <tt>null</tt> if this visitor is
+ * not interested in visiting this default value. The 'name'
* parameters passed to the methods of this annotation visitor are
* ignored. Moreover, exacly one visit method must be called on this
* annotation visitor, followed by visitEnd.
@@ -110,8 +113,10 @@ public abstract class MethodVisitor {
/**
* Visits an annotation of this method.
*
- * @param desc the class descriptor of the annotation class.
- * @param visible <tt>true</tt> if the annotation is visible at runtime.
+ * @param desc
+ * the class descriptor of the annotation class.
+ * @param visible
+ * <tt>true</tt> if the annotation is visible at runtime.
* @return a visitor to visit the annotation values, or <tt>null</tt> if
* this visitor is not interested in visiting this annotation.
*/
@@ -125,17 +130,17 @@ public abstract class MethodVisitor {
/**
* Visits an annotation of a parameter this method.
*
- * @param parameter the parameter index.
- * @param desc the class descriptor of the annotation class.
- * @param visible <tt>true</tt> if the annotation is visible at runtime.
+ * @param parameter
+ * the parameter index.
+ * @param desc
+ * the class descriptor of the annotation class.
+ * @param visible
+ * <tt>true</tt> if the annotation is visible at runtime.
* @return a visitor to visit the annotation values, or <tt>null</tt> if
* this visitor is not interested in visiting this annotation.
*/
- public AnnotationVisitor visitParameterAnnotation(
- int parameter,
- String desc,
- boolean visible)
- {
+ public AnnotationVisitor visitParameterAnnotation(int parameter,
+ String desc, boolean visible) {
if (mv != null) {
return mv.visitParameterAnnotation(parameter, desc, visible);
}
@@ -145,7 +150,8 @@ public abstract class MethodVisitor {
/**
* Visits a non standard attribute of this method.
*
- * @param attr an attribute.
+ * @param attr
+ * an attribute.
*/
public void visitAttribute(Attribute attr) {
if (mv != null) {
@@ -169,57 +175,74 @@ public abstract class MethodVisitor {
* such as GOTO or THROW, that is the target of a jump instruction, or that
* starts an exception handler block. The visited types must describe the
* values of the local variables and of the operand stack elements <i>just
- * before</i> <b>i</b> is executed. <br> <br> (*) this is mandatory only
- * for classes whose version is greater than or equal to
- * {@link Opcodes#V1_6 V1_6}. <br> <br> Packed frames are basically
- * "deltas" from the state of the previous frame (very first frame is
- * implicitly defined by the method's parameters and access flags): <ul>
+ * before</i> <b>i</b> is executed.<br>
+ * <br>
+ * (*) this is mandatory only for classes whose version is greater than or
+ * equal to {@link Opcodes#V1_6 V1_6}. <br>
+ * <br>
+ * The frames of a method must be given either in expanded form, or in
+ * compressed form (all frames must use the same format, i.e. you must not
+ * mix expanded and compressed frames within a single method):
+ * <ul>
+ * <li>In expanded form, all frames must have the F_NEW type.</li>
+ * <li>In compressed form, frames are basically "deltas" from the state of
+ * the previous frame:
+ * <ul>
* <li>{@link Opcodes#F_SAME} representing frame with exactly the same
- * locals as the previous frame and with the empty stack.</li> <li>{@link Opcodes#F_SAME1}
- * representing frame with exactly the same locals as the previous frame and
- * with single value on the stack (<code>nStack</code> is 1 and
- * <code>stack[0]</code> contains value for the type of the stack item).</li>
+ * locals as the previous frame and with the empty stack.</li>
+ * <li>{@link Opcodes#F_SAME1} representing frame with exactly the same
+ * locals as the previous frame and with single value on the stack (
+ * <code>nStack</code> is 1 and <code>stack[0]</code> contains value for the
+ * type of the stack item).</li>
* <li>{@link Opcodes#F_APPEND} representing frame with current locals are
* the same as the locals in the previous frame, except that additional
* locals are defined (<code>nLocal</code> is 1, 2 or 3 and
* <code>local</code> elements contains values representing added types).</li>
- * <li>{@link Opcodes#F_CHOP} representing frame with current locals are
- * the same as the locals in the previous frame, except that the last 1-3
- * locals are absent and with the empty stack (<code>nLocals</code> is 1,
- * 2 or 3). </li> <li>{@link Opcodes#F_FULL} representing complete frame
- * data.</li> </li> </ul>
+ * <li>{@link Opcodes#F_CHOP} representing frame with current locals are the
+ * same as the locals in the previous frame, except that the last 1-3 locals
+ * are absent and with the empty stack (<code>nLocals</code> is 1, 2 or 3).</li>
+ * <li>{@link Opcodes#F_FULL} representing complete frame data.</li></li>
+ * </ul>
+ * </ul> <br>
+ * In both cases the first frame, corresponding to the method's parameters
+ * and access flags, is implicit and must not be visited. Also, it is
+ * illegal to visit two or more frames for the same code location (i.e., at
+ * least one instruction must be visited between two calls to visitFrame).
*
- * @param type the type of this stack map frame. Must be
- * {@link Opcodes#F_NEW} for expanded frames, or
- * {@link Opcodes#F_FULL}, {@link Opcodes#F_APPEND},
- * {@link Opcodes#F_CHOP}, {@link Opcodes#F_SAME} or
- * {@link Opcodes#F_APPEND}, {@link Opcodes#F_SAME1} for compressed
- * frames.
- * @param nLocal the number of local variables in the visited frame.
- * @param local the local variable types in this frame. This array must not
- * be modified. Primitive types are represented by
- * {@link Opcodes#TOP}, {@link Opcodes#INTEGER},
- * {@link Opcodes#FLOAT}, {@link Opcodes#LONG},
- * {@link Opcodes#DOUBLE},{@link Opcodes#NULL} or
- * {@link Opcodes#UNINITIALIZED_THIS} (long and double are
- * represented by a single element). Reference types are represented
- * by String objects (representing internal names), and uninitialized
- * types by Label objects (this label designates the NEW instruction
- * that created this uninitialized value).
- * @param nStack the number of operand stack elements in the visited frame.
- * @param stack the operand stack types in this frame. This array must not
- * be modified. Its content has the same format as the "local" array.
- * @throws IllegalStateException if a frame is visited just after another
- * one, without any instruction between the two (unless this frame
- * is a Opcodes#F_SAME frame, in which case it is silently ignored).
+ * @param type
+ * the type of this stack map frame. Must be
+ * {@link Opcodes#F_NEW} for expanded frames, or
+ * {@link Opcodes#F_FULL}, {@link Opcodes#F_APPEND},
+ * {@link Opcodes#F_CHOP}, {@link Opcodes#F_SAME} or
+ * {@link Opcodes#F_APPEND}, {@link Opcodes#F_SAME1} for
+ * compressed frames.
+ * @param nLocal
+ * the number of local variables in the visited frame.
+ * @param local
+ * the local variable types in this frame. This array must not be
+ * modified. Primitive types are represented by
+ * {@link Opcodes#TOP}, {@link Opcodes#INTEGER},
+ * {@link Opcodes#FLOAT}, {@link Opcodes#LONG},
+ * {@link Opcodes#DOUBLE},{@link Opcodes#NULL} or
+ * {@link Opcodes#UNINITIALIZED_THIS} (long and double are
+ * represented by a single element). Reference types are
+ * represented by String objects (representing internal names),
+ * and uninitialized types by Label objects (this label
+ * designates the NEW instruction that created this uninitialized
+ * value).
+ * @param nStack
+ * the number of operand stack elements in the visited frame.
+ * @param stack
+ * the operand stack types in this frame. This array must not be
+ * modified. Its content has the same format as the "local"
+ * array.
+ * @throws IllegalStateException
+ * if a frame is visited just after another one, without any
+ * instruction between the two (unless this frame is a
+ * Opcodes#F_SAME frame, in which case it is silently ignored).
*/
- public void visitFrame(
- int type,
- int nLocal,
- Object[] local,
- int nStack,
- Object[] stack)
- {
+ public void visitFrame(int type, int nLocal, Object[] local, int nStack,
+ Object[] stack) {
if (mv != null) {
mv.visitFrame(type, nLocal, local, nStack, stack);
}
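Editor's note: a hedged example of the compressed-frame contract described in the rewritten javadoc above: at a merge point, emit exactly one frame, expressed as a delta from the previous one. The method layout assumed here (an int in local 1, empty stack at the join) is for illustration only.

    import scala.tools.asm.Label;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    public class FrameVisitSketch {
        static void emitBranch(MethodVisitor mv) {
            Label join = new Label();
            mv.visitVarInsn(Opcodes.ILOAD, 1);
            mv.visitJumpInsn(Opcodes.IFEQ, join);
            mv.visitInsn(Opcodes.ICONST_1);
            mv.visitVarInsn(Opcodes.ISTORE, 2);
            mv.visitLabel(join);
            // F_SAME: same locals as the previous frame, empty stack (see list above).
            mv.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
            mv.visitInsn(Opcodes.RETURN);
        }
    }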
@@ -232,20 +255,22 @@ public abstract class MethodVisitor {
/**
* Visits a zero operand instruction.
*
- * @param opcode the opcode of the instruction to be visited. This opcode is
- * either NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1, ICONST_2,
- * ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1, FCONST_0,
- * FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD, LALOAD, FALOAD,
- * DALOAD, AALOAD, BALOAD, CALOAD, SALOAD, IASTORE, LASTORE, FASTORE,
- * DASTORE, AASTORE, BASTORE, CASTORE, SASTORE, POP, POP2, DUP,
- * DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2, SWAP, IADD, LADD, FADD,
- * DADD, ISUB, LSUB, FSUB, DSUB, IMUL, LMUL, FMUL, DMUL, IDIV, LDIV,
- * FDIV, DDIV, IREM, LREM, FREM, DREM, INEG, LNEG, FNEG, DNEG, ISHL,
- * LSHL, ISHR, LSHR, IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR, LXOR,
- * I2L, I2F, I2D, L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F, I2B,
- * I2C, I2S, LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN,
- * FRETURN, DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW,
- * MONITORENTER, or MONITOREXIT.
+ * @param opcode
+ * the opcode of the instruction to be visited. This opcode is
+ * either NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1,
+ * ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1,
+ * FCONST_0, FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD,
+ * LALOAD, FALOAD, DALOAD, AALOAD, BALOAD, CALOAD, SALOAD,
+ * IASTORE, LASTORE, FASTORE, DASTORE, AASTORE, BASTORE, CASTORE,
+ * SASTORE, POP, POP2, DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1,
+ * DUP2_X2, SWAP, IADD, LADD, FADD, DADD, ISUB, LSUB, FSUB, DSUB,
+ * IMUL, LMUL, FMUL, DMUL, IDIV, LDIV, FDIV, DDIV, IREM, LREM,
+ * FREM, DREM, INEG, LNEG, FNEG, DNEG, ISHL, LSHL, ISHR, LSHR,
+ * IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR, LXOR, I2L, I2F, I2D,
+ * L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F, I2B, I2C, I2S,
+ * LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN, FRETURN,
+ * DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW, MONITORENTER,
+ * or MONITOREXIT.
*/
public void visitInsn(int opcode) {
if (mv != null) {
@@ -256,17 +281,20 @@ public abstract class MethodVisitor {
/**
* Visits an instruction with a single int operand.
*
- * @param opcode the opcode of the instruction to be visited. This opcode is
- * either BIPUSH, SIPUSH or NEWARRAY.
- * @param operand the operand of the instruction to be visited.<br> When
- * opcode is BIPUSH, operand value should be between Byte.MIN_VALUE
- * and Byte.MAX_VALUE.<br> When opcode is SIPUSH, operand value
- * should be between Short.MIN_VALUE and Short.MAX_VALUE.<br> When
- * opcode is NEWARRAY, operand value should be one of
- * {@link Opcodes#T_BOOLEAN}, {@link Opcodes#T_CHAR},
- * {@link Opcodes#T_FLOAT}, {@link Opcodes#T_DOUBLE},
- * {@link Opcodes#T_BYTE}, {@link Opcodes#T_SHORT},
- * {@link Opcodes#T_INT} or {@link Opcodes#T_LONG}.
+ * @param opcode
+ * the opcode of the instruction to be visited. This opcode is
+ * either BIPUSH, SIPUSH or NEWARRAY.
+ * @param operand
+ * the operand of the instruction to be visited.<br>
+ * When opcode is BIPUSH, operand value should be between
+ * Byte.MIN_VALUE and Byte.MAX_VALUE.<br>
+ * When opcode is SIPUSH, operand value should be between
+ * Short.MIN_VALUE and Short.MAX_VALUE.<br>
+ * When opcode is NEWARRAY, operand value should be one of
+ * {@link Opcodes#T_BOOLEAN}, {@link Opcodes#T_CHAR},
+ * {@link Opcodes#T_FLOAT}, {@link Opcodes#T_DOUBLE},
+ * {@link Opcodes#T_BYTE}, {@link Opcodes#T_SHORT},
+ * {@link Opcodes#T_INT} or {@link Opcodes#T_LONG}.
*/
public void visitIntInsn(int opcode, int operand) {
if (mv != null) {
@@ -278,11 +306,13 @@ public abstract class MethodVisitor {
* Visits a local variable instruction. A local variable instruction is an
* instruction that loads or stores the value of a local variable.
*
- * @param opcode the opcode of the local variable instruction to be visited.
- * This opcode is either ILOAD, LLOAD, FLOAD, DLOAD, ALOAD, ISTORE,
- * LSTORE, FSTORE, DSTORE, ASTORE or RET.
- * @param var the operand of the instruction to be visited. This operand is
- * the index of a local variable.
+ * @param opcode
+ * the opcode of the local variable instruction to be visited.
+ * This opcode is either ILOAD, LLOAD, FLOAD, DLOAD, ALOAD,
+ * ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or RET.
+ * @param var
+ * the operand of the instruction to be visited. This operand is
+ * the index of a local variable.
*/
public void visitVarInsn(int opcode, int var) {
if (mv != null) {
@@ -294,11 +324,13 @@ public abstract class MethodVisitor {
* Visits a type instruction. A type instruction is an instruction that
* takes the internal name of a class as parameter.
*
- * @param opcode the opcode of the type instruction to be visited. This
- * opcode is either NEW, ANEWARRAY, CHECKCAST or INSTANCEOF.
- * @param type the operand of the instruction to be visited. This operand
- * must be the internal name of an object or array class (see {@link
- * Type#getInternalName() getInternalName}).
+ * @param opcode
+ * the opcode of the type instruction to be visited. This opcode
+ * is either NEW, ANEWARRAY, CHECKCAST or INSTANCEOF.
+ * @param type
+ * the operand of the instruction to be visited. This operand
+ * must be the internal name of an object or array class (see
+ * {@link Type#getInternalName() getInternalName}).
*/
public void visitTypeInsn(int opcode, String type) {
if (mv != null) {
@@ -310,14 +342,19 @@ public abstract class MethodVisitor {
* Visits a field instruction. A field instruction is an instruction that
* loads or stores the value of a field of an object.
*
- * @param opcode the opcode of the type instruction to be visited. This
- * opcode is either GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD.
- * @param owner the internal name of the field's owner class (see {@link
- * Type#getInternalName() getInternalName}).
- * @param name the field's name.
- * @param desc the field's descriptor (see {@link Type Type}).
+ * @param opcode
+ * the opcode of the type instruction to be visited. This opcode
+ * is either GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD.
+ * @param owner
+ * the internal name of the field's owner class (see
+ * {@link Type#getInternalName() getInternalName}).
+ * @param name
+ * the field's name.
+ * @param desc
+ * the field's descriptor (see {@link Type Type}).
*/
- public void visitFieldInsn(int opcode, String owner, String name, String desc) {
+ public void visitFieldInsn(int opcode, String owner, String name,
+ String desc) {
if (mv != null) {
mv.visitFieldInsn(opcode, owner, name, desc);
}
@@ -327,15 +364,20 @@ public abstract class MethodVisitor {
* Visits a method instruction. A method instruction is an instruction that
* invokes a method.
*
- * @param opcode the opcode of the type instruction to be visited. This
- * opcode is either INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC
- * or INVOKEINTERFACE.
- * @param owner the internal name of the method's owner class (see {@link
- * Type#getInternalName() getInternalName}).
- * @param name the method's name.
- * @param desc the method's descriptor (see {@link Type Type}).
+ * @param opcode
+ * the opcode of the type instruction to be visited. This opcode
+ * is either INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or
+ * INVOKEINTERFACE.
+ * @param owner
+ * the internal name of the method's owner class (see
+ * {@link Type#getInternalName() getInternalName}).
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor (see {@link Type Type}).
*/
- public void visitMethodInsn(int opcode, String owner, String name, String desc) {
+ public void visitMethodInsn(int opcode, String owner, String name,
+ String desc) {
if (mv != null) {
mv.visitMethodInsn(opcode, owner, name, desc);
}
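An illustrative sketch, not taken from this commit, of how these field, constant and method instructions are emitted through a MethodVisitor; the owner, name and descriptor strings are example values in JVM internal form:

import scala.tools.asm.MethodVisitor;
import scala.tools.asm.Opcodes;

class PrintlnSketch {
    /** Emits the equivalent of System.out.println("hi") into mv. */
    static void emit(MethodVisitor mv) {
        mv.visitFieldInsn(Opcodes.GETSTATIC, "java/lang/System", "out",
                "Ljava/io/PrintStream;");
        mv.visitLdcInsn("hi");
        // ASM 4 form: (opcode, owner, name, desc), no interface flag.
        mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/io/PrintStream",
                "println", "(Ljava/lang/String;)V");
    }
}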
@@ -344,16 +386,21 @@ public abstract class MethodVisitor {
/**
* Visits an invokedynamic instruction.
*
- * @param name the method's name.
- * @param desc the method's descriptor (see {@link Type Type}).
- * @param bsm the bootstrap method.
- * @param bsmArgs the bootstrap method constant arguments. Each argument
- * must be an {@link Integer}, {@link Float}, {@link Long},
- * {@link Double}, {@link String}, {@link Type} or {@link Handle}
- * value. This method is allowed to modify the content of the array
- * so a caller should expect that this array may change.
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor (see {@link Type Type}).
+ * @param bsm
+ * the bootstrap method.
+ * @param bsmArgs
+ * the bootstrap method constant arguments. Each argument must be
+ * an {@link Integer}, {@link Float}, {@link Long},
+ * {@link Double}, {@link String}, {@link Type} or {@link Handle}
+ * value. This method is allowed to modify the content of the
+ * array so a caller should expect that this array may change.
*/
- public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, Object... bsmArgs) {
+ public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
+ Object... bsmArgs) {
if (mv != null) {
mv.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
}
@@ -363,13 +410,15 @@ public abstract class MethodVisitor {
* Visits a jump instruction. A jump instruction is an instruction that may
* jump to another instruction.
*
- * @param opcode the opcode of the type instruction to be visited. This
- * opcode is either IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
- * IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ,
- * IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL.
- * @param label the operand of the instruction to be visited. This operand
- * is a label that designates the instruction to which the jump
- * instruction may jump.
+ * @param opcode
+ * the opcode of the type instruction to be visited. This opcode
+ * is either IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
+ * IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE,
+ * IF_ACMPEQ, IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL.
+ * @param label
+ * the operand of the instruction to be visited. This operand is
+ * a label that designates the instruction to which the jump
+ * instruction may jump.
*/
public void visitJumpInsn(int opcode, Label label) {
if (mv != null) {
@@ -381,7 +430,8 @@ public abstract class MethodVisitor {
* Visits a label. A label designates the instruction that will be visited
* just after it.
*
- * @param label a {@link Label Label} object.
+ * @param label
+ * a {@link Label Label} object.
*/
public void visitLabel(Label label) {
if (mv != null) {
@@ -398,41 +448,44 @@ public abstract class MethodVisitor {
* future versions of the Java Virtual Machine. To easily detect new
* constant types, implementations of this method should check for
* unexpected constant types, like this:
+ *
* <pre>
* if (cst instanceof Integer) {
- * // ...
+ * // ...
* } else if (cst instanceof Float) {
- * // ...
+ * // ...
* } else if (cst instanceof Long) {
- * // ...
- * } else if (cst instanceof Double) {
- * // ...
- * } else if (cst instanceof String) {
- * // ...
- * } else if (cst instanceof Type) {
- * int sort = ((Type) cst).getSort();
- * if (sort == Type.OBJECT) {
* // ...
- * } else if (sort == Type.ARRAY) {
+ * } else if (cst instanceof Double) {
* // ...
- * } else if (sort == Type.METHOD) {
+ * } else if (cst instanceof String) {
* // ...
- * } else {
- * // throw an exception
- * }
+ * } else if (cst instanceof Type) {
+ * int sort = ((Type) cst).getSort();
+ * if (sort == Type.OBJECT) {
+ * // ...
+ * } else if (sort == Type.ARRAY) {
+ * // ...
+ * } else if (sort == Type.METHOD) {
+ * // ...
+ * } else {
+ * // throw an exception
+ * }
* } else if (cst instanceof Handle) {
- * // ...
+ * // ...
* } else {
- * // throw an exception
- * }</pre>
+ * // throw an exception
+ * }
+ * </pre>
*
- * @param cst the constant to be loaded on the stack. This parameter must be
- * a non null {@link Integer}, a {@link Float}, a {@link Long}, a
- * {@link Double}, a {@link String}, a {@link Type} of OBJECT or ARRAY
- * sort for <tt>.class</tt> constants, for classes whose version is
- * 49.0, a {@link Type} of METHOD sort or a {@link Handle} for
- * MethodType and MethodHandle constants, for classes whose version
- * is 51.0.
+ * @param cst
+ * the constant to be loaded on the stack. This parameter must be
+ * a non-null {@link Integer}, a {@link Float}, a {@link Long}, a
+ * {@link Double}, a {@link String}, a {@link Type} of OBJECT or
+ * ARRAY sort for <tt>.class</tt> constants (allowed for classes
+ * whose version is 49.0 or higher), or a {@link Type} of METHOD
+ * sort or a {@link Handle} for MethodType and MethodHandle
+ * constants (allowed for classes whose version is 51.0 or higher).
*/
public void visitLdcInsn(Object cst) {
if (mv != null) {
@@ -443,8 +496,10 @@ public abstract class MethodVisitor {
/**
* Visits an IINC instruction.
*
- * @param var index of the local variable to be incremented.
- * @param increment amount to increment the local variable by.
+ * @param var
+ * index of the local variable to be incremented.
+ * @param increment
+ * amount to increment the local variable by.
*/
public void visitIincInsn(int var, int increment) {
if (mv != null) {
@@ -455,13 +510,18 @@ public abstract class MethodVisitor {
/**
* Visits a TABLESWITCH instruction.
*
- * @param min the minimum key value.
- * @param max the maximum key value.
- * @param dflt beginning of the default handler block.
- * @param labels beginnings of the handler blocks. <tt>labels[i]</tt> is
- * the beginning of the handler block for the <tt>min + i</tt> key.
+ * @param min
+ * the minimum key value.
+ * @param max
+ * the maximum key value.
+ * @param dflt
+ * beginning of the default handler block.
+ * @param labels
+ * beginnings of the handler blocks. <tt>labels[i]</tt> is the
+ * beginning of the handler block for the <tt>min + i</tt> key.
*/
- public void visitTableSwitchInsn(int min, int max, Label dflt, Label... labels) {
+ public void visitTableSwitchInsn(int min, int max, Label dflt,
+ Label... labels) {
if (mv != null) {
mv.visitTableSwitchInsn(min, max, dflt, labels);
}
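A hedged sketch of the min/max/labels contract documented above (not part of this change, and assuming the surrounding ClassWriter computes frames): labels[i] handles key min + i, and every label must still be visited at its target position.

import scala.tools.asm.Label;
import scala.tools.asm.MethodVisitor;
import scala.tools.asm.Opcodes;

class TableSwitchSketch {
    /** Returns the int in local slot 0 for keys 0..2, and -1 otherwise. */
    static void emit(MethodVisitor mv) {
        Label[] cases = { new Label(), new Label(), new Label() };
        Label dflt = new Label();
        mv.visitVarInsn(Opcodes.ILOAD, 0);
        mv.visitTableSwitchInsn(0, 2, dflt, cases);
        for (int i = 0; i < cases.length; i++) {
            mv.visitLabel(cases[i]);              // handler for key 0 + i
            mv.visitLdcInsn(Integer.valueOf(i));
            mv.visitInsn(Opcodes.IRETURN);
        }
        mv.visitLabel(dflt);
        mv.visitInsn(Opcodes.ICONST_M1);
        mv.visitInsn(Opcodes.IRETURN);
    }
}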
@@ -470,10 +530,13 @@ public abstract class MethodVisitor {
/**
* Visits a LOOKUPSWITCH instruction.
*
- * @param dflt beginning of the default handler block.
- * @param keys the values of the keys.
- * @param labels beginnings of the handler blocks. <tt>labels[i]</tt> is
- * the beginning of the handler block for the <tt>keys[i]</tt> key.
+ * @param dflt
+ * beginning of the default handler block.
+ * @param keys
+ * the values of the keys.
+ * @param labels
+ * beginnings of the handler blocks. <tt>labels[i]</tt> is the
+ * beginning of the handler block for the <tt>keys[i]</tt> key.
*/
public void visitLookupSwitchInsn(Label dflt, int[] keys, Label[] labels) {
if (mv != null) {
@@ -484,8 +547,10 @@ public abstract class MethodVisitor {
/**
* Visits a MULTIANEWARRAY instruction.
*
- * @param desc an array type descriptor (see {@link Type Type}).
- * @param dims number of dimensions of the array to allocate.
+ * @param desc
+ * an array type descriptor (see {@link Type Type}).
+ * @param dims
+ * number of dimensions of the array to allocate.
*/
public void visitMultiANewArrayInsn(String desc, int dims) {
if (mv != null) {
@@ -500,17 +565,22 @@ public abstract class MethodVisitor {
/**
* Visits a try catch block.
*
- * @param start beginning of the exception handler's scope (inclusive).
- * @param end end of the exception handler's scope (exclusive).
- * @param handler beginning of the exception handler's code.
- * @param type internal name of the type of exceptions handled by the
- * handler, or <tt>null</tt> to catch any exceptions (for "finally"
- * blocks).
- * @throws IllegalArgumentException if one of the labels has already been
- * visited by this visitor (by the {@link #visitLabel visitLabel}
- * method).
+ * @param start
+ * beginning of the exception handler's scope (inclusive).
+ * @param end
+ * end of the exception handler's scope (exclusive).
+ * @param handler
+ * beginning of the exception handler's code.
+ * @param type
+ * internal name of the type of exceptions handled by the
+ * handler, or <tt>null</tt> to catch any exceptions (for
+ * "finally" blocks).
+ * @throws IllegalArgumentException
+ * if one of the labels has already been visited by this visitor
+ * (by the {@link #visitLabel visitLabel} method).
*/
- public void visitTryCatchBlock(Label start, Label end, Label handler, String type) {
+ public void visitTryCatchBlock(Label start, Label end, Label handler,
+ String type) {
if (mv != null) {
mv.visitTryCatchBlock(start, end, handler, type);
}
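A small usage sketch, not from this commit, of the ordering rule in the @throws clause above: visitTryCatchBlock must run before any of its labels has been visited. The risky() call target is hypothetical.

import scala.tools.asm.Label;
import scala.tools.asm.MethodVisitor;
import scala.tools.asm.Opcodes;

class TryCatchSketch {
    /** Wraps a call in a try/catch that silently drops the exception. */
    static void emit(MethodVisitor mv) {
        Label start = new Label(), end = new Label(), handler = new Label(), done = new Label();
        mv.visitTryCatchBlock(start, end, handler, "java/lang/Exception"); // before visitLabel
        mv.visitLabel(start);
        mv.visitMethodInsn(Opcodes.INVOKESTATIC, "Example", "risky", "()V");
        mv.visitLabel(end);
        mv.visitJumpInsn(Opcodes.GOTO, done);
        mv.visitLabel(handler);
        mv.visitInsn(Opcodes.POP);   // discard the caught exception
        mv.visitLabel(done);
    }
}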
@@ -519,28 +589,28 @@ public abstract class MethodVisitor {
/**
* Visits a local variable declaration.
*
- * @param name the name of a local variable.
- * @param desc the type descriptor of this local variable.
- * @param signature the type signature of this local variable. May be
- * <tt>null</tt> if the local variable type does not use generic
- * types.
- * @param start the first instruction corresponding to the scope of this
- * local variable (inclusive).
- * @param end the last instruction corresponding to the scope of this local
- * variable (exclusive).
- * @param index the local variable's index.
- * @throws IllegalArgumentException if one of the labels has not already
- * been visited by this visitor (by the
- * {@link #visitLabel visitLabel} method).
+ * @param name
+ * the name of a local variable.
+ * @param desc
+ * the type descriptor of this local variable.
+ * @param signature
+ * the type signature of this local variable. May be
+ * <tt>null</tt> if the local variable type does not use generic
+ * types.
+ * @param start
+ * the first instruction corresponding to the scope of this local
+ * variable (inclusive).
+ * @param end
+ * the last instruction corresponding to the scope of this local
+ * variable (exclusive).
+ * @param index
+ * the local variable's index.
+ * @throws IllegalArgumentException
+ * if one of the labels has not already been visited by this
+ * visitor (by the {@link #visitLabel visitLabel} method).
*/
- public void visitLocalVariable(
- String name,
- String desc,
- String signature,
- Label start,
- Label end,
- int index)
- {
+ public void visitLocalVariable(String name, String desc, String signature,
+ Label start, Label end, int index) {
if (mv != null) {
mv.visitLocalVariable(name, desc, signature, start, end, index);
}
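Another hedged sketch (the variable name and descriptor are made up): the debug entry is written last, once both scope labels have been visited, matching the @throws rule above.

import scala.tools.asm.Label;
import scala.tools.asm.MethodVisitor;
import scala.tools.asm.Opcodes;

class LocalVariableSketch {
    /** Stores the String on top of the stack in slot 1 and records debug info for it. */
    static void emit(MethodVisitor mv) {
        Label start = new Label(), end = new Label();
        mv.visitVarInsn(Opcodes.ASTORE, 1);
        mv.visitLabel(start);
        // ... instructions that read slot 1 would go here ...
        mv.visitLabel(end);
        mv.visitLocalVariable("greeting", "Ljava/lang/String;", null, start, end, 1);
    }
}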
@@ -549,12 +619,14 @@ public abstract class MethodVisitor {
/**
* Visits a line number declaration.
*
- * @param line a line number. This number refers to the source file from
- * which the class was compiled.
- * @param start the first instruction corresponding to this line number.
- * @throws IllegalArgumentException if <tt>start</tt> has not already been
- * visited by this visitor (by the {@link #visitLabel visitLabel}
- * method).
+ * @param line
+ * a line number. This number refers to the source file from
+ * which the class was compiled.
+ * @param start
+ * the first instruction corresponding to this line number.
+ * @throws IllegalArgumentException
+ * if <tt>start</tt> has not already been visited by this
+ * visitor (by the {@link #visitLabel visitLabel} method).
*/
public void visitLineNumber(int line, Label start) {
if (mv != null) {
@@ -566,8 +638,10 @@ public abstract class MethodVisitor {
* Visits the maximum stack size and the maximum number of local variables
* of the method.
*
- * @param maxStack maximum stack size of the method.
- * @param maxLocals maximum number of local variables for the method.
+ * @param maxStack
+ * maximum stack size of the method.
+ * @param maxLocals
+ * maximum number of local variables for the method.
*/
public void visitMaxs(int maxStack, int maxLocals) {
if (mv != null) {
diff --git a/src/asm/scala/tools/asm/MethodWriter.java b/src/asm/scala/tools/asm/MethodWriter.java
index 321bacb6fc..f5fbd1e74f 100644
--- a/src/asm/scala/tools/asm/MethodWriter.java
+++ b/src/asm/scala/tools/asm/MethodWriter.java
@@ -42,7 +42,7 @@ class MethodWriter extends MethodVisitor {
/**
* Pseudo access flag used to denote constructors.
*/
- static final int ACC_CONSTRUCTOR = 262144;
+ static final int ACC_CONSTRUCTOR = 0x80000;
/**
* Frame has exactly the same locals as the previous stack map frame and
@@ -229,7 +229,7 @@ class MethodWriter extends MethodVisitor {
private int maxLocals;
/**
- * Number of local variables in the current stack map frame.
+ * Number of local variables in the current stack map frame.
*/
private int currentLocals;
@@ -257,11 +257,6 @@ class MethodWriter extends MethodVisitor {
private int[] previousFrame;
/**
- * Index of the next element to be added in {@link #frame}.
- */
- private int frameIndex;
-
- /**
* The current stack map frame. The first element contains the offset of the
* instruction to which the frame corresponds, the second element is the
* number of locals and the third one is the number of stack elements. The
@@ -357,7 +352,8 @@ class MethodWriter extends MethodVisitor {
* A list of labels. This list is the list of basic blocks in the method,
* i.e. a list of Label objects linked to each other by their
* {@link Label#successor} field, in the order they are visited by
- * {@link MethodVisitor#visitLabel}, and starting with the first basic block.
+ * {@link MethodVisitor#visitLabel}, and starting with the first basic
+ * block.
*/
private Label labels;
@@ -396,28 +392,30 @@ class MethodWriter extends MethodVisitor {
/**
* Constructs a new {@link MethodWriter}.
*
- * @param cw the class writer in which the method must be added.
- * @param access the method's access flags (see {@link Opcodes}).
- * @param name the method's name.
- * @param desc the method's descriptor (see {@link Type}).
- * @param signature the method's signature. May be <tt>null</tt>.
- * @param exceptions the internal names of the method's exceptions. May be
- * <tt>null</tt>.
- * @param computeMaxs <tt>true</tt> if the maximum stack size and number
- * of local variables must be automatically computed.
- * @param computeFrames <tt>true</tt> if the stack map tables must be
- * recomputed from scratch.
- */
- MethodWriter(
- final ClassWriter cw,
- final int access,
- final String name,
- final String desc,
- final String signature,
- final String[] exceptions,
- final boolean computeMaxs,
- final boolean computeFrames)
- {
+ * @param cw
+ * the class writer in which the method must be added.
+ * @param access
+ * the method's access flags (see {@link Opcodes}).
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor (see {@link Type}).
+ * @param signature
+ * the method's signature. May be <tt>null</tt>.
+ * @param exceptions
+ * the internal names of the method's exceptions. May be
+ * <tt>null</tt>.
+ * @param computeMaxs
+ * <tt>true</tt> if the maximum stack size and number of local
+ * variables must be automatically computed.
+ * @param computeFrames
+ * <tt>true</tt> if the stack map tables must be recomputed from
+ * scratch.
+ */
+ MethodWriter(final ClassWriter cw, final int access, final String name,
+ final String desc, final String signature,
+ final String[] exceptions, final boolean computeMaxs,
+ final boolean computeFrames) {
super(Opcodes.ASM4);
if (cw.firstMethod == null) {
cw.firstMethod = this;
@@ -427,6 +425,9 @@ class MethodWriter extends MethodVisitor {
cw.lastMethod = this;
this.cw = cw;
this.access = access;
+ if ("<init>".equals(name)) {
+ this.access |= ACC_CONSTRUCTOR;
+ }
this.name = cw.newUTF8(name);
this.desc = cw.newUTF8(desc);
this.descriptor = desc;
@@ -442,9 +443,6 @@ class MethodWriter extends MethodVisitor {
}
this.compute = computeFrames ? FRAMES : (computeMaxs ? MAXS : NOTHING);
if (computeMaxs || computeFrames) {
- if (computeFrames && "<init>".equals(name)) {
- this.access |= ACC_CONSTRUCTOR;
- }
// updates maxLocals
int size = Type.getArgumentsAndReturnSizes(descriptor) >> 2;
if ((access & Opcodes.ACC_STATIC) != 0) {
@@ -473,10 +471,8 @@ class MethodWriter extends MethodVisitor {
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
if (!ClassReader.ANNOTATIONS) {
return null;
}
@@ -495,11 +491,8 @@ class MethodWriter extends MethodVisitor {
}
@Override
- public AnnotationVisitor visitParameterAnnotation(
- final int parameter,
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitParameterAnnotation(final int parameter,
+ final String desc, final boolean visible) {
if (!ClassReader.ANNOTATIONS) {
return null;
}
@@ -545,20 +538,18 @@ class MethodWriter extends MethodVisitor {
}
@Override
- public void visitFrame(
- final int type,
- final int nLocal,
- final Object[] local,
- final int nStack,
- final Object[] stack)
- {
+ public void visitFrame(final int type, final int nLocal,
+ final Object[] local, final int nStack, final Object[] stack) {
if (!ClassReader.FRAMES || compute == FRAMES) {
return;
}
if (type == Opcodes.F_NEW) {
+ if (previousFrame == null) {
+ visitImplicitFirstFrame();
+ }
currentLocals = nLocal;
- startFrame(code.length, nLocal, nStack);
+ int frameIndex = startFrame(code.length, nLocal, nStack);
for (int i = 0; i < nLocal; ++i) {
if (local[i] instanceof String) {
frame[frameIndex++] = Frame.OBJECT
@@ -601,48 +592,44 @@ class MethodWriter extends MethodVisitor {
}
switch (type) {
- case Opcodes.F_FULL:
- currentLocals = nLocal;
- stackMap.putByte(FULL_FRAME)
- .putShort(delta)
- .putShort(nLocal);
- for (int i = 0; i < nLocal; ++i) {
- writeFrameType(local[i]);
- }
- stackMap.putShort(nStack);
- for (int i = 0; i < nStack; ++i) {
- writeFrameType(stack[i]);
- }
- break;
- case Opcodes.F_APPEND:
- currentLocals += nLocal;
- stackMap.putByte(SAME_FRAME_EXTENDED + nLocal)
- .putShort(delta);
- for (int i = 0; i < nLocal; ++i) {
- writeFrameType(local[i]);
- }
- break;
- case Opcodes.F_CHOP:
- currentLocals -= nLocal;
- stackMap.putByte(SAME_FRAME_EXTENDED - nLocal)
+ case Opcodes.F_FULL:
+ currentLocals = nLocal;
+ stackMap.putByte(FULL_FRAME).putShort(delta).putShort(nLocal);
+ for (int i = 0; i < nLocal; ++i) {
+ writeFrameType(local[i]);
+ }
+ stackMap.putShort(nStack);
+ for (int i = 0; i < nStack; ++i) {
+ writeFrameType(stack[i]);
+ }
+ break;
+ case Opcodes.F_APPEND:
+ currentLocals += nLocal;
+ stackMap.putByte(SAME_FRAME_EXTENDED + nLocal).putShort(delta);
+ for (int i = 0; i < nLocal; ++i) {
+ writeFrameType(local[i]);
+ }
+ break;
+ case Opcodes.F_CHOP:
+ currentLocals -= nLocal;
+ stackMap.putByte(SAME_FRAME_EXTENDED - nLocal).putShort(delta);
+ break;
+ case Opcodes.F_SAME:
+ if (delta < 64) {
+ stackMap.putByte(delta);
+ } else {
+ stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta);
+ }
+ break;
+ case Opcodes.F_SAME1:
+ if (delta < 64) {
+ stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta);
+ } else {
+ stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED)
.putShort(delta);
- break;
- case Opcodes.F_SAME:
- if (delta < 64) {
- stackMap.putByte(delta);
- } else {
- stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta);
- }
- break;
- case Opcodes.F_SAME1:
- if (delta < 64) {
- stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta);
- } else {
- stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED)
- .putShort(delta);
- }
- writeFrameType(stack[0]);
- break;
+ }
+ writeFrameType(stack[0]);
+ break;
}
previousFrameOffset = code.length;
@@ -672,8 +659,7 @@ class MethodWriter extends MethodVisitor {
}
// if opcode == ATHROW or xRETURN, ends current block (no successor)
if ((opcode >= Opcodes.IRETURN && opcode <= Opcodes.RETURN)
- || opcode == Opcodes.ATHROW)
- {
+ || opcode == Opcodes.ATHROW) {
noSuccessor();
}
}
@@ -731,8 +717,7 @@ class MethodWriter extends MethodVisitor {
// updates max locals
int n;
if (opcode == Opcodes.LLOAD || opcode == Opcodes.DLOAD
- || opcode == Opcodes.LSTORE || opcode == Opcodes.DSTORE)
- {
+ || opcode == Opcodes.LSTORE || opcode == Opcodes.DSTORE) {
n = var + 2;
} else {
n = var + 1;
@@ -784,12 +769,8 @@ class MethodWriter extends MethodVisitor {
}
@Override
- public void visitFieldInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitFieldInsn(final int opcode, final String owner,
+ final String name, final String desc) {
Item i = cw.newFieldItem(owner, name, desc);
// Label currentBlock = this.currentBlock;
if (currentBlock != null) {
@@ -800,19 +781,19 @@ class MethodWriter extends MethodVisitor {
// computes the stack size variation
char c = desc.charAt(0);
switch (opcode) {
- case Opcodes.GETSTATIC:
- size = stackSize + (c == 'D' || c == 'J' ? 2 : 1);
- break;
- case Opcodes.PUTSTATIC:
- size = stackSize + (c == 'D' || c == 'J' ? -2 : -1);
- break;
- case Opcodes.GETFIELD:
- size = stackSize + (c == 'D' || c == 'J' ? 1 : 0);
- break;
- // case Constants.PUTFIELD:
- default:
- size = stackSize + (c == 'D' || c == 'J' ? -3 : -2);
- break;
+ case Opcodes.GETSTATIC:
+ size = stackSize + (c == 'D' || c == 'J' ? 2 : 1);
+ break;
+ case Opcodes.PUTSTATIC:
+ size = stackSize + (c == 'D' || c == 'J' ? -2 : -1);
+ break;
+ case Opcodes.GETFIELD:
+ size = stackSize + (c == 'D' || c == 'J' ? 1 : 0);
+ break;
+ // case Constants.PUTFIELD:
+ default:
+ size = stackSize + (c == 'D' || c == 'J' ? -3 : -2);
+ break;
}
// updates current and max stack sizes
if (size > maxStackSize) {
@@ -826,12 +807,8 @@ class MethodWriter extends MethodVisitor {
}
@Override
- public void visitMethodInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc) {
boolean itf = opcode == Opcodes.INVOKEINTERFACE;
Item i = cw.newMethodItem(owner, name, desc, itf);
int argSize = i.intVal;
@@ -882,12 +859,8 @@ class MethodWriter extends MethodVisitor {
}
@Override
- public void visitInvokeDynamicInsn(
- final String name,
- final String desc,
- final Handle bsm,
- final Object... bsmArgs)
- {
+ public void visitInvokeDynamicInsn(final String name, final String desc,
+ final Handle bsm, final Object... bsmArgs) {
Item i = cw.newInvokeDynamicItem(name, desc, bsm, bsmArgs);
int argSize = i.intVal;
// Label currentBlock = this.currentBlock;
@@ -967,8 +940,7 @@ class MethodWriter extends MethodVisitor {
}
// adds the instruction to the bytecode of the method
if ((label.status & Label.RESOLVED) != 0
- && label.position - code.length < Short.MIN_VALUE)
- {
+ && label.position - code.length < Short.MIN_VALUE) {
/*
* case of a backward jump with an offset < -32768. In this case we
* automatically replace GOTO with GOTO_W, JSR with JSR_W and IFxxx
@@ -986,8 +958,7 @@ class MethodWriter extends MethodVisitor {
if (nextInsn != null) {
nextInsn.status |= Label.TARGET;
}
- code.putByte(opcode <= 166
- ? ((opcode + 1) ^ 1) - 1
+ code.putByte(opcode <= 166 ? ((opcode + 1) ^ 1) - 1
: opcode ^ 1);
code.putShort(8); // jump offset
code.putByte(200); // GOTO_W
@@ -1082,8 +1053,7 @@ class MethodWriter extends MethodVisitor {
} else {
int size;
// computes the stack size variation
- if (i.type == ClassWriter.LONG || i.type == ClassWriter.DOUBLE)
- {
+ if (i.type == ClassWriter.LONG || i.type == ClassWriter.DOUBLE) {
size = stackSize + 2;
} else {
size = stackSize + 1;
@@ -1122,8 +1092,7 @@ class MethodWriter extends MethodVisitor {
}
// adds the instruction to the bytecode of the method
if ((var > 255) || (increment > 127) || (increment < -128)) {
- code.putByte(196 /* WIDE */)
- .put12(Opcodes.IINC, var)
+ code.putByte(196 /* WIDE */).put12(Opcodes.IINC, var)
.putShort(increment);
} else {
code.putByte(Opcodes.IINC).put11(var, increment);
@@ -1131,12 +1100,8 @@ class MethodWriter extends MethodVisitor {
}
@Override
- public void visitTableSwitchInsn(
- final int min,
- final int max,
- final Label dflt,
- final Label... labels)
- {
+ public void visitTableSwitchInsn(final int min, final int max,
+ final Label dflt, final Label... labels) {
// adds the instruction to the bytecode of the method
int source = code.length;
code.putByte(Opcodes.TABLESWITCH);
@@ -1151,11 +1116,8 @@ class MethodWriter extends MethodVisitor {
}
@Override
- public void visitLookupSwitchInsn(
- final Label dflt,
- final int[] keys,
- final Label[] labels)
- {
+ public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
+ final Label[] labels) {
// adds the instruction to the bytecode of the method
int source = code.length;
code.putByte(Opcodes.LOOKUPSWITCH);
@@ -1214,12 +1176,8 @@ class MethodWriter extends MethodVisitor {
}
@Override
- public void visitTryCatchBlock(
- final Label start,
- final Label end,
- final Label handler,
- final String type)
- {
+ public void visitTryCatchBlock(final Label start, final Label end,
+ final Label handler, final String type) {
++handlerCount;
Handler h = new Handler();
h.start = start;
@@ -1236,14 +1194,9 @@ class MethodWriter extends MethodVisitor {
}
@Override
- public void visitLocalVariable(
- final String name,
- final String desc,
- final String signature,
- final Label start,
- final Label end,
- final int index)
- {
+ public void visitLocalVariable(final String name, final String desc,
+ final String signature, final Label start, final Label end,
+ final int index) {
if (signature != null) {
if (localVarType == null) {
localVarType = new ByteVector();
@@ -1251,8 +1204,7 @@ class MethodWriter extends MethodVisitor {
++localVarTypeCount;
localVarType.putShort(start.position)
.putShort(end.position - start.position)
- .putShort(cw.newUTF8(name))
- .putShort(cw.newUTF8(signature))
+ .putShort(cw.newUTF8(name)).putShort(cw.newUTF8(signature))
.putShort(index);
}
if (localVar == null) {
@@ -1261,8 +1213,7 @@ class MethodWriter extends MethodVisitor {
++localVarCount;
localVar.putShort(start.position)
.putShort(end.position - start.position)
- .putShort(cw.newUTF8(name))
- .putShort(cw.newUTF8(desc))
+ .putShort(cw.newUTF8(name)).putShort(cw.newUTF8(desc))
.putShort(index);
if (compute != NOTHING) {
// updates max locals
@@ -1294,8 +1245,7 @@ class MethodWriter extends MethodVisitor {
Label h = handler.handler.getFirst();
Label e = handler.end.getFirst();
// computes the kind of the edges to 'h'
- String t = handler.desc == null
- ? "java/lang/Throwable"
+ String t = handler.desc == null ? "java/lang/Throwable"
: handler.desc;
int kind = Frame.OBJECT | cw.addType(t);
// h is an exception handler
@@ -1382,11 +1332,12 @@ class MethodWriter extends MethodVisitor {
}
code.data[end] = (byte) Opcodes.ATHROW;
// emits a frame for this unreachable block
- startFrame(start, 0, 1);
- frame[frameIndex++] = Frame.OBJECT
+ int frameIndex = startFrame(start, 0, 1);
+ frame[frameIndex] = Frame.OBJECT
| cw.addType("java/lang/Throwable");
endFrame();
- // removes the start-end range from the exception handlers
+ // removes the start-end range from the exception
+ // handlers
firstHandler = Handler.remove(firstHandler, l, k);
}
}
@@ -1535,8 +1486,10 @@ class MethodWriter extends MethodVisitor {
/**
* Adds a successor to the {@link #currentBlock currentBlock} block.
*
- * @param info information about the control flow edge to be added.
- * @param successor the successor block to be added to the current block.
+ * @param info
+ * information about the control flow edge to be added.
+ * @param successor
+ * the successor block to be added to the current block.
*/
private void addSuccessor(final int info, final Label successor) {
// creates and initializes an Edge object...
@@ -1573,7 +1526,8 @@ class MethodWriter extends MethodVisitor {
/**
* Visits a frame that has been computed from scratch.
*
- * @param f the frame that must be visited.
+ * @param f
+ * the frame that must be visited.
*/
private void visitFrame(final Frame f) {
int i, t;
@@ -1606,7 +1560,7 @@ class MethodWriter extends MethodVisitor {
}
}
// visits the frame and its content
- startFrame(f.owner.position, nLocal, nStack);
+ int frameIndex = startFrame(f.owner.position, nLocal, nStack);
for (i = 0; nLocal > 0; ++i, --nLocal) {
t = locals[i];
frame[frameIndex++] = t;
@@ -1625,15 +1579,78 @@ class MethodWriter extends MethodVisitor {
}
/**
+ * Visit the implicit first frame of this method.
+ */
+ private void visitImplicitFirstFrame() {
+ // There can be at most descriptor.length() + 1 locals
+ int frameIndex = startFrame(0, descriptor.length() + 1, 0);
+ if ((access & Opcodes.ACC_STATIC) == 0) {
+ if ((access & ACC_CONSTRUCTOR) == 0) {
+ frame[frameIndex++] = Frame.OBJECT | cw.addType(cw.thisName);
+ } else {
+ frame[frameIndex++] = 6; // Opcodes.UNINITIALIZED_THIS;
+ }
+ }
+ int i = 1;
+ loop: while (true) {
+ int j = i;
+ switch (descriptor.charAt(i++)) {
+ case 'Z':
+ case 'C':
+ case 'B':
+ case 'S':
+ case 'I':
+ frame[frameIndex++] = 1; // Opcodes.INTEGER;
+ break;
+ case 'F':
+ frame[frameIndex++] = 2; // Opcodes.FLOAT;
+ break;
+ case 'J':
+ frame[frameIndex++] = 4; // Opcodes.LONG;
+ break;
+ case 'D':
+ frame[frameIndex++] = 3; // Opcodes.DOUBLE;
+ break;
+ case '[':
+ while (descriptor.charAt(i) == '[') {
+ ++i;
+ }
+ if (descriptor.charAt(i) == 'L') {
+ ++i;
+ while (descriptor.charAt(i) != ';') {
+ ++i;
+ }
+ }
+ frame[frameIndex++] = Frame.OBJECT
+ | cw.addType(descriptor.substring(j, ++i));
+ break;
+ case 'L':
+ while (descriptor.charAt(i) != ';') {
+ ++i;
+ }
+ frame[frameIndex++] = Frame.OBJECT
+ | cw.addType(descriptor.substring(j + 1, i++));
+ break;
+ default:
+ break loop;
+ }
+ }
+ frame[1] = frameIndex - 3;
+ endFrame();
+ }
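For orientation only (not part of the change): the descriptor walk above parallels what Type.getArgumentTypes already exposes, plus the 'this' slot handled before the loop. A quick sketch of that correspondence:

import scala.tools.asm.Type;

class DescriptorSketch {
    public static void main(String[] args) {
        // Prints I, [J and Ljava/lang/String; -- the argument types whose
        // frame values the implicit first frame is built from.
        for (Type t : Type.getArgumentTypes("(I[JLjava/lang/String;)V")) {
            System.out.println(t.getDescriptor());
        }
    }
}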
+
+ /**
* Starts the visit of a stack map frame.
*
- * @param offset the offset of the instruction to which the frame
- * corresponds.
- * @param nLocal the number of local variables in the frame.
- * @param nStack the number of stack elements in the frame.
- */
- private void startFrame(final int offset, final int nLocal, final int nStack)
- {
+ * @param offset
+ * the offset of the instruction to which the frame corresponds.
+ * @param nLocal
+ * the number of local variables in the frame.
+ * @param nStack
+ * the number of stack elements in the frame.
+ * @return the index of the next element to be written in this frame.
+ */
+ private int startFrame(final int offset, final int nLocal, final int nStack) {
int n = 3 + nLocal + nStack;
if (frame == null || frame.length < n) {
frame = new int[n];
@@ -1641,7 +1658,7 @@ class MethodWriter extends MethodVisitor {
frame[0] = offset;
frame[1] = nLocal;
frame[2] = nStack;
- frameIndex = 3;
+ return 3;
}
/**
@@ -1686,24 +1703,23 @@ class MethodWriter extends MethodVisitor {
if (cstackSize == 0) {
k = clocalsSize - localsSize;
switch (k) {
- case -3:
- case -2:
- case -1:
- type = CHOP_FRAME;
- localsSize = clocalsSize;
- break;
- case 0:
- type = delta < 64 ? SAME_FRAME : SAME_FRAME_EXTENDED;
- break;
- case 1:
- case 2:
- case 3:
- type = APPEND_FRAME;
- break;
+ case -3:
+ case -2:
+ case -1:
+ type = CHOP_FRAME;
+ localsSize = clocalsSize;
+ break;
+ case 0:
+ type = delta < 64 ? SAME_FRAME : SAME_FRAME_EXTENDED;
+ break;
+ case 1:
+ case 2:
+ case 3:
+ type = APPEND_FRAME;
+ break;
}
} else if (clocalsSize == localsSize && cstackSize == 1) {
- type = delta < 63
- ? SAME_LOCALS_1_STACK_ITEM_FRAME
+ type = delta < 63 ? SAME_LOCALS_1_STACK_ITEM_FRAME
: SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED;
}
if (type != FULL_FRAME) {
@@ -1718,36 +1734,34 @@ class MethodWriter extends MethodVisitor {
}
}
switch (type) {
- case SAME_FRAME:
- stackMap.putByte(delta);
- break;
- case SAME_LOCALS_1_STACK_ITEM_FRAME:
- stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta);
- writeFrameTypes(3 + clocalsSize, 4 + clocalsSize);
- break;
- case SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED:
- stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED)
- .putShort(delta);
- writeFrameTypes(3 + clocalsSize, 4 + clocalsSize);
- break;
- case SAME_FRAME_EXTENDED:
- stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta);
- break;
- case CHOP_FRAME:
- stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta);
- break;
- case APPEND_FRAME:
- stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta);
- writeFrameTypes(3 + localsSize, 3 + clocalsSize);
- break;
- // case FULL_FRAME:
- default:
- stackMap.putByte(FULL_FRAME)
- .putShort(delta)
- .putShort(clocalsSize);
- writeFrameTypes(3, 3 + clocalsSize);
- stackMap.putShort(cstackSize);
- writeFrameTypes(3 + clocalsSize, 3 + clocalsSize + cstackSize);
+ case SAME_FRAME:
+ stackMap.putByte(delta);
+ break;
+ case SAME_LOCALS_1_STACK_ITEM_FRAME:
+ stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta);
+ writeFrameTypes(3 + clocalsSize, 4 + clocalsSize);
+ break;
+ case SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED:
+ stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED).putShort(
+ delta);
+ writeFrameTypes(3 + clocalsSize, 4 + clocalsSize);
+ break;
+ case SAME_FRAME_EXTENDED:
+ stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta);
+ break;
+ case CHOP_FRAME:
+ stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta);
+ break;
+ case APPEND_FRAME:
+ stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta);
+ writeFrameTypes(3 + localsSize, 3 + clocalsSize);
+ break;
+ // case FULL_FRAME:
+ default:
+ stackMap.putByte(FULL_FRAME).putShort(delta).putShort(clocalsSize);
+ writeFrameTypes(3, 3 + clocalsSize);
+ stackMap.putShort(cstackSize);
+ writeFrameTypes(3 + clocalsSize, 3 + clocalsSize + cstackSize);
}
}
@@ -1757,8 +1771,10 @@ class MethodWriter extends MethodVisitor {
* in {@link Label} to the format used in StackMapTable attributes. In
* particular, it converts type table indexes to constant pool indexes.
*
- * @param start index of the first type in {@link #frame} to write.
- * @param end index of last type in {@link #frame} to write (exclusive).
+ * @param start
+ * index of the first type in {@link #frame} to write.
+ * @param end
+ * index of last type in {@link #frame} to write (exclusive).
*/
private void writeFrameTypes(final int start, final int end) {
for (int i = start; i < end; ++i) {
@@ -1767,15 +1783,15 @@ class MethodWriter extends MethodVisitor {
if (d == 0) {
int v = t & Frame.BASE_VALUE;
switch (t & Frame.BASE_KIND) {
- case Frame.OBJECT:
- stackMap.putByte(7)
- .putShort(cw.newClass(cw.typeTable[v].strVal1));
- break;
- case Frame.UNINITIALIZED:
- stackMap.putByte(8).putShort(cw.typeTable[v].intVal);
- break;
- default:
- stackMap.putByte(v);
+ case Frame.OBJECT:
+ stackMap.putByte(7).putShort(
+ cw.newClass(cw.typeTable[v].strVal1));
+ break;
+ case Frame.UNINITIALIZED:
+ stackMap.putByte(8).putShort(cw.typeTable[v].intVal);
+ break;
+ default:
+ stackMap.putByte(v);
}
} else {
StringBuffer buf = new StringBuffer();
@@ -1789,29 +1805,29 @@ class MethodWriter extends MethodVisitor {
buf.append(';');
} else {
switch (t & 0xF) {
- case 1:
- buf.append('I');
- break;
- case 2:
- buf.append('F');
- break;
- case 3:
- buf.append('D');
- break;
- case 9:
- buf.append('Z');
- break;
- case 10:
- buf.append('B');
- break;
- case 11:
- buf.append('C');
- break;
- case 12:
- buf.append('S');
- break;
- default:
- buf.append('J');
+ case 1:
+ buf.append('I');
+ break;
+ case 2:
+ buf.append('F');
+ break;
+ case 3:
+ buf.append('D');
+ break;
+ case 9:
+ buf.append('Z');
+ break;
+ case 10:
+ buf.append('B');
+ break;
+ case 11:
+ buf.append('C');
+ break;
+ case 12:
+ buf.append('S');
+ break;
+ default:
+ buf.append('J');
}
}
stackMap.putByte(7).putShort(cw.newClass(buf.toString()));
@@ -1875,10 +1891,7 @@ class MethodWriter extends MethodVisitor {
size += 8 + stackMap.length;
}
if (cattrs != null) {
- size += cattrs.getSize(cw,
- code.data,
- code.length,
- maxStack,
+ size += cattrs.getSize(cw, code.data, code.length, maxStack,
maxLocals);
}
}
@@ -1886,11 +1899,12 @@ class MethodWriter extends MethodVisitor {
cw.newUTF8("Exceptions");
size += 8 + 2 * exceptionCount;
}
- if ((access & Opcodes.ACC_SYNTHETIC) != 0
- && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
- {
- cw.newUTF8("Synthetic");
- size += 6;
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+ if ((cw.version & 0xFFFF) < Opcodes.V1_5
+ || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+ cw.newUTF8("Synthetic");
+ size += 6;
+ }
}
if ((access & Opcodes.ACC_DEPRECATED) != 0) {
cw.newUTF8("Deprecated");
@@ -1936,13 +1950,15 @@ class MethodWriter extends MethodVisitor {
/**
* Puts the bytecode of this method in the given byte vector.
*
- * @param out the byte vector into which the bytecode of this method must be
- * copied.
+ * @param out
+ * the byte vector into which the bytecode of this method must be
+ * copied.
*/
final void put(final ByteVector out) {
- int mask = Opcodes.ACC_DEPRECATED
+ final int FACTOR = ClassWriter.TO_ACC_SYNTHETIC;
+ int mask = ACC_CONSTRUCTOR | Opcodes.ACC_DEPRECATED
| ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
- | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / (ClassWriter.ACC_SYNTHETIC_ATTRIBUTE / Opcodes.ACC_SYNTHETIC));
+ | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / FACTOR);
out.putShort(access & ~mask).putShort(name).putShort(desc);
if (classReaderOffset != 0) {
out.putByteArray(cw.cr.b, classReaderOffset, classReaderLength);
@@ -1955,10 +1971,11 @@ class MethodWriter extends MethodVisitor {
if (exceptionCount > 0) {
++attributeCount;
}
- if ((access & Opcodes.ACC_SYNTHETIC) != 0
- && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
- {
- ++attributeCount;
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+ if ((cw.version & 0xFFFF) < Opcodes.V1_5
+ || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+ ++attributeCount;
+ }
}
if ((access & Opcodes.ACC_DEPRECATED) != 0) {
++attributeCount;
@@ -2000,10 +2017,7 @@ class MethodWriter extends MethodVisitor {
size += 8 + stackMap.length;
}
if (cattrs != null) {
- size += cattrs.getSize(cw,
- code.data,
- code.length,
- maxStack,
+ size += cattrs.getSize(cw, code.data, code.length, maxStack,
maxLocals);
}
out.putShort(cw.newUTF8("Code")).putInt(size);
@@ -2013,10 +2027,8 @@ class MethodWriter extends MethodVisitor {
if (handlerCount > 0) {
Handler h = firstHandler;
while (h != null) {
- out.putShort(h.start.position)
- .putShort(h.end.position)
- .putShort(h.handler.position)
- .putShort(h.type);
+ out.putShort(h.start.position).putShort(h.end.position)
+ .putShort(h.handler.position).putShort(h.type);
h = h.next;
}
}
@@ -2063,24 +2075,24 @@ class MethodWriter extends MethodVisitor {
}
}
if (exceptionCount > 0) {
- out.putShort(cw.newUTF8("Exceptions"))
- .putInt(2 * exceptionCount + 2);
+ out.putShort(cw.newUTF8("Exceptions")).putInt(
+ 2 * exceptionCount + 2);
out.putShort(exceptionCount);
for (int i = 0; i < exceptionCount; ++i) {
out.putShort(exceptions[i]);
}
}
- if ((access & Opcodes.ACC_SYNTHETIC) != 0
- && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
- {
- out.putShort(cw.newUTF8("Synthetic")).putInt(0);
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+ if ((cw.version & 0xFFFF) < Opcodes.V1_5
+ || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+ out.putShort(cw.newUTF8("Synthetic")).putInt(0);
+ }
}
if ((access & Opcodes.ACC_DEPRECATED) != 0) {
out.putShort(cw.newUTF8("Deprecated")).putInt(0);
}
if (ClassReader.SIGNATURES && signature != null) {
- out.putShort(cw.newUTF8("Signature"))
- .putInt(2)
+ out.putShort(cw.newUTF8("Signature")).putInt(2)
.putShort(cw.newUTF8(signature));
}
if (ClassReader.ANNOTATIONS && annd != null) {
@@ -2123,10 +2135,12 @@ class MethodWriter extends MethodVisitor {
* 32768, in which case IFEQ 32766 must be replaced with IFNE 8 GOTO_W
* 32765. This, in turn, may require to increase the size of another jump
* instruction, and so on... All these operations are handled automatically
- * by this method. <p> <i>This method must be called after all the method
- * that is being built has been visited</i>. In particular, the
- * {@link Label Label} objects used to construct the method are no longer
- * valid after this method has been called.
+ * by this method.
+ * <p>
+ * <i>This method must be called after all the method that is being built
+ * has been visited</i>. In particular, the {@link Label Label} objects used
+ * to construct the method are no longer valid after this method has been
+ * called.
*/
private void resizeInstructions() {
byte[] b = code.data; // bytecode of the method
@@ -2176,158 +2190,14 @@ class MethodWriter extends MethodVisitor {
int insert = 0; // bytes to be added after this instruction
switch (ClassWriter.TYPE[opcode]) {
- case ClassWriter.NOARG_INSN:
- case ClassWriter.IMPLVAR_INSN:
- u += 1;
- break;
- case ClassWriter.LABEL_INSN:
- if (opcode > 201) {
- // converts temporary opcodes 202 to 217, 218 and
- // 219 to IFEQ ... JSR (inclusive), IFNULL and
- // IFNONNULL
- opcode = opcode < 218 ? opcode - 49 : opcode - 20;
- label = u + readUnsignedShort(b, u + 1);
- } else {
- label = u + readShort(b, u + 1);
- }
- newOffset = getNewOffset(allIndexes, allSizes, u, label);
- if (newOffset < Short.MIN_VALUE
- || newOffset > Short.MAX_VALUE)
- {
- if (!resize[u]) {
- if (opcode == Opcodes.GOTO
- || opcode == Opcodes.JSR)
- {
- // two additional bytes will be required to
- // replace this GOTO or JSR instruction with
- // a GOTO_W or a JSR_W
- insert = 2;
- } else {
- // five additional bytes will be required to
- // replace this IFxxx <l> instruction with
- // IFNOTxxx <l'> GOTO_W <l>, where IFNOTxxx
- // is the "opposite" opcode of IFxxx (i.e.,
- // IFNE for IFEQ) and where <l'> designates
- // the instruction just after the GOTO_W.
- insert = 5;
- }
- resize[u] = true;
- }
- }
- u += 3;
- break;
- case ClassWriter.LABELW_INSN:
- u += 5;
- break;
- case ClassWriter.TABL_INSN:
- if (state == 1) {
- // true number of bytes to be added (or removed)
- // from this instruction = (future number of padding
- // bytes - current number of padding byte) -
- // previously over estimated variation =
- // = ((3 - newOffset%4) - (3 - u%4)) - u%4
- // = (-newOffset%4 + u%4) - u%4
- // = -(newOffset & 3)
- newOffset = getNewOffset(allIndexes, allSizes, 0, u);
- insert = -(newOffset & 3);
- } else if (!resize[u]) {
- // over estimation of the number of bytes to be
- // added to this instruction = 3 - current number
- // of padding bytes = 3 - (3 - u%4) = u%4 = u & 3
- insert = u & 3;
- resize[u] = true;
- }
- // skips instruction
- u = u + 4 - (u & 3);
- u += 4 * (readInt(b, u + 8) - readInt(b, u + 4) + 1) + 12;
- break;
- case ClassWriter.LOOK_INSN:
- if (state == 1) {
- // like TABL_INSN
- newOffset = getNewOffset(allIndexes, allSizes, 0, u);
- insert = -(newOffset & 3);
- } else if (!resize[u]) {
- // like TABL_INSN
- insert = u & 3;
- resize[u] = true;
- }
- // skips instruction
- u = u + 4 - (u & 3);
- u += 8 * readInt(b, u + 4) + 8;
- break;
- case ClassWriter.WIDE_INSN:
- opcode = b[u + 1] & 0xFF;
- if (opcode == Opcodes.IINC) {
- u += 6;
- } else {
- u += 4;
- }
- break;
- case ClassWriter.VAR_INSN:
- case ClassWriter.SBYTE_INSN:
- case ClassWriter.LDC_INSN:
- u += 2;
- break;
- case ClassWriter.SHORT_INSN:
- case ClassWriter.LDCW_INSN:
- case ClassWriter.FIELDORMETH_INSN:
- case ClassWriter.TYPE_INSN:
- case ClassWriter.IINC_INSN:
- u += 3;
- break;
- case ClassWriter.ITFMETH_INSN:
- case ClassWriter.INDYMETH_INSN:
- u += 5;
- break;
- // case ClassWriter.MANA_INSN:
- default:
- u += 4;
- break;
- }
- if (insert != 0) {
- // adds a new (u, insert) entry in the allIndexes and
- // allSizes arrays
- int[] newIndexes = new int[allIndexes.length + 1];
- int[] newSizes = new int[allSizes.length + 1];
- System.arraycopy(allIndexes,
- 0,
- newIndexes,
- 0,
- allIndexes.length);
- System.arraycopy(allSizes, 0, newSizes, 0, allSizes.length);
- newIndexes[allIndexes.length] = u;
- newSizes[allSizes.length] = insert;
- allIndexes = newIndexes;
- allSizes = newSizes;
- if (insert > 0) {
- state = 3;
- }
- }
- }
- if (state < 3) {
- --state;
- }
- } while (state != 0);
-
- // 2nd step:
- // copies the bytecode of the method into a new bytevector, updates the
- // offsets, and inserts (or removes) bytes as requested.
-
- ByteVector newCode = new ByteVector(code.length);
-
- u = 0;
- while (u < code.length) {
- int opcode = b[u] & 0xFF;
- switch (ClassWriter.TYPE[opcode]) {
case ClassWriter.NOARG_INSN:
case ClassWriter.IMPLVAR_INSN:
- newCode.putByte(opcode);
u += 1;
break;
case ClassWriter.LABEL_INSN:
if (opcode > 201) {
- // changes temporary opcodes 202 to 217 (inclusive), 218
- // and 219 to IFEQ ... JSR (inclusive), IFNULL and
+ // converts temporary opcodes 202 to 217, 218 and
+ // 219 to IFEQ ... JSR (inclusive), IFNULL and
// IFNONNULL
opcode = opcode < 218 ? opcode - 49 : opcode - 20;
label = u + readUnsignedShort(b, u + 1);
@@ -2335,100 +2205,78 @@ class MethodWriter extends MethodVisitor {
label = u + readShort(b, u + 1);
}
newOffset = getNewOffset(allIndexes, allSizes, u, label);
- if (resize[u]) {
- // replaces GOTO with GOTO_W, JSR with JSR_W and IFxxx
- // <l> with IFNOTxxx <l'> GOTO_W <l>, where IFNOTxxx is
- // the "opposite" opcode of IFxxx (i.e., IFNE for IFEQ)
- // and where <l'> designates the instruction just after
- // the GOTO_W.
- if (opcode == Opcodes.GOTO) {
- newCode.putByte(200); // GOTO_W
- } else if (opcode == Opcodes.JSR) {
- newCode.putByte(201); // JSR_W
- } else {
- newCode.putByte(opcode <= 166
- ? ((opcode + 1) ^ 1) - 1
- : opcode ^ 1);
- newCode.putShort(8); // jump offset
- newCode.putByte(200); // GOTO_W
- // newOffset now computed from start of GOTO_W
- newOffset -= 3;
+ if (newOffset < Short.MIN_VALUE
+ || newOffset > Short.MAX_VALUE) {
+ if (!resize[u]) {
+ if (opcode == Opcodes.GOTO || opcode == Opcodes.JSR) {
+ // two additional bytes will be required to
+ // replace this GOTO or JSR instruction with
+ // a GOTO_W or a JSR_W
+ insert = 2;
+ } else {
+ // five additional bytes will be required to
+ // replace this IFxxx <l> instruction with
+ // IFNOTxxx <l'> GOTO_W <l>, where IFNOTxxx
+ // is the "opposite" opcode of IFxxx (i.e.,
+ // IFNE for IFEQ) and where <l'> designates
+ // the instruction just after the GOTO_W.
+ insert = 5;
+ }
+ resize[u] = true;
}
- newCode.putInt(newOffset);
- } else {
- newCode.putByte(opcode);
- newCode.putShort(newOffset);
}
u += 3;
break;
case ClassWriter.LABELW_INSN:
- label = u + readInt(b, u + 1);
- newOffset = getNewOffset(allIndexes, allSizes, u, label);
- newCode.putByte(opcode);
- newCode.putInt(newOffset);
u += 5;
break;
case ClassWriter.TABL_INSN:
- // skips 0 to 3 padding bytes
- v = u;
- u = u + 4 - (v & 3);
- // reads and copies instruction
- newCode.putByte(Opcodes.TABLESWITCH);
- newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4);
- label = v + readInt(b, u);
- u += 4;
- newOffset = getNewOffset(allIndexes, allSizes, v, label);
- newCode.putInt(newOffset);
- j = readInt(b, u);
- u += 4;
- newCode.putInt(j);
- j = readInt(b, u) - j + 1;
- u += 4;
- newCode.putInt(readInt(b, u - 4));
- for (; j > 0; --j) {
- label = v + readInt(b, u);
- u += 4;
- newOffset = getNewOffset(allIndexes, allSizes, v, label);
- newCode.putInt(newOffset);
+ if (state == 1) {
+ // true number of bytes to be added (or removed)
+ // from this instruction = (future number of padding
+ // bytes - current number of padding byte) -
+ // previously over estimated variation =
+ // = ((3 - newOffset%4) - (3 - u%4)) - u%4
+ // = (-newOffset%4 + u%4) - u%4
+ // = -(newOffset & 3)
+ newOffset = getNewOffset(allIndexes, allSizes, 0, u);
+ insert = -(newOffset & 3);
+ } else if (!resize[u]) {
+ // over estimation of the number of bytes to be
+ // added to this instruction = 3 - current number
+ // of padding bytes = 3 - (3 - u%4) = u%4 = u & 3
+ insert = u & 3;
+ resize[u] = true;
}
+ // skips instruction
+ u = u + 4 - (u & 3);
+ u += 4 * (readInt(b, u + 8) - readInt(b, u + 4) + 1) + 12;
break;
case ClassWriter.LOOK_INSN:
- // skips 0 to 3 padding bytes
- v = u;
- u = u + 4 - (v & 3);
- // reads and copies instruction
- newCode.putByte(Opcodes.LOOKUPSWITCH);
- newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4);
- label = v + readInt(b, u);
- u += 4;
- newOffset = getNewOffset(allIndexes, allSizes, v, label);
- newCode.putInt(newOffset);
- j = readInt(b, u);
- u += 4;
- newCode.putInt(j);
- for (; j > 0; --j) {
- newCode.putInt(readInt(b, u));
- u += 4;
- label = v + readInt(b, u);
- u += 4;
- newOffset = getNewOffset(allIndexes, allSizes, v, label);
- newCode.putInt(newOffset);
+ if (state == 1) {
+ // like TABL_INSN
+ newOffset = getNewOffset(allIndexes, allSizes, 0, u);
+ insert = -(newOffset & 3);
+ } else if (!resize[u]) {
+ // like TABL_INSN
+ insert = u & 3;
+ resize[u] = true;
}
+ // skips instruction
+ u = u + 4 - (u & 3);
+ u += 8 * readInt(b, u + 4) + 8;
break;
case ClassWriter.WIDE_INSN:
opcode = b[u + 1] & 0xFF;
if (opcode == Opcodes.IINC) {
- newCode.putByteArray(b, u, 6);
u += 6;
} else {
- newCode.putByteArray(b, u, 4);
u += 4;
}
break;
case ClassWriter.VAR_INSN:
case ClassWriter.SBYTE_INSN:
case ClassWriter.LDC_INSN:
- newCode.putByteArray(b, u, 2);
u += 2;
break;
case ClassWriter.SHORT_INSN:
@@ -2436,19 +2284,178 @@ class MethodWriter extends MethodVisitor {
case ClassWriter.FIELDORMETH_INSN:
case ClassWriter.TYPE_INSN:
case ClassWriter.IINC_INSN:
- newCode.putByteArray(b, u, 3);
u += 3;
break;
case ClassWriter.ITFMETH_INSN:
case ClassWriter.INDYMETH_INSN:
- newCode.putByteArray(b, u, 5);
u += 5;
break;
- // case MANA_INSN:
+ // case ClassWriter.MANA_INSN:
default:
- newCode.putByteArray(b, u, 4);
u += 4;
break;
+ }
+ if (insert != 0) {
+ // adds a new (u, insert) entry in the allIndexes and
+ // allSizes arrays
+ int[] newIndexes = new int[allIndexes.length + 1];
+ int[] newSizes = new int[allSizes.length + 1];
+ System.arraycopy(allIndexes, 0, newIndexes, 0,
+ allIndexes.length);
+ System.arraycopy(allSizes, 0, newSizes, 0, allSizes.length);
+ newIndexes[allIndexes.length] = u;
+ newSizes[allSizes.length] = insert;
+ allIndexes = newIndexes;
+ allSizes = newSizes;
+ if (insert > 0) {
+ state = 3;
+ }
+ }
+ }
+ if (state < 3) {
+ --state;
+ }
+ } while (state != 0);
+
+ // 2nd step:
+ // copies the bytecode of the method into a new bytevector, updates the
+ // offsets, and inserts (or removes) bytes as requested.
+
+ ByteVector newCode = new ByteVector(code.length);
+
+ u = 0;
+ while (u < code.length) {
+ int opcode = b[u] & 0xFF;
+ switch (ClassWriter.TYPE[opcode]) {
+ case ClassWriter.NOARG_INSN:
+ case ClassWriter.IMPLVAR_INSN:
+ newCode.putByte(opcode);
+ u += 1;
+ break;
+ case ClassWriter.LABEL_INSN:
+ if (opcode > 201) {
+ // changes temporary opcodes 202 to 217 (inclusive), 218
+ // and 219 to IFEQ ... JSR (inclusive), IFNULL and
+ // IFNONNULL
+ opcode = opcode < 218 ? opcode - 49 : opcode - 20;
+ label = u + readUnsignedShort(b, u + 1);
+ } else {
+ label = u + readShort(b, u + 1);
+ }
+ newOffset = getNewOffset(allIndexes, allSizes, u, label);
+ if (resize[u]) {
+ // replaces GOTO with GOTO_W, JSR with JSR_W and IFxxx
+ // <l> with IFNOTxxx <l'> GOTO_W <l>, where IFNOTxxx is
+ // the "opposite" opcode of IFxxx (i.e., IFNE for IFEQ)
+ // and where <l'> designates the instruction just after
+ // the GOTO_W.
+ if (opcode == Opcodes.GOTO) {
+ newCode.putByte(200); // GOTO_W
+ } else if (opcode == Opcodes.JSR) {
+ newCode.putByte(201); // JSR_W
+ } else {
+ newCode.putByte(opcode <= 166 ? ((opcode + 1) ^ 1) - 1
+ : opcode ^ 1);
+ newCode.putShort(8); // jump offset
+ newCode.putByte(200); // GOTO_W
+ // newOffset now computed from start of GOTO_W
+ newOffset -= 3;
+ }
+ newCode.putInt(newOffset);
+ } else {
+ newCode.putByte(opcode);
+ newCode.putShort(newOffset);
+ }
+ u += 3;
+ break;
+ case ClassWriter.LABELW_INSN:
+ label = u + readInt(b, u + 1);
+ newOffset = getNewOffset(allIndexes, allSizes, u, label);
+ newCode.putByte(opcode);
+ newCode.putInt(newOffset);
+ u += 5;
+ break;
+ case ClassWriter.TABL_INSN:
+ // skips 0 to 3 padding bytes
+ v = u;
+ u = u + 4 - (v & 3);
+ // reads and copies instruction
+ newCode.putByte(Opcodes.TABLESWITCH);
+ newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4);
+ label = v + readInt(b, u);
+ u += 4;
+ newOffset = getNewOffset(allIndexes, allSizes, v, label);
+ newCode.putInt(newOffset);
+ j = readInt(b, u);
+ u += 4;
+ newCode.putInt(j);
+ j = readInt(b, u) - j + 1;
+ u += 4;
+ newCode.putInt(readInt(b, u - 4));
+ for (; j > 0; --j) {
+ label = v + readInt(b, u);
+ u += 4;
+ newOffset = getNewOffset(allIndexes, allSizes, v, label);
+ newCode.putInt(newOffset);
+ }
+ break;
+ case ClassWriter.LOOK_INSN:
+ // skips 0 to 3 padding bytes
+ v = u;
+ u = u + 4 - (v & 3);
+ // reads and copies instruction
+ newCode.putByte(Opcodes.LOOKUPSWITCH);
+ newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4);
+ label = v + readInt(b, u);
+ u += 4;
+ newOffset = getNewOffset(allIndexes, allSizes, v, label);
+ newCode.putInt(newOffset);
+ j = readInt(b, u);
+ u += 4;
+ newCode.putInt(j);
+ for (; j > 0; --j) {
+ newCode.putInt(readInt(b, u));
+ u += 4;
+ label = v + readInt(b, u);
+ u += 4;
+ newOffset = getNewOffset(allIndexes, allSizes, v, label);
+ newCode.putInt(newOffset);
+ }
+ break;
+ case ClassWriter.WIDE_INSN:
+ opcode = b[u + 1] & 0xFF;
+ if (opcode == Opcodes.IINC) {
+ newCode.putByteArray(b, u, 6);
+ u += 6;
+ } else {
+ newCode.putByteArray(b, u, 4);
+ u += 4;
+ }
+ break;
+ case ClassWriter.VAR_INSN:
+ case ClassWriter.SBYTE_INSN:
+ case ClassWriter.LDC_INSN:
+ newCode.putByteArray(b, u, 2);
+ u += 2;
+ break;
+ case ClassWriter.SHORT_INSN:
+ case ClassWriter.LDCW_INSN:
+ case ClassWriter.FIELDORMETH_INSN:
+ case ClassWriter.TYPE_INSN:
+ case ClassWriter.IINC_INSN:
+ newCode.putByteArray(b, u, 3);
+ u += 3;
+ break;
+ case ClassWriter.ITFMETH_INSN:
+ case ClassWriter.INDYMETH_INSN:
+ newCode.putByteArray(b, u, 5);
+ u += 5;
+ break;
+ // case MANA_INSN:
+ default:
+ newCode.putByteArray(b, u, 4);
+ u += 4;
+ break;
}
}
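The rewrite above replaces an out-of-range IFxxx <l> with IFNOTxxx <l'> GOTO_W <l>, and the "opposite" opcode is obtained purely arithmetically. A minimal sketch of that arithmetic, assuming the standard JVM opcode numbering (IFEQ = 153 through IF_ACMPNE = 166, IFNULL = 198, IFNONNULL = 199); the helper name is illustrative, not part of ASM:

    // Branch opcodes pair up as (even, odd) once shifted by one, so flipping
    // the low bit of (opcode + 1) swaps each IFxxx with its negation;
    // IFNULL/IFNONNULL already differ only in their low bit.
    static int oppositeBranch(int opcode) {
        return opcode <= 166 ? ((opcode + 1) ^ 1) - 1 : opcode ^ 1;
    }

    // oppositeBranch(153 /* IFEQ */)   == 154 /* IFNE */
    // oppositeBranch(155 /* IFLT */)   == 156 /* IFGE */
    // oppositeBranch(198 /* IFNULL */) == 199 /* IFNONNULL */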
@@ -2471,8 +2478,7 @@ class MethodWriter extends MethodVisitor {
* must therefore never have been called for this label.
*/
u = l.position - 3;
- if ((l.status & Label.STORE) != 0 || (u >= 0 && resize[u]))
- {
+ if ((l.status & Label.STORE) != 0 || (u >= 0 && resize[u])) {
getNewOffset(allIndexes, allSizes, l);
// TODO update offsets in UNINITIALIZED values
visitFrame(l.frame);
@@ -2528,10 +2534,11 @@ class MethodWriter extends MethodVisitor {
b = lineNumber.data;
u = 0;
while (u < lineNumber.length) {
- writeShort(b, u, getNewOffset(allIndexes,
- allSizes,
- 0,
- readUnsignedShort(b, u)));
+ writeShort(
+ b,
+ u,
+ getNewOffset(allIndexes, allSizes, 0,
+ readUnsignedShort(b, u)));
u += 4;
}
}
@@ -2554,8 +2561,10 @@ class MethodWriter extends MethodVisitor {
/**
* Reads an unsigned short value in the given byte array.
*
- * @param b a byte array.
- * @param index the start index of the value to be read.
+ * @param b
+ * a byte array.
+ * @param index
+ * the start index of the value to be read.
* @return the read value.
*/
static int readUnsignedShort(final byte[] b, final int index) {
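The helper bodies fall outside the hunks shown here; a sketch of the documented behaviour, assuming the big-endian byte order used throughout class files (names are illustrative):

    static int readUnsignedShortSketch(final byte[] b, final int index) {
        return ((b[index] & 0xFF) << 8) | (b[index + 1] & 0xFF);
    }

    static void writeShortSketch(final byte[] b, final int index, final int s) {
        b[index] = (byte) (s >>> 8); // high byte first
        b[index + 1] = (byte) s;     // then the low byte
    }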
@@ -2565,8 +2574,10 @@ class MethodWriter extends MethodVisitor {
/**
* Reads a signed short value in the given byte array.
*
- * @param b a byte array.
- * @param index the start index of the value to be read.
+ * @param b
+ * a byte array.
+ * @param index
+ * the start index of the value to be read.
* @return the read value.
*/
static short readShort(final byte[] b, final int index) {
@@ -2576,8 +2587,10 @@ class MethodWriter extends MethodVisitor {
/**
* Reads a signed int value in the given byte array.
*
- * @param b a byte array.
- * @param index the start index of the value to be read.
+ * @param b
+ * a byte array.
+ * @param index
+ * the start index of the value to be read.
* @return the read value.
*/
static int readInt(final byte[] b, final int index) {
@@ -2588,9 +2601,12 @@ class MethodWriter extends MethodVisitor {
/**
* Writes a short value in the given byte array.
*
- * @param b a byte array.
- * @param index where the first byte of the short value must be written.
- * @param s the value to be written in the given byte array.
+ * @param b
+ * a byte array.
+ * @param index
+ * where the first byte of the short value must be written.
+ * @param s
+ * the value to be written in the given byte array.
*/
static void writeShort(final byte[] b, final int index, final int s) {
b[index] = (byte) (s >>> 8);
@@ -2598,32 +2614,34 @@ class MethodWriter extends MethodVisitor {
}
/**
- * Computes the future value of a bytecode offset. <p> Note: it is possible
- * to have several entries for the same instruction in the <tt>indexes</tt>
- * and <tt>sizes</tt>: two entries (index=a,size=b) and (index=a,size=b')
- * are equivalent to a single entry (index=a,size=b+b').
+ * Computes the future value of a bytecode offset.
+ * <p>
+ * Note: it is possible to have several entries for the same instruction in
+ * the <tt>indexes</tt> and <tt>sizes</tt>: two entries (index=a,size=b) and
+ * (index=a,size=b') are equivalent to a single entry (index=a,size=b+b').
*
- * @param indexes current positions of the instructions to be resized. Each
- * instruction must be designated by the index of its <i>last</i>
- * byte, plus one (or, in other words, by the index of the <i>first</i>
- * byte of the <i>next</i> instruction).
- * @param sizes the number of bytes to be <i>added</i> to the above
- * instructions. More precisely, for each i < <tt>len</tt>,
- * <tt>sizes</tt>[i] bytes will be added at the end of the
- * instruction designated by <tt>indexes</tt>[i] or, if
- * <tt>sizes</tt>[i] is negative, the <i>last</i> |<tt>sizes[i]</tt>|
- * bytes of the instruction will be removed (the instruction size
- * <i>must not</i> become negative or null).
- * @param begin index of the first byte of the source instruction.
- * @param end index of the first byte of the target instruction.
+ * @param indexes
+ * current positions of the instructions to be resized. Each
+ * instruction must be designated by the index of its <i>last</i>
+ * byte, plus one (or, in other words, by the index of the
+ * <i>first</i> byte of the <i>next</i> instruction).
+ * @param sizes
+ * the number of bytes to be <i>added</i> to the above
+ * instructions. More precisely, for each i < <tt>len</tt>,
+ * <tt>sizes</tt>[i] bytes will be added at the end of the
+ * instruction designated by <tt>indexes</tt>[i] or, if
+ * <tt>sizes</tt>[i] is negative, the <i>last</i> |
+ * <tt>sizes[i]</tt>| bytes of the instruction will be removed
+ * (the instruction size <i>must not</i> become negative or
+ * null).
+ * @param begin
+ * index of the first byte of the source instruction.
+ * @param end
+ * index of the first byte of the target instruction.
* @return the future value of the given bytecode offset.
*/
- static int getNewOffset(
- final int[] indexes,
- final int[] sizes,
- final int begin,
- final int end)
- {
+ static int getNewOffset(final int[] indexes, final int[] sizes,
+ final int begin, final int end) {
int offset = end - begin;
for (int i = 0; i < indexes.length; ++i) {
if (begin < indexes[i] && indexes[i] <= end) {
@@ -2640,24 +2658,25 @@ class MethodWriter extends MethodVisitor {
/**
* Updates the offset of the given label.
*
- * @param indexes current positions of the instructions to be resized. Each
- * instruction must be designated by the index of its <i>last</i>
- * byte, plus one (or, in other words, by the index of the <i>first</i>
- * byte of the <i>next</i> instruction).
- * @param sizes the number of bytes to be <i>added</i> to the above
- * instructions. More precisely, for each i < <tt>len</tt>,
- * <tt>sizes</tt>[i] bytes will be added at the end of the
- * instruction designated by <tt>indexes</tt>[i] or, if
- * <tt>sizes</tt>[i] is negative, the <i>last</i> |<tt>sizes[i]</tt>|
- * bytes of the instruction will be removed (the instruction size
- * <i>must not</i> become negative or null).
- * @param label the label whose offset must be updated.
- */
- static void getNewOffset(
- final int[] indexes,
- final int[] sizes,
- final Label label)
- {
+ * @param indexes
+ * current positions of the instructions to be resized. Each
+ * instruction must be designated by the index of its <i>last</i>
+ * byte, plus one (or, in other words, by the index of the
+ * <i>first</i> byte of the <i>next</i> instruction).
+ * @param sizes
+ * the number of bytes to be <i>added</i> to the above
+ * instructions. More precisely, for each i < <tt>len</tt>,
+ * <tt>sizes</tt>[i] bytes will be added at the end of the
+ * instruction designated by <tt>indexes</tt>[i] or, if
+ * <tt>sizes</tt>[i] is negative, the <i>last</i> |
+ * <tt>sizes[i]</tt>| bytes of the instruction will be removed
+ * (the instruction size <i>must not</i> become negative or
+ * null).
+ * @param label
+ * the label whose offset must be updated.
+ */
+ static void getNewOffset(final int[] indexes, final int[] sizes,
+ final Label label) {
if ((label.status & Label.RESIZED) == 0) {
label.position = getNewOffset(indexes, sizes, 0, label.position);
label.status |= Label.RESIZED;
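The indexes/sizes contract above is easiest to see on a tiny worked case; a sketch assuming a caller in the same package as MethodWriter, with purely illustrative numbers:

    // One instruction ending just before index 10 grows by 2 bytes.
    int[] indexes = { 10 };
    int[] sizes = { 2 };

    // A forward jump spanning the resized instruction stretches by 2 bytes...
    int stretched = MethodWriter.getNewOffset(indexes, sizes, 0, 20); // 22

    // ...while a jump that ends before it keeps its original offset.
    int unchanged = MethodWriter.getNewOffset(indexes, sizes, 0, 8);  // 8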
diff --git a/src/asm/scala/tools/asm/Type.java b/src/asm/scala/tools/asm/Type.java
index bf1107182a..7821a492e6 100644
--- a/src/asm/scala/tools/asm/Type.java
+++ b/src/asm/scala/tools/asm/Type.java
@@ -190,13 +190,16 @@ public class Type {
/**
* Constructs a reference type.
*
- * @param sort the sort of the reference type to be constructed.
- * @param buf a buffer containing the descriptor of the previous type.
- * @param off the offset of this descriptor in the previous buffer.
- * @param len the length of this descriptor.
- */
- private Type(final int sort, final char[] buf, final int off, final int len)
- {
+ * @param sort
+ * the sort of the reference type to be constructed.
+ * @param buf
+ * a buffer containing the descriptor of the previous type.
+ * @param off
+ * the offset of this descriptor in the previous buffer.
+ * @param len
+ * the length of this descriptor.
+ */
+ private Type(final int sort, final char[] buf, final int off, final int len) {
this.sort = sort;
this.buf = buf;
this.off = off;
@@ -206,7 +209,8 @@ public class Type {
/**
* Returns the Java type corresponding to the given type descriptor.
*
- * @param typeDescriptor a field or method type descriptor.
+ * @param typeDescriptor
+ * a field or method type descriptor.
* @return the Java type corresponding to the given type descriptor.
*/
public static Type getType(final String typeDescriptor) {
@@ -216,7 +220,8 @@ public class Type {
/**
* Returns the Java type corresponding to the given internal name.
*
- * @param internalName an internal name.
+ * @param internalName
+ * an internal name.
* @return the Java type corresponding to the given internal name.
*/
public static Type getObjectType(final String internalName) {
@@ -228,7 +233,8 @@ public class Type {
* Returns the Java type corresponding to the given method descriptor.
* Equivalent to <code>Type.getType(methodDescriptor)</code>.
*
- * @param methodDescriptor a method descriptor.
+ * @param methodDescriptor
+ * a method descriptor.
* @return the Java type corresponding to the given method descriptor.
*/
public static Type getMethodType(final String methodDescriptor) {
@@ -239,18 +245,23 @@ public class Type {
* Returns the Java method type corresponding to the given argument and
* return types.
*
- * @param returnType the return type of the method.
- * @param argumentTypes the argument types of the method.
- * @return the Java type corresponding to the given argument and return types.
+ * @param returnType
+ * the return type of the method.
+ * @param argumentTypes
+ * the argument types of the method.
+ * @return the Java type corresponding to the given argument and return
+ * types.
*/
- public static Type getMethodType(final Type returnType, final Type... argumentTypes) {
+ public static Type getMethodType(final Type returnType,
+ final Type... argumentTypes) {
return getType(getMethodDescriptor(returnType, argumentTypes));
}
/**
* Returns the Java type corresponding to the given class.
*
- * @param c a class.
+ * @param c
+ * a class.
* @return the Java type corresponding to the given class.
*/
public static Type getType(final Class<?> c) {
@@ -282,7 +293,8 @@ public class Type {
/**
* Returns the Java method type corresponding to the given constructor.
*
- * @param c a {@link Constructor Constructor} object.
+ * @param c
+ * a {@link Constructor Constructor} object.
* @return the Java method type corresponding to the given constructor.
*/
public static Type getType(final Constructor<?> c) {
@@ -292,7 +304,8 @@ public class Type {
/**
* Returns the Java method type corresponding to the given method.
*
- * @param m a {@link Method Method} object.
+ * @param m
+ * a {@link Method Method} object.
* @return the Java method type corresponding to the given method.
*/
public static Type getType(final Method m) {
@@ -303,7 +316,8 @@ public class Type {
* Returns the Java types corresponding to the argument types of the given
* method descriptor.
*
- * @param methodDescriptor a method descriptor.
+ * @param methodDescriptor
+ * a method descriptor.
* @return the Java types corresponding to the argument types of the given
* method descriptor.
*/
@@ -338,7 +352,8 @@ public class Type {
* Returns the Java types corresponding to the argument types of the given
* method.
*
- * @param method a method.
+ * @param method
+ * a method.
* @return the Java types corresponding to the argument types of the given
* method.
*/
@@ -355,7 +370,8 @@ public class Type {
* Returns the Java type corresponding to the return type of the given
* method descriptor.
*
- * @param methodDescriptor a method descriptor.
+ * @param methodDescriptor
+ * a method descriptor.
* @return the Java type corresponding to the return type of the given
* method descriptor.
*/
@@ -368,7 +384,8 @@ public class Type {
* Returns the Java type corresponding to the return type of the given
* method.
*
- * @param method a method.
+ * @param method
+ * a method.
* @return the Java type corresponding to the return type of the given
* method.
*/
@@ -379,12 +396,13 @@ public class Type {
/**
* Computes the size of the arguments and of the return value of a method.
*
- * @param desc the descriptor of a method.
+ * @param desc
+ * the descriptor of a method.
* @return the size of the arguments of the method (plus one for the
* implicit this argument), argSize, and the size of its return
* value, retSize, packed into a single int i =
- * <tt>(argSize << 2) | retSize</tt> (argSize is therefore equal
- * to <tt>i >> 2</tt>, and retSize to <tt>i & 0x03</tt>).
+ * <tt>(argSize << 2) | retSize</tt> (argSize is therefore equal to
+ * <tt>i >> 2</tt>, and retSize to <tt>i & 0x03</tt>).
*/
public static int getArgumentsAndReturnSizes(final String desc) {
int n = 1;
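The packed encoding documented above unpacks with plain shifts; a short sketch, assuming the usual JVM convention that long and double occupy two slots each:

    int packed = Type.getArgumentsAndReturnSizes("(JD)I");
    int argSize = packed >> 2;    // 1 (implicit this) + 2 (J) + 2 (D) = 5
    int retSize = packed & 0x03;  // 1 for the int return value, 0 for void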
@@ -419,52 +437,54 @@ public class Type {
* method descriptors, buf is supposed to contain nothing more than the
* descriptor itself.
*
- * @param buf a buffer containing a type descriptor.
- * @param off the offset of this descriptor in the previous buffer.
+ * @param buf
+ * a buffer containing a type descriptor.
+ * @param off
+ * the offset of this descriptor in the previous buffer.
* @return the Java type corresponding to the given type descriptor.
*/
private static Type getType(final char[] buf, final int off) {
int len;
switch (buf[off]) {
- case 'V':
- return VOID_TYPE;
- case 'Z':
- return BOOLEAN_TYPE;
- case 'C':
- return CHAR_TYPE;
- case 'B':
- return BYTE_TYPE;
- case 'S':
- return SHORT_TYPE;
- case 'I':
- return INT_TYPE;
- case 'F':
- return FLOAT_TYPE;
- case 'J':
- return LONG_TYPE;
- case 'D':
- return DOUBLE_TYPE;
- case '[':
- len = 1;
- while (buf[off + len] == '[') {
- ++len;
- }
- if (buf[off + len] == 'L') {
- ++len;
- while (buf[off + len] != ';') {
- ++len;
- }
- }
- return new Type(ARRAY, buf, off, len + 1);
- case 'L':
- len = 1;
+ case 'V':
+ return VOID_TYPE;
+ case 'Z':
+ return BOOLEAN_TYPE;
+ case 'C':
+ return CHAR_TYPE;
+ case 'B':
+ return BYTE_TYPE;
+ case 'S':
+ return SHORT_TYPE;
+ case 'I':
+ return INT_TYPE;
+ case 'F':
+ return FLOAT_TYPE;
+ case 'J':
+ return LONG_TYPE;
+ case 'D':
+ return DOUBLE_TYPE;
+ case '[':
+ len = 1;
+ while (buf[off + len] == '[') {
+ ++len;
+ }
+ if (buf[off + len] == 'L') {
+ ++len;
while (buf[off + len] != ';') {
++len;
}
- return new Type(OBJECT, buf, off + 1, len - 1);
+ }
+ return new Type(ARRAY, buf, off, len + 1);
+ case 'L':
+ len = 1;
+ while (buf[off + len] != ';') {
+ ++len;
+ }
+ return new Type(OBJECT, buf, off + 1, len - 1);
// case '(':
- default:
- return new Type(METHOD, buf, 0, buf.length);
+ default:
+ return new Type(METHOD, buf, off, buf.length - off);
}
}
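The descriptor dispatch above backs the public getType entry points; a brief usage sketch:

    Type s = Type.getType("Ljava/lang/String;");
    // s.getSort() == Type.OBJECT, s.getInternalName() is "java/lang/String"

    Type a = Type.getType("[[I");
    // a.getSort() == Type.ARRAY, a.getDimensions() == 2,
    // a.getElementType() == Type.INT_TYPE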
@@ -475,11 +495,11 @@ public class Type {
/**
* Returns the sort of this Java type.
*
- * @return {@link #VOID VOID}, {@link #BOOLEAN BOOLEAN},
- * {@link #CHAR CHAR}, {@link #BYTE BYTE}, {@link #SHORT SHORT},
- * {@link #INT INT}, {@link #FLOAT FLOAT}, {@link #LONG LONG},
- * {@link #DOUBLE DOUBLE}, {@link #ARRAY ARRAY},
- * {@link #OBJECT OBJECT} or {@link #METHOD METHOD}.
+ * @return {@link #VOID VOID}, {@link #BOOLEAN BOOLEAN}, {@link #CHAR CHAR},
+ * {@link #BYTE BYTE}, {@link #SHORT SHORT}, {@link #INT INT},
+ * {@link #FLOAT FLOAT}, {@link #LONG LONG}, {@link #DOUBLE DOUBLE},
+ * {@link #ARRAY ARRAY}, {@link #OBJECT OBJECT} or {@link #METHOD
+ * METHOD}.
*/
public int getSort() {
return sort;
@@ -517,34 +537,34 @@ public class Type {
*/
public String getClassName() {
switch (sort) {
- case VOID:
- return "void";
- case BOOLEAN:
- return "boolean";
- case CHAR:
- return "char";
- case BYTE:
- return "byte";
- case SHORT:
- return "short";
- case INT:
- return "int";
- case FLOAT:
- return "float";
- case LONG:
- return "long";
- case DOUBLE:
- return "double";
- case ARRAY:
- StringBuffer b = new StringBuffer(getElementType().getClassName());
- for (int i = getDimensions(); i > 0; --i) {
- b.append("[]");
- }
- return b.toString();
- case OBJECT:
- return new String(buf, off, len).replace('/', '.');
- default:
- return null;
+ case VOID:
+ return "void";
+ case BOOLEAN:
+ return "boolean";
+ case CHAR:
+ return "char";
+ case BYTE:
+ return "byte";
+ case SHORT:
+ return "short";
+ case INT:
+ return "int";
+ case FLOAT:
+ return "float";
+ case LONG:
+ return "long";
+ case DOUBLE:
+ return "double";
+ case ARRAY:
+ StringBuffer b = new StringBuffer(getElementType().getClassName());
+ for (int i = getDimensions(); i > 0; --i) {
+ b.append("[]");
+ }
+ return b.toString();
+ case OBJECT:
+ return new String(buf, off, len).replace('/', '.');
+ default:
+ return null;
}
}
@@ -613,15 +633,15 @@ public class Type {
* Returns the descriptor corresponding to the given argument and return
* types.
*
- * @param returnType the return type of the method.
- * @param argumentTypes the argument types of the method.
+ * @param returnType
+ * the return type of the method.
+ * @param argumentTypes
+ * the argument types of the method.
* @return the descriptor corresponding to the given argument and return
* types.
*/
- public static String getMethodDescriptor(
- final Type returnType,
- final Type... argumentTypes)
- {
+ public static String getMethodDescriptor(final Type returnType,
+ final Type... argumentTypes) {
StringBuffer buf = new StringBuffer();
buf.append('(');
for (int i = 0; i < argumentTypes.length; ++i) {
@@ -636,11 +656,13 @@ public class Type {
* Appends the descriptor corresponding to this Java type to the given
* string buffer.
*
- * @param buf the string buffer to which the descriptor must be appended.
+ * @param buf
+ * the string buffer to which the descriptor must be appended.
*/
private void getDescriptor(final StringBuffer buf) {
if (this.buf == null) {
- // descriptor is in byte 3 of 'off' for primitive types (buf == null)
+ // descriptor is in byte 3 of 'off' for primitive types (buf ==
+ // null)
buf.append((char) ((off & 0xFF000000) >>> 24));
} else if (sort == OBJECT) {
buf.append('L');
@@ -661,7 +683,8 @@ public class Type {
* class is its fully qualified name, as returned by Class.getName(), where
* '.' are replaced by '/'.
*
- * @param c an object or array class.
+ * @param c
+ * an object or array class.
* @return the internal name of the given class.
*/
public static String getInternalName(final Class<?> c) {
@@ -671,7 +694,8 @@ public class Type {
/**
* Returns the descriptor corresponding to the given Java type.
*
- * @param c an object class, a primitive class or an array class.
+ * @param c
+ * an object class, a primitive class or an array class.
* @return the descriptor corresponding to the given class.
*/
public static String getDescriptor(final Class<?> c) {
@@ -683,7 +707,8 @@ public class Type {
/**
* Returns the descriptor corresponding to the given constructor.
*
- * @param c a {@link Constructor Constructor} object.
+ * @param c
+ * a {@link Constructor Constructor} object.
* @return the descriptor of the given constructor.
*/
public static String getConstructorDescriptor(final Constructor<?> c) {
@@ -699,7 +724,8 @@ public class Type {
/**
* Returns the descriptor corresponding to the given method.
*
- * @param m a {@link Method Method} object.
+ * @param m
+ * a {@link Method Method} object.
* @return the descriptor of the given method.
*/
public static String getMethodDescriptor(final Method m) {
@@ -717,8 +743,10 @@ public class Type {
/**
* Appends the descriptor of the given class to the given string buffer.
*
- * @param buf the string buffer to which the descriptor must be appended.
- * @param c the class whose descriptor must be computed.
+ * @param buf
+ * the string buffer to which the descriptor must be appended.
+ * @param c
+ * the class whose descriptor must be computed.
*/
private static void getDescriptor(final StringBuffer buf, final Class<?> c) {
Class<?> d = c;
@@ -783,9 +811,10 @@ public class Type {
* Returns a JVM instruction opcode adapted to this Java type. This method
* must not be used for method types.
*
- * @param opcode a JVM instruction opcode. This opcode must be one of ILOAD,
- * ISTORE, IALOAD, IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG, ISHL,
- * ISHR, IUSHR, IAND, IOR, IXOR and IRETURN.
+ * @param opcode
+ * a JVM instruction opcode. This opcode must be one of ILOAD,
+ * ISTORE, IALOAD, IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG,
+ * ISHL, ISHR, IUSHR, IAND, IOR, IXOR and IRETURN.
* @return an opcode that is similar to the given opcode, but adapted to
* this Java type. For example, if this type is <tt>float</tt> and
* <tt>opcode</tt> is IRETURN, this method returns FRETURN.
@@ -809,7 +838,8 @@ public class Type {
/**
* Tests if the given object is equal to this type.
*
- * @param o the object to be compared to this type.
+ * @param o
+ * the object to be compared to this type.
* @return <tt>true</tt> if the given object is equal to this type.
*/
@Override
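The getOpcode adaptation documented earlier in this class can be exercised directly; a short sketch:

    int fret  = Type.FLOAT_TYPE.getOpcode(Opcodes.IRETURN); // Opcodes.FRETURN
    int lload = Type.LONG_TYPE.getOpcode(Opcodes.ILOAD);    // Opcodes.LLOAD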
diff --git a/src/asm/scala/tools/asm/signature/SignatureReader.java b/src/asm/scala/tools/asm/signature/SignatureReader.java
index 22e6427e63..9c7c3880d9 100644
--- a/src/asm/scala/tools/asm/signature/SignatureReader.java
+++ b/src/asm/scala/tools/asm/signature/SignatureReader.java
@@ -46,8 +46,9 @@ public class SignatureReader {
/**
* Constructs a {@link SignatureReader} for the given signature.
*
- * @param signature A <i>ClassSignature</i>, <i>MethodTypeSignature</i>,
- * or <i>FieldTypeSignature</i>.
+ * @param signature
+ * A <i>ClassSignature</i>, <i>MethodTypeSignature</i>, or
+ * <i>FieldTypeSignature</i>.
*/
public SignatureReader(final String signature) {
this.signature = signature;
@@ -58,15 +59,15 @@ public class SignatureReader {
* {@link SignatureReader}. This signature is the one specified in the
* constructor (see {@link #SignatureReader(String) SignatureReader}). This
* method is intended to be called on a {@link SignatureReader} that was
- * created using a <i>ClassSignature</i> (such as the
+ * created using a <i>ClassSignature</i> (such as the <code>signature</code>
+ * parameter of the {@link scala.tools.asm.ClassVisitor#visit
+ * ClassVisitor.visit} method) or a <i>MethodTypeSignature</i> (such as the
* <code>signature</code> parameter of the
- * {@link org.objectweb.asm.ClassVisitor#visit ClassVisitor.visit} method)
- * or a <i>MethodTypeSignature</i> (such as the <code>signature</code>
- * parameter of the
- * {@link org.objectweb.asm.ClassVisitor#visitMethod ClassVisitor.visitMethod}
- * method).
+ * {@link scala.tools.asm.ClassVisitor#visitMethod
+ * ClassVisitor.visitMethod} method).
*
- * @param v the visitor that must visit this signature.
+ * @param v
+ * the visitor that must visit this signature.
*/
public void accept(final SignatureVisitor v) {
String signature = this.signature;
@@ -118,12 +119,12 @@ public class SignatureReader {
* method is intended to be called on a {@link SignatureReader} that was
* created using a <i>FieldTypeSignature</i>, such as the
* <code>signature</code> parameter of the
- * {@link org.objectweb.asm.ClassVisitor#visitField
- * ClassVisitor.visitField} or {@link
- * org.objectweb.asm.MethodVisitor#visitLocalVariable
+ * {@link scala.tools.asm.ClassVisitor#visitField ClassVisitor.visitField}
+ * or {@link scala.tools.asm.MethodVisitor#visitLocalVariable
* MethodVisitor.visitLocalVariable} methods.
*
- * @param v the visitor that must visit this signature.
+ * @param v
+ * the visitor that must visit this signature.
*/
public void acceptType(final SignatureVisitor v) {
parseType(this.signature, 0, v);
@@ -132,98 +133,96 @@ public class SignatureReader {
/**
* Parses a field type signature and makes the given visitor visit it.
*
- * @param signature a string containing the signature that must be parsed.
- * @param pos index of the first character of the signature to parsed.
- * @param v the visitor that must visit this signature.
+ * @param signature
+ * a string containing the signature that must be parsed.
+ * @param pos
+ * index of the first character of the signature to parsed.
+ * @param v
+ * the visitor that must visit this signature.
* @return the index of the first character after the parsed signature.
*/
- private static int parseType(
- final String signature,
- int pos,
- final SignatureVisitor v)
- {
+ private static int parseType(final String signature, int pos,
+ final SignatureVisitor v) {
char c;
int start, end;
boolean visited, inner;
String name;
switch (c = signature.charAt(pos++)) {
- case 'Z':
- case 'C':
- case 'B':
- case 'S':
- case 'I':
- case 'F':
- case 'J':
- case 'D':
- case 'V':
- v.visitBaseType(c);
- return pos;
+ case 'Z':
+ case 'C':
+ case 'B':
+ case 'S':
+ case 'I':
+ case 'F':
+ case 'J':
+ case 'D':
+ case 'V':
+ v.visitBaseType(c);
+ return pos;
- case '[':
- return parseType(signature, pos, v.visitArrayType());
+ case '[':
+ return parseType(signature, pos, v.visitArrayType());
- case 'T':
- end = signature.indexOf(';', pos);
- v.visitTypeVariable(signature.substring(pos, end));
- return end + 1;
+ case 'T':
+ end = signature.indexOf(';', pos);
+ v.visitTypeVariable(signature.substring(pos, end));
+ return end + 1;
- default: // case 'L':
- start = pos;
- visited = false;
- inner = false;
- for (;;) {
- switch (c = signature.charAt(pos++)) {
- case '.':
- case ';':
- if (!visited) {
- name = signature.substring(start, pos - 1);
- if (inner) {
- v.visitInnerClassType(name);
- } else {
- v.visitClassType(name);
- }
- }
- if (c == ';') {
- v.visitEnd();
- return pos;
- }
- start = pos;
- visited = false;
- inner = true;
- break;
+ default: // case 'L':
+ start = pos;
+ visited = false;
+ inner = false;
+ for (;;) {
+ switch (c = signature.charAt(pos++)) {
+ case '.':
+ case ';':
+ if (!visited) {
+ name = signature.substring(start, pos - 1);
+ if (inner) {
+ v.visitInnerClassType(name);
+ } else {
+ v.visitClassType(name);
+ }
+ }
+ if (c == ';') {
+ v.visitEnd();
+ return pos;
+ }
+ start = pos;
+ visited = false;
+ inner = true;
+ break;
- case '<':
- name = signature.substring(start, pos - 1);
- if (inner) {
- v.visitInnerClassType(name);
- } else {
- v.visitClassType(name);
- }
- visited = true;
- top: for (;;) {
- switch (c = signature.charAt(pos)) {
- case '>':
- break top;
- case '*':
- ++pos;
- v.visitTypeArgument();
- break;
- case '+':
- case '-':
- pos = parseType(signature,
- pos + 1,
- v.visitTypeArgument(c));
- break;
- default:
- pos = parseType(signature,
- pos,
- v.visitTypeArgument('='));
- break;
- }
- }
+ case '<':
+ name = signature.substring(start, pos - 1);
+ if (inner) {
+ v.visitInnerClassType(name);
+ } else {
+ v.visitClassType(name);
+ }
+ visited = true;
+ top: for (;;) {
+ switch (c = signature.charAt(pos)) {
+ case '>':
+ break top;
+ case '*':
+ ++pos;
+ v.visitTypeArgument();
+ break;
+ case '+':
+ case '-':
+ pos = parseType(signature, pos + 1,
+ v.visitTypeArgument(c));
+ break;
+ default:
+ pos = parseType(signature, pos,
+ v.visitTypeArgument('='));
+ break;
+ }
}
}
+ }
}
}
}
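A minimal, self-contained sketch of driving the parser above; the class name and printed output are illustrative:

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.signature.SignatureReader;
    import scala.tools.asm.signature.SignatureVisitor;

    class SignatureDemo {
        public static void main(String[] args) {
            SignatureVisitor printer = new SignatureVisitor(Opcodes.ASM4) {
                @Override
                public void visitClassType(String name) {
                    System.out.println("class type: " + name);
                }
            };
            // Walks the field type signature of a List<String>.
            new SignatureReader("Ljava/util/List<Ljava/lang/String;>;")
                    .acceptType(printer);
        }
    }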
diff --git a/src/asm/scala/tools/asm/signature/SignatureVisitor.java b/src/asm/scala/tools/asm/signature/SignatureVisitor.java
index 2fc364e374..f38f81f53b 100644
--- a/src/asm/scala/tools/asm/signature/SignatureVisitor.java
+++ b/src/asm/scala/tools/asm/signature/SignatureVisitor.java
@@ -35,21 +35,21 @@ import scala.tools.asm.Opcodes;
* A visitor to visit a generic signature. The methods of this interface must be
* called in one of the three following orders (the last one is the only valid
* order for a {@link SignatureVisitor} that is returned by a method of this
- * interface): <ul> <li><i>ClassSignature</i> = (
- * <tt>visitFormalTypeParameter</tt>
- * <tt>visitClassBound</tt>?
- * <tt>visitInterfaceBound</tt>* )* ( <tt>visitSuperClass</tt>
- * <tt>visitInterface</tt>* )</li>
+ * interface):
+ * <ul>
+ * <li><i>ClassSignature</i> = ( <tt>visitFormalTypeParameter</tt>
+ * <tt>visitClassBound</tt>? <tt>visitInterfaceBound</tt>* )* (
+ * <tt>visitSuperClass</tt> <tt>visitInterface</tt>* )</li>
* <li><i>MethodSignature</i> = ( <tt>visitFormalTypeParameter</tt>
- * <tt>visitClassBound</tt>?
- * <tt>visitInterfaceBound</tt>* )* ( <tt>visitParameterType</tt>*
- * <tt>visitReturnType</tt>
- * <tt>visitExceptionType</tt>* )</li> <li><i>TypeSignature</i> =
- * <tt>visitBaseType</tt> | <tt>visitTypeVariable</tt> |
- * <tt>visitArrayType</tt> | (
+ * <tt>visitClassBound</tt>? <tt>visitInterfaceBound</tt>* )* (
+ * <tt>visitParameterType</tt>* <tt>visitReturnType</tt>
+ * <tt>visitExceptionType</tt>* )</li>
+ * <li><i>TypeSignature</i> = <tt>visitBaseType</tt> |
+ * <tt>visitTypeVariable</tt> | <tt>visitArrayType</tt> | (
* <tt>visitClassType</tt> <tt>visitTypeArgument</tt>* (
- * <tt>visitInnerClassType</tt> <tt>visitTypeArgument</tt>* )*
- * <tt>visitEnd</tt> ) )</li> </ul>
+ * <tt>visitInnerClassType</tt> <tt>visitTypeArgument</tt>* )* <tt>visitEnd</tt>
+ * ) )</li>
+ * </ul>
*
* @author Thomas Hallgren
* @author Eric Bruneton
@@ -80,8 +80,9 @@ public abstract class SignatureVisitor {
/**
* Constructs a new {@link SignatureVisitor}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
*/
public SignatureVisitor(final int api) {
this.api = api;
@@ -90,7 +91,8 @@ public abstract class SignatureVisitor {
/**
* Visits a formal type parameter.
*
- * @param name the name of the formal parameter.
+ * @param name
+ * the name of the formal parameter.
*/
public void visitFormalTypeParameter(String name) {
}
@@ -162,8 +164,9 @@ public abstract class SignatureVisitor {
/**
* Visits a signature corresponding to a primitive type.
*
- * @param descriptor the descriptor of the primitive type, or 'V' for
- * <tt>void</tt>.
+ * @param descriptor
+ * the descriptor of the primitive type, or 'V' for <tt>void</tt>
+ * .
*/
public void visitBaseType(char descriptor) {
}
@@ -171,7 +174,8 @@ public abstract class SignatureVisitor {
/**
* Visits a signature corresponding to a type variable.
*
- * @param name the name of the type variable.
+ * @param name
+ * the name of the type variable.
*/
public void visitTypeVariable(String name) {
}
@@ -190,7 +194,8 @@ public abstract class SignatureVisitor {
* Starts the visit of a signature corresponding to a class or interface
* type.
*
- * @param name the internal name of the class or interface.
+ * @param name
+ * the internal name of the class or interface.
*/
public void visitClassType(String name) {
}
@@ -198,7 +203,8 @@ public abstract class SignatureVisitor {
/**
* Visits an inner class.
*
- * @param name the local name of the inner class in its enclosing class.
+ * @param name
+ * the local name of the inner class in its enclosing class.
*/
public void visitInnerClassType(String name) {
}
@@ -213,7 +219,8 @@ public abstract class SignatureVisitor {
/**
* Visits a type argument of the last visited class or inner class type.
*
- * @param wildcard '+', '-' or '='.
+ * @param wildcard
+ * '+', '-' or '='.
* @return a non null visitor to visit the signature of the type argument.
*/
public SignatureVisitor visitTypeArgument(char wildcard) {
diff --git a/src/asm/scala/tools/asm/signature/SignatureWriter.java b/src/asm/scala/tools/asm/signature/SignatureWriter.java
index a59fdfde2b..ebf4fe07b4 100644
--- a/src/asm/scala/tools/asm/signature/SignatureWriter.java
+++ b/src/asm/scala/tools/asm/signature/SignatureWriter.java
@@ -224,4 +224,4 @@ public class SignatureWriter extends SignatureVisitor {
}
argumentStack /= 2;
}
-}
\ No newline at end of file
+}
+}
diff --git a/src/asm/scala/tools/asm/tree/AbstractInsnNode.java b/src/asm/scala/tools/asm/tree/AbstractInsnNode.java
index 471f842ffc..411eead3c7 100644
--- a/src/asm/scala/tools/asm/tree/AbstractInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/AbstractInsnNode.java
@@ -148,7 +148,8 @@ public abstract class AbstractInsnNode {
/**
* Constructs a new {@link AbstractInsnNode}.
*
- * @param opcode the opcode of the instruction to be constructed.
+ * @param opcode
+ * the opcode of the instruction to be constructed.
*/
protected AbstractInsnNode(final int opcode) {
this.opcode = opcode;
@@ -197,38 +198,47 @@ public abstract class AbstractInsnNode {
/**
* Makes the given code visitor visit this instruction.
*
- * @param cv a code visitor.
+ * @param cv
+ * a code visitor.
*/
public abstract void accept(final MethodVisitor cv);
/**
* Returns a copy of this instruction.
*
- * @param labels a map from LabelNodes to cloned LabelNodes.
+ * @param labels
+ * a map from LabelNodes to cloned LabelNodes.
* @return a copy of this instruction. The returned instruction does not
* belong to any {@link InsnList}.
*/
- public abstract AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels);
+ public abstract AbstractInsnNode clone(
+ final Map<LabelNode, LabelNode> labels);
/**
* Returns the clone of the given label.
*
- * @param label a label.
- * @param map a map from LabelNodes to cloned LabelNodes.
+ * @param label
+ * a label.
+ * @param map
+ * a map from LabelNodes to cloned LabelNodes.
* @return the clone of the given label.
*/
- static LabelNode clone(final LabelNode label, final Map<LabelNode, LabelNode> map) {
+ static LabelNode clone(final LabelNode label,
+ final Map<LabelNode, LabelNode> map) {
return map.get(label);
}
/**
* Returns the clones of the given labels.
*
- * @param labels a list of labels.
- * @param map a map from LabelNodes to cloned LabelNodes.
+ * @param labels
+ * a list of labels.
+ * @param map
+ * a map from LabelNodes to cloned LabelNodes.
* @return the clones of the given labels.
*/
- static LabelNode[] clone(final List<LabelNode> labels, final Map<LabelNode, LabelNode> map) {
+ static LabelNode[] clone(final List<LabelNode> labels,
+ final Map<LabelNode, LabelNode> map) {
LabelNode[] clones = new LabelNode[labels.size()];
for (int i = 0; i < clones.length; ++i) {
clones[i] = map.get(labels.get(i));
diff --git a/src/asm/scala/tools/asm/tree/AnnotationNode.java b/src/asm/scala/tools/asm/tree/AnnotationNode.java
index 9f132550e6..1f4beef9f7 100644
--- a/src/asm/scala/tools/asm/tree/AnnotationNode.java
+++ b/src/asm/scala/tools/asm/tree/AnnotationNode.java
@@ -52,11 +52,11 @@ public class AnnotationNode extends AnnotationVisitor {
* as two consecutive elements in the list. The name is a {@link String},
* and the value may be a {@link Byte}, {@link Boolean}, {@link Character},
* {@link Short}, {@link Integer}, {@link Long}, {@link Float},
- * {@link Double}, {@link String} or {@link org.objectweb.asm.Type}, or an
+ * {@link Double}, {@link String} or {@link scala.tools.asm.Type}, or an
* two elements String array (for enumeration values), a
* {@link AnnotationNode}, or a {@link List} of values of one of the
- * preceding types. The list may be <tt>null</tt> if there is no name
- * value pair.
+ * preceding types. The list may be <tt>null</tt> if there is no name value
+ * pair.
*/
public List<Object> values;
@@ -65,7 +65,8 @@ public class AnnotationNode extends AnnotationVisitor {
* constructor</i>. Instead, they must use the
* {@link #AnnotationNode(int, String)} version.
*
- * @param desc the class descriptor of the annotation class.
+ * @param desc
+ * the class descriptor of the annotation class.
*/
public AnnotationNode(final String desc) {
this(Opcodes.ASM4, desc);
@@ -74,9 +75,11 @@ public class AnnotationNode extends AnnotationVisitor {
/**
* Constructs a new {@link AnnotationNode}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
- * @param desc the class descriptor of the annotation class.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param desc
+ * the class descriptor of the annotation class.
*/
public AnnotationNode(final int api, final String desc) {
super(api);
@@ -86,7 +89,8 @@ public class AnnotationNode extends AnnotationVisitor {
/**
* Constructs a new {@link AnnotationNode} to visit an array value.
*
- * @param values where the visited values must be stored.
+ * @param values
+ * where the visited values must be stored.
*/
AnnotationNode(final List<Object> values) {
super(Opcodes.ASM4);
@@ -109,11 +113,8 @@ public class AnnotationNode extends AnnotationVisitor {
}
@Override
- public void visitEnum(
- final String name,
- final String desc,
- final String value)
- {
+ public void visitEnum(final String name, final String desc,
+ final String value) {
if (values == null) {
values = new ArrayList<Object>(this.desc != null ? 2 : 1);
}
@@ -124,10 +125,8 @@ public class AnnotationNode extends AnnotationVisitor {
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String name,
- final String desc)
- {
+ public AnnotationVisitor visitAnnotation(final String name,
+ final String desc) {
if (values == null) {
values = new ArrayList<Object>(this.desc != null ? 2 : 1);
}
@@ -166,7 +165,8 @@ public class AnnotationNode extends AnnotationVisitor {
* recursively, do not contain elements that were introduced in more recent
* versions of the ASM API than the given version.
*
- * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}.
+ * @param api
+ * an ASM API version. Must be one of {@link Opcodes#ASM4}.
*/
public void check(final int api) {
// nothing to do
@@ -175,7 +175,8 @@ public class AnnotationNode extends AnnotationVisitor {
/**
* Makes the given visitor visit this annotation.
*
- * @param av an annotation visitor. Maybe <tt>null</tt>.
+ * @param av
+ * an annotation visitor. Maybe <tt>null</tt>.
*/
public void accept(final AnnotationVisitor av) {
if (av != null) {
@@ -193,15 +194,15 @@ public class AnnotationNode extends AnnotationVisitor {
/**
* Makes the given visitor visit a given annotation value.
*
- * @param av an annotation visitor. Maybe <tt>null</tt>.
- * @param name the value name.
- * @param value the actual value.
+ * @param av
+ * an annotation visitor. Maybe <tt>null</tt>.
+ * @param name
+ * the value name.
+ * @param value
+ * the actual value.
*/
- static void accept(
- final AnnotationVisitor av,
- final String name,
- final Object value)
- {
+ static void accept(final AnnotationVisitor av, final String name,
+ final Object value) {
if (av != null) {
if (value instanceof String[]) {
String[] typeconst = (String[]) value;
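A brief sketch of building an annotation with the constructors above; the descriptor and enum value are illustrative:

    AnnotationNode an = new AnnotationNode("Ljava/lang/annotation/Retention;");
    an.visitEnum("value", "Ljava/lang/annotation/RetentionPolicy;", "RUNTIME");
    an.visitEnd();
    // an.values now holds the name followed by a two-element String[]
    // (descriptor, value) - the String[] case handled just above.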
diff --git a/src/asm/scala/tools/asm/tree/ClassNode.java b/src/asm/scala/tools/asm/tree/ClassNode.java
index 64effae698..c3d999985a 100644
--- a/src/asm/scala/tools/asm/tree/ClassNode.java
+++ b/src/asm/scala/tools/asm/tree/ClassNode.java
@@ -53,33 +53,33 @@ public class ClassNode extends ClassVisitor {
public int version;
/**
- * The class's access flags (see {@link org.objectweb.asm.Opcodes}). This
+ * The class's access flags (see {@link scala.tools.asm.Opcodes}). This
* field also indicates if the class is deprecated.
*/
public int access;
/**
* The internal name of the class (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+ * {@link scala.tools.asm.Type#getInternalName() getInternalName}).
*/
public String name;
/**
- * The signature of the class. Mayt be <tt>null</tt>.
+ * The signature of the class. May be <tt>null</tt>.
*/
public String signature;
/**
* The internal of name of the super class (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). For
- * interfaces, the super class is {@link Object}. May be <tt>null</tt>,
- * but only for the {@link Object} class.
+ * {@link scala.tools.asm.Type#getInternalName() getInternalName}). For
+ * interfaces, the super class is {@link Object}. May be <tt>null</tt>, but
+ * only for the {@link Object} class.
*/
public String superName;
/**
* The internal names of the class's interfaces (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). This
+ * {@link scala.tools.asm.Type#getInternalName() getInternalName}). This
* list is a list of {@link String} objects.
*/
public List<String> interfaces;
@@ -91,7 +91,7 @@ public class ClassNode extends ClassVisitor {
public String sourceFile;
/**
- * Debug information to compute the correspondance between source and
+ * Debug information to compute the correspondence between source and
* compiled elements of the class. May be <tt>null</tt>.
*/
public String sourceDebug;
@@ -109,8 +109,8 @@ public class ClassNode extends ClassVisitor {
public String outerMethod;
/**
- * The descriptor of the method that contains the class, or <tt>null</tt>
- * if the class is not enclosed in a method.
+ * The descriptor of the method that contains the class, or <tt>null</tt> if
+ * the class is not enclosed in a method.
*/
public String outerMethodDesc;
@@ -118,7 +118,7 @@ public class ClassNode extends ClassVisitor {
* The runtime visible annotations of this class. This list is a list of
* {@link AnnotationNode} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.tree.AnnotationNode
+ * @associates scala.tools.asm.tree.AnnotationNode
* @label visible
*/
public List<AnnotationNode> visibleAnnotations;
@@ -127,7 +127,7 @@ public class ClassNode extends ClassVisitor {
* The runtime invisible annotations of this class. This list is a list of
* {@link AnnotationNode} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.tree.AnnotationNode
+ * @associates scala.tools.asm.tree.AnnotationNode
* @label invisible
*/
public List<AnnotationNode> invisibleAnnotations;
@@ -136,7 +136,7 @@ public class ClassNode extends ClassVisitor {
* The non standard attributes of this class. This list is a list of
* {@link Attribute} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.Attribute
+ * @associates scala.tools.asm.Attribute
*/
public List<Attribute> attrs;
@@ -144,7 +144,7 @@ public class ClassNode extends ClassVisitor {
* Informations about the inner classes of this class. This list is a list
* of {@link InnerClassNode} objects.
*
- * @associates org.objectweb.asm.tree.InnerClassNode
+ * @associates scala.tools.asm.tree.InnerClassNode
*/
public List<InnerClassNode> innerClasses;
@@ -152,7 +152,7 @@ public class ClassNode extends ClassVisitor {
* The fields of this class. This list is a list of {@link FieldNode}
* objects.
*
- * @associates org.objectweb.asm.tree.FieldNode
+ * @associates scala.tools.asm.tree.FieldNode
*/
public List<FieldNode> fields;
@@ -160,7 +160,7 @@ public class ClassNode extends ClassVisitor {
* The methods of this class. This list is a list of {@link MethodNode}
* objects.
*
- * @associates org.objectweb.asm.tree.MethodNode
+ * @associates scala.tools.asm.tree.MethodNode
*/
public List<MethodNode> methods;
@@ -176,8 +176,9 @@ public class ClassNode extends ClassVisitor {
/**
* Constructs a new {@link ClassNode}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
*/
public ClassNode(final int api) {
super(api);
@@ -192,14 +193,9 @@ public class ClassNode extends ClassVisitor {
// ------------------------------------------------------------------------
@Override
- public void visit(
- final int version,
- final int access,
- final String name,
- final String signature,
- final String superName,
- final String[] interfaces)
- {
+ public void visit(final int version, final int access, final String name,
+ final String signature, final String superName,
+ final String[] interfaces) {
this.version = version;
this.access = access;
this.name = name;
@@ -217,21 +213,16 @@ public class ClassNode extends ClassVisitor {
}
@Override
- public void visitOuterClass(
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitOuterClass(final String owner, final String name,
+ final String desc) {
outerClass = owner;
outerMethod = name;
outerMethodDesc = desc;
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
AnnotationNode an = new AnnotationNode(desc);
if (visible) {
if (visibleAnnotations == null) {
@@ -256,44 +247,25 @@ public class ClassNode extends ClassVisitor {
}
@Override
- public void visitInnerClass(
- final String name,
- final String outerName,
- final String innerName,
- final int access)
- {
- InnerClassNode icn = new InnerClassNode(name,
- outerName,
- innerName,
+ public void visitInnerClass(final String name, final String outerName,
+ final String innerName, final int access) {
+ InnerClassNode icn = new InnerClassNode(name, outerName, innerName,
access);
innerClasses.add(icn);
}
@Override
- public FieldVisitor visitField(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final Object value)
- {
+ public FieldVisitor visitField(final int access, final String name,
+ final String desc, final String signature, final Object value) {
FieldNode fn = new FieldNode(access, name, desc, signature, value);
fields.add(fn);
return fn;
}
@Override
- public MethodVisitor visitMethod(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final String[] exceptions)
- {
- MethodNode mn = new MethodNode(access,
- name,
- desc,
- signature,
+ public MethodVisitor visitMethod(final int access, final String name,
+ final String desc, final String signature, final String[] exceptions) {
+ MethodNode mn = new MethodNode(access, name, desc, signature,
exceptions);
methods.add(mn);
return mn;
@@ -313,7 +285,8 @@ public class ClassNode extends ClassVisitor {
* contain elements that were introduced in more recent versions of the ASM
* API than the given version.
*
- * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}.
+ * @param api
+ * an ASM API version. Must be one of {@link Opcodes#ASM4}.
*/
public void check(final int api) {
// nothing to do
@@ -322,7 +295,8 @@ public class ClassNode extends ClassVisitor {
/**
* Makes the given class visitor visit this class.
*
- * @param cv a class visitor.
+ * @param cv
+ * a class visitor.
*/
public void accept(final ClassVisitor cv) {
// visits header
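accept replays the whole tree into any ClassVisitor; a small sketch of producing class-file bytes from a hand-built node (the class name and flags are illustrative):

    ClassNode cn = new ClassNode();
    cn.version = Opcodes.V1_6;
    cn.access = Opcodes.ACC_PUBLIC;
    cn.name = "demo/Empty";
    cn.superName = "java/lang/Object";

    ClassWriter cw = new ClassWriter(0);
    cn.accept(cw);                 // replays the tree into the writer
    byte[] classFile = cw.toByteArray();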
diff --git a/src/asm/scala/tools/asm/tree/FieldInsnNode.java b/src/asm/scala/tools/asm/tree/FieldInsnNode.java
index 6b7a6a142a..0c94f18adf 100644
--- a/src/asm/scala/tools/asm/tree/FieldInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/FieldInsnNode.java
@@ -43,7 +43,7 @@ public class FieldInsnNode extends AbstractInsnNode {
/**
* The internal name of the field's owner class (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+ * {@link scala.tools.asm.Type#getInternalName() getInternalName}).
*/
public String owner;
@@ -53,26 +53,27 @@ public class FieldInsnNode extends AbstractInsnNode {
public String name;
/**
- * The field's descriptor (see {@link org.objectweb.asm.Type}).
+ * The field's descriptor (see {@link scala.tools.asm.Type}).
*/
public String desc;
/**
* Constructs a new {@link FieldInsnNode}.
*
- * @param opcode the opcode of the type instruction to be constructed. This
- * opcode must be GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD.
- * @param owner the internal name of the field's owner class (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
- * @param name the field's name.
- * @param desc the field's descriptor (see {@link org.objectweb.asm.Type}).
+ * @param opcode
+ * the opcode of the type instruction to be constructed. This
+ * opcode must be GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD.
+ * @param owner
+ * the internal name of the field's owner class (see
+ * {@link scala.tools.asm.Type#getInternalName()
+ * getInternalName}).
+ * @param name
+ * the field's name.
+ * @param desc
+ * the field's descriptor (see {@link scala.tools.asm.Type}).
*/
- public FieldInsnNode(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public FieldInsnNode(final int opcode, final String owner,
+ final String name, final String desc) {
super(opcode);
this.owner = owner;
this.name = name;
@@ -82,8 +83,9 @@ public class FieldInsnNode extends AbstractInsnNode {
/**
* Sets the opcode of this instruction.
*
- * @param opcode the new instruction opcode. This opcode must be GETSTATIC,
- * PUTSTATIC, GETFIELD or PUTFIELD.
+ * @param opcode
+ * the new instruction opcode. This opcode must be GETSTATIC,
+ * PUTSTATIC, GETFIELD or PUTFIELD.
*/
public void setOpcode(final int opcode) {
this.opcode = opcode;
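A typical instance of the constructor documented above, loading the static field java/lang/System.out:

    FieldInsnNode getOut = new FieldInsnNode(Opcodes.GETSTATIC,
            "java/lang/System", "out", "Ljava/io/PrintStream;");
    // getOut.accept(mv) emits the instruction on a MethodVisitor mv.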
diff --git a/src/asm/scala/tools/asm/tree/FieldNode.java b/src/asm/scala/tools/asm/tree/FieldNode.java
index 9a1e17033c..61b614ec59 100644
--- a/src/asm/scala/tools/asm/tree/FieldNode.java
+++ b/src/asm/scala/tools/asm/tree/FieldNode.java
@@ -46,7 +46,7 @@ import scala.tools.asm.Opcodes;
public class FieldNode extends FieldVisitor {
/**
- * The field's access flags (see {@link org.objectweb.asm.Opcodes}). This
+ * The field's access flags (see {@link scala.tools.asm.Opcodes}). This
* field also indicates if the field is synthetic and/or deprecated.
*/
public int access;
@@ -57,7 +57,7 @@ public class FieldNode extends FieldVisitor {
public String name;
/**
- * The field's descriptor (see {@link org.objectweb.asm.Type}).
+ * The field's descriptor (see {@link scala.tools.asm.Type}).
*/
public String desc;
@@ -67,8 +67,8 @@ public class FieldNode extends FieldVisitor {
public String signature;
/**
- * The field's initial value. This field, which may be <tt>null</tt> if
- * the field does not have an initial value, must be an {@link Integer}, a
+ * The field's initial value. This field, which may be <tt>null</tt> if the
+ * field does not have an initial value, must be an {@link Integer}, a
* {@link Float}, a {@link Long}, a {@link Double} or a {@link String}.
*/
public Object value;
@@ -77,7 +77,7 @@ public class FieldNode extends FieldVisitor {
* The runtime visible annotations of this field. This list is a list of
* {@link AnnotationNode} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.tree.AnnotationNode
+ * @associates scala.tools.asm.tree.AnnotationNode
* @label visible
*/
public List<AnnotationNode> visibleAnnotations;
@@ -86,7 +86,7 @@ public class FieldNode extends FieldVisitor {
* The runtime invisible annotations of this field. This list is a list of
* {@link AnnotationNode} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.tree.AnnotationNode
+ * @associates scala.tools.asm.tree.AnnotationNode
* @label invisible
*/
public List<AnnotationNode> invisibleAnnotations;
@@ -95,7 +95,7 @@ public class FieldNode extends FieldVisitor {
* The non standard attributes of this field. This list is a list of
* {@link Attribute} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.Attribute
+ * @associates scala.tools.asm.Attribute
*/
public List<Attribute> attrs;
@@ -104,25 +104,25 @@ public class FieldNode extends FieldVisitor {
* constructor</i>. Instead, they must use the
* {@link #FieldNode(int, int, String, String, String, Object)} version.
*
- * @param access the field's access flags (see
- * {@link org.objectweb.asm.Opcodes}). This parameter also indicates
- * if the field is synthetic and/or deprecated.
- * @param name the field's name.
- * @param desc the field's descriptor (see {@link org.objectweb.asm.Type
- * Type}).
- * @param signature the field's signature.
- * @param value the field's initial value. This parameter, which may be
- * <tt>null</tt> if the field does not have an initial value, must be
- * an {@link Integer}, a {@link Float}, a {@link Long}, a
- * {@link Double} or a {@link String}.
+ * @param access
+ * the field's access flags (see
+ * {@link scala.tools.asm.Opcodes}). This parameter also
+ * indicates if the field is synthetic and/or deprecated.
+ * @param name
+ * the field's name.
+ * @param desc
+ * the field's descriptor (see {@link scala.tools.asm.Type
+ * Type}).
+ * @param signature
+ * the field's signature.
+ * @param value
+ * the field's initial value. This parameter, which may be
+ * <tt>null</tt> if the field does not have an initial value,
+ * must be an {@link Integer}, a {@link Float}, a {@link Long}, a
+ * {@link Double} or a {@link String}.
*/
- public FieldNode(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final Object value)
- {
+ public FieldNode(final int access, final String name, final String desc,
+ final String signature, final Object value) {
this(Opcodes.ASM4, access, name, desc, signature, value);
}
@@ -131,28 +131,28 @@ public class FieldNode extends FieldVisitor {
* constructor</i>. Instead, they must use the
* {@link #FieldNode(int, int, String, String, String, Object)} version.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
- * @param access the field's access flags (see
- * {@link org.objectweb.asm.Opcodes}). This parameter also indicates
- * if the field is synthetic and/or deprecated.
- * @param name the field's name.
- * @param desc the field's descriptor (see {@link org.objectweb.asm.Type
- * Type}).
- * @param signature the field's signature.
- * @param value the field's initial value. This parameter, which may be
- * <tt>null</tt> if the field does not have an initial value, must be
- * an {@link Integer}, a {@link Float}, a {@link Long}, a
- * {@link Double} or a {@link String}.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param access
+ * the field's access flags (see
+ * {@link scala.tools.asm.Opcodes}). This parameter also
+ * indicates if the field is synthetic and/or deprecated.
+ * @param name
+ * the field's name.
+ * @param desc
+ * the field's descriptor (see {@link scala.tools.asm.Type
+ * Type}).
+ * @param signature
+ * the field's signature.
+ * @param value
+ * the field's initial value. This parameter, which may be
+ * <tt>null</tt> if the field does not have an initial value,
+ * must be an {@link Integer}, a {@link Float}, a {@link Long}, a
+ * {@link Double} or a {@link String}.
*/
- public FieldNode(
- final int api,
- final int access,
- final String name,
- final String desc,
- final String signature,
- final Object value)
- {
+ public FieldNode(final int api, final int access, final String name,
+ final String desc, final String signature, final Object value) {
super(api);
this.access = access;
this.name = name;
@@ -166,10 +166,8 @@ public class FieldNode extends FieldVisitor {
// ------------------------------------------------------------------------
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
AnnotationNode an = new AnnotationNode(desc);
if (visible) {
if (visibleAnnotations == null) {
@@ -207,7 +205,8 @@ public class FieldNode extends FieldVisitor {
* contain elements that were introduced in more recent versions of the ASM
* API than the given version.
*
- * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}.
+ * @param api
+ * an ASM API version. Must be one of {@link Opcodes#ASM4}.
*/
public void check(final int api) {
// nothing to do
@@ -216,7 +215,8 @@ public class FieldNode extends FieldVisitor {
/**
* Makes the given class visitor visit this field.
*
- * @param cv a class visitor.
+ * @param cv
+ * a class visitor.
*/
public void accept(final ClassVisitor cv) {
FieldVisitor fv = cv.visitField(access, name, desc, signature, value);
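
For orientation, a minimal usage sketch of the FieldNode constructor reformatted above (illustrative only, not part of this patch; `cn` is an assumed, pre-existing ClassNode):

    // Hypothetical field "public static final int MAX = 10" added to an assumed ClassNode cn.
    FieldNode fn = new FieldNode(
            Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC | Opcodes.ACC_FINAL,
            "MAX",                 // name
            "I",                   // descriptor: int
            null,                  // no generic signature
            Integer.valueOf(10));  // initial value must be Integer/Float/Long/Double/String
    cn.fields.add(fn);
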
diff --git a/src/asm/scala/tools/asm/tree/FrameNode.java b/src/asm/scala/tools/asm/tree/FrameNode.java
index 66825de0ac..f13fc66749 100644
--- a/src/asm/scala/tools/asm/tree/FrameNode.java
+++ b/src/asm/scala/tools/asm/tree/FrameNode.java
@@ -45,8 +45,9 @@ import scala.tools.asm.Opcodes;
* the target of a jump instruction, or that starts an exception handler block.
* The stack map frame types must describe the values of the local variables and
* of the operand stack elements <i>just before</i> <b>i</b> is executed. <br>
- * <br> (*) this is mandatory only for classes whose version is greater than or
- * equal to {@link Opcodes#V1_6 V1_6}.
+ * <br>
+ * (*) this is mandatory only for classes whose version is greater than or equal
+ * to {@link Opcodes#V1_6 V1_6}.
*
* @author Eric Bruneton
*/
@@ -83,48 +84,48 @@ public class FrameNode extends AbstractInsnNode {
/**
* Constructs a new {@link FrameNode}.
*
- * @param type the type of this frame. Must be {@link Opcodes#F_NEW} for
- * expanded frames, or {@link Opcodes#F_FULL},
- * {@link Opcodes#F_APPEND}, {@link Opcodes#F_CHOP},
- * {@link Opcodes#F_SAME} or {@link Opcodes#F_APPEND},
- * {@link Opcodes#F_SAME1} for compressed frames.
- * @param nLocal number of local variables of this stack map frame.
- * @param local the types of the local variables of this stack map frame.
- * Elements of this list can be Integer, String or LabelNode objects
- * (for primitive, reference and uninitialized types respectively -
- * see {@link MethodVisitor}).
- * @param nStack number of operand stack elements of this stack map frame.
- * @param stack the types of the operand stack elements of this stack map
- * frame. Elements of this list can be Integer, String or LabelNode
- * objects (for primitive, reference and uninitialized types
- * respectively - see {@link MethodVisitor}).
+ * @param type
+ * the type of this frame. Must be {@link Opcodes#F_NEW} for
+ * expanded frames, or {@link Opcodes#F_FULL},
+ * {@link Opcodes#F_APPEND}, {@link Opcodes#F_CHOP},
+ *            {@link Opcodes#F_SAME} or
+ * {@link Opcodes#F_SAME1} for compressed frames.
+ * @param nLocal
+ * number of local variables of this stack map frame.
+ * @param local
+ * the types of the local variables of this stack map frame.
+ * Elements of this list can be Integer, String or LabelNode
+ * objects (for primitive, reference and uninitialized types
+ * respectively - see {@link MethodVisitor}).
+ * @param nStack
+ * number of operand stack elements of this stack map frame.
+ * @param stack
+ * the types of the operand stack elements of this stack map
+ * frame. Elements of this list can be Integer, String or
+ * LabelNode objects (for primitive, reference and uninitialized
+ * types respectively - see {@link MethodVisitor}).
*/
- public FrameNode(
- final int type,
- final int nLocal,
- final Object[] local,
- final int nStack,
- final Object[] stack)
- {
+ public FrameNode(final int type, final int nLocal, final Object[] local,
+ final int nStack, final Object[] stack) {
super(-1);
this.type = type;
switch (type) {
- case Opcodes.F_NEW:
- case Opcodes.F_FULL:
- this.local = asList(nLocal, local);
- this.stack = asList(nStack, stack);
- break;
- case Opcodes.F_APPEND:
- this.local = asList(nLocal, local);
- break;
- case Opcodes.F_CHOP:
- this.local = Arrays.asList(new Object[nLocal]);
- break;
- case Opcodes.F_SAME:
- break;
- case Opcodes.F_SAME1:
- this.stack = asList(1, stack);
- break;
+ case Opcodes.F_NEW:
+ case Opcodes.F_FULL:
+ this.local = asList(nLocal, local);
+ this.stack = asList(nStack, stack);
+ break;
+ case Opcodes.F_APPEND:
+ this.local = asList(nLocal, local);
+ break;
+ case Opcodes.F_CHOP:
+ this.local = Arrays.asList(new Object[nLocal]);
+ break;
+ case Opcodes.F_SAME:
+ break;
+ case Opcodes.F_SAME1:
+ this.stack = asList(1, stack);
+ break;
}
}
@@ -136,31 +137,29 @@ public class FrameNode extends AbstractInsnNode {
/**
* Makes the given visitor visit this stack map frame.
*
- * @param mv a method visitor.
+ * @param mv
+ * a method visitor.
*/
@Override
public void accept(final MethodVisitor mv) {
switch (type) {
- case Opcodes.F_NEW:
- case Opcodes.F_FULL:
- mv.visitFrame(type,
- local.size(),
- asArray(local),
- stack.size(),
- asArray(stack));
- break;
- case Opcodes.F_APPEND:
- mv.visitFrame(type, local.size(), asArray(local), 0, null);
- break;
- case Opcodes.F_CHOP:
- mv.visitFrame(type, local.size(), null, 0, null);
- break;
- case Opcodes.F_SAME:
- mv.visitFrame(type, 0, null, 0, null);
- break;
- case Opcodes.F_SAME1:
- mv.visitFrame(type, 0, null, 1, asArray(stack));
- break;
+ case Opcodes.F_NEW:
+ case Opcodes.F_FULL:
+ mv.visitFrame(type, local.size(), asArray(local), stack.size(),
+ asArray(stack));
+ break;
+ case Opcodes.F_APPEND:
+ mv.visitFrame(type, local.size(), asArray(local), 0, null);
+ break;
+ case Opcodes.F_CHOP:
+ mv.visitFrame(type, local.size(), null, 0, null);
+ break;
+ case Opcodes.F_SAME:
+ mv.visitFrame(type, 0, null, 0, null);
+ break;
+ case Opcodes.F_SAME1:
+ mv.visitFrame(type, 0, null, 1, asArray(stack));
+ break;
}
}
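
As a hedged illustration of the compressed frame types listed in the Javadoc above (not part of this patch; `insns` is an assumed InsnList, e.g. some MethodNode's instructions):

    // Append an F_SAME1 frame: locals unchanged, a single int on the operand stack.
    insns.add(new FrameNode(Opcodes.F_SAME1,
            0, null,                                // locals: same as previous frame
            1, new Object[] { Opcodes.INTEGER }));  // stack: one int
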
diff --git a/src/asm/scala/tools/asm/tree/IincInsnNode.java b/src/asm/scala/tools/asm/tree/IincInsnNode.java
index 75ac40884d..f9adf2e38c 100644
--- a/src/asm/scala/tools/asm/tree/IincInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/IincInsnNode.java
@@ -54,8 +54,10 @@ public class IincInsnNode extends AbstractInsnNode {
/**
* Constructs a new {@link IincInsnNode}.
*
- * @param var index of the local variable to be incremented.
- * @param incr increment amount to increment the local variable by.
+ * @param var
+ * index of the local variable to be incremented.
+ * @param incr
+ * increment amount to increment the local variable by.
*/
public IincInsnNode(final int var, final int incr) {
super(Opcodes.IINC);
@@ -77,4 +79,4 @@ public class IincInsnNode extends AbstractInsnNode {
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
return new IincInsnNode(var, incr);
}
-} \ No newline at end of file
+}
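
A one-line sketch for IincInsnNode (illustrative, with `insns` assumed as above):

    insns.add(new IincInsnNode(1, 1)); // equivalent of "i++" for the int in local slot 1
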
diff --git a/src/asm/scala/tools/asm/tree/InnerClassNode.java b/src/asm/scala/tools/asm/tree/InnerClassNode.java
index 4579488921..aa3810c759 100644
--- a/src/asm/scala/tools/asm/tree/InnerClassNode.java
+++ b/src/asm/scala/tools/asm/tree/InnerClassNode.java
@@ -40,14 +40,14 @@ public class InnerClassNode {
/**
* The internal name of an inner class (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+ * {@link scala.tools.asm.Type#getInternalName() getInternalName}).
*/
public String name;
/**
* The internal name of the class to which the inner class belongs (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). May
- * be <tt>null</tt>.
+ * {@link scala.tools.asm.Type#getInternalName() getInternalName}). May be
+ * <tt>null</tt>.
*/
public String outerName;
@@ -66,24 +66,23 @@ public class InnerClassNode {
/**
* Constructs a new {@link InnerClassNode}.
*
- * @param name the internal name of an inner class (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
- * @param outerName the internal name of the class to which the inner class
- * belongs (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
- * May be <tt>null</tt>.
- * @param innerName the (simple) name of the inner class inside its
- * enclosing class. May be <tt>null</tt> for anonymous inner
- * classes.
- * @param access the access flags of the inner class as originally declared
- * in the enclosing class.
+ * @param name
+ * the internal name of an inner class (see
+ * {@link scala.tools.asm.Type#getInternalName()
+ * getInternalName}).
+ * @param outerName
+ * the internal name of the class to which the inner class
+ * belongs (see {@link scala.tools.asm.Type#getInternalName()
+ * getInternalName}). May be <tt>null</tt>.
+ * @param innerName
+ * the (simple) name of the inner class inside its enclosing
+ * class. May be <tt>null</tt> for anonymous inner classes.
+ * @param access
+ * the access flags of the inner class as originally declared in
+ * the enclosing class.
*/
- public InnerClassNode(
- final String name,
- final String outerName,
- final String innerName,
- final int access)
- {
+ public InnerClassNode(final String name, final String outerName,
+ final String innerName, final int access) {
this.name = name;
this.outerName = outerName;
this.innerName = innerName;
@@ -93,7 +92,8 @@ public class InnerClassNode {
/**
* Makes the given class visitor visit this inner class.
*
- * @param cv a class visitor.
+ * @param cv
+ * a class visitor.
*/
public void accept(final ClassVisitor cv) {
cv.visitInnerClass(name, outerName, innerName, access);
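
A short sketch of recording an inner-class entry with the reformatted constructor (illustrative; `cn` and the class names are hypothetical):

    // Record that p/Outer$Inner is a private member class of p/Outer.
    cn.innerClasses.add(new InnerClassNode(
            "p/Outer$Inner",        // internal name of the inner class
            "p/Outer",              // owner class (may be null)
            "Inner",                // simple name (null for anonymous classes)
            Opcodes.ACC_PRIVATE));
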
diff --git a/src/asm/scala/tools/asm/tree/InsnList.java b/src/asm/scala/tools/asm/tree/InsnList.java
index dedd3bba73..55d83c2e8b 100644
--- a/src/asm/scala/tools/asm/tree/InsnList.java
+++ b/src/asm/scala/tools/asm/tree/InsnList.java
@@ -73,8 +73,8 @@ public class InsnList {
/**
* Returns the first instruction in this list.
*
- * @return the first instruction in this list, or <tt>null</tt> if the
- * list is empty.
+ * @return the first instruction in this list, or <tt>null</tt> if the list
+ * is empty.
*/
public AbstractInsnNode getFirst() {
return first;
@@ -96,9 +96,11 @@ public class InsnList {
* time it is called. Once the cache is built, this method run in constant
* time. This cache is invalidated by all the methods that modify the list.
*
- * @param index the index of the instruction that must be returned.
+ * @param index
+ * the index of the instruction that must be returned.
* @return the instruction whose index is given.
- * @throws IndexOutOfBoundsException if (index < 0 || index >= size()).
+ * @throws IndexOutOfBoundsException
+ * if (index < 0 || index >= size()).
*/
public AbstractInsnNode get(final int index) {
if (index < 0 || index >= size) {
@@ -111,11 +113,12 @@ public class InsnList {
}
/**
- * Returns <tt>true</tt> if the given instruction belongs to this list.
- * This method always scans the instructions of this list until it finds the
+ * Returns <tt>true</tt> if the given instruction belongs to this list. This
+ * method always scans the instructions of this list until it finds the
* given instruction or reaches the end of the list.
*
- * @param insn an instruction.
+ * @param insn
+ * an instruction.
* @return <tt>true</tt> if the given instruction belongs to this list.
*/
public boolean contains(final AbstractInsnNode insn) {
@@ -133,7 +136,8 @@ public class InsnList {
* constant time. The cache is invalidated by all the methods that modify
* the list.
*
- * @param insn an instruction <i>of this list</i>.
+ * @param insn
+ * an instruction <i>of this list</i>.
* @return the index of the given instruction in this list. <i>The result of
* this method is undefined if the given instruction does not belong
* to this list</i>. Use {@link #contains contains} to test if an
@@ -149,7 +153,8 @@ public class InsnList {
/**
* Makes the given visitor visit all of the instructions in this list.
*
- * @param mv the method visitor that must visit the instructions.
+ * @param mv
+ * the method visitor that must visit the instructions.
*/
public void accept(final MethodVisitor mv) {
AbstractInsnNode insn = first;
@@ -198,9 +203,11 @@ public class InsnList {
/**
* Replaces an instruction of this list with another instruction.
*
- * @param location an instruction <i>of this list</i>.
- * @param insn another instruction, <i>which must not belong to any
- * {@link InsnList}</i>.
+ * @param location
+ * an instruction <i>of this list</i>.
+ * @param insn
+ * another instruction, <i>which must not belong to any
+ * {@link InsnList}</i>.
*/
public void set(final AbstractInsnNode location, final AbstractInsnNode insn) {
AbstractInsnNode next = location.next;
@@ -232,8 +239,9 @@ public class InsnList {
/**
* Adds the given instruction to the end of this list.
*
- * @param insn an instruction, <i>which must not belong to any
- * {@link InsnList}</i>.
+ * @param insn
+ * an instruction, <i>which must not belong to any
+ * {@link InsnList}</i>.
*/
public void add(final AbstractInsnNode insn) {
++size;
@@ -252,8 +260,9 @@ public class InsnList {
/**
* Adds the given instructions to the end of this list.
*
- * @param insns an instruction list, which is cleared during the process.
- * This list must be different from 'this'.
+ * @param insns
+ * an instruction list, which is cleared during the process. This
+ * list must be different from 'this'.
*/
public void add(final InsnList insns) {
if (insns.size == 0) {
@@ -276,8 +285,9 @@ public class InsnList {
/**
* Inserts the given instruction at the begining of this list.
*
- * @param insn an instruction, <i>which must not belong to any
- * {@link InsnList}</i>.
+ * @param insn
+ * an instruction, <i>which must not belong to any
+ * {@link InsnList}</i>.
*/
public void insert(final AbstractInsnNode insn) {
++size;
@@ -296,8 +306,9 @@ public class InsnList {
/**
* Inserts the given instructions at the begining of this list.
*
- * @param insns an instruction list, which is cleared during the process.
- * This list must be different from 'this'.
+ * @param insns
+ * an instruction list, which is cleared during the process. This
+ * list must be different from 'this'.
*/
public void insert(final InsnList insns) {
if (insns.size == 0) {
@@ -320,12 +331,15 @@ public class InsnList {
/**
* Inserts the given instruction after the specified instruction.
*
- * @param location an instruction <i>of this list</i> after which insn must be
- * inserted.
- * @param insn the instruction to be inserted, <i>which must not belong to
- * any {@link InsnList}</i>.
+ * @param location
+ * an instruction <i>of this list</i> after which insn must be
+ * inserted.
+ * @param insn
+ * the instruction to be inserted, <i>which must not belong to
+ * any {@link InsnList}</i>.
*/
- public void insert(final AbstractInsnNode location, final AbstractInsnNode insn) {
+ public void insert(final AbstractInsnNode location,
+ final AbstractInsnNode insn) {
++size;
AbstractInsnNode next = location.next;
if (next == null) {
@@ -343,10 +357,12 @@ public class InsnList {
/**
* Inserts the given instructions after the specified instruction.
*
- * @param location an instruction <i>of this list</i> after which the
- * instructions must be inserted.
- * @param insns the instruction list to be inserted, which is cleared during
- * the process. This list must be different from 'this'.
+ * @param location
+ * an instruction <i>of this list</i> after which the
+ * instructions must be inserted.
+ * @param insns
+ * the instruction list to be inserted, which is cleared during
+ * the process. This list must be different from 'this'.
*/
public void insert(final AbstractInsnNode location, final InsnList insns) {
if (insns.size == 0) {
@@ -371,12 +387,15 @@ public class InsnList {
/**
* Inserts the given instruction before the specified instruction.
*
- * @param location an instruction <i>of this list</i> before which insn must be
- * inserted.
- * @param insn the instruction to be inserted, <i>which must not belong to
- * any {@link InsnList}</i>.
+ * @param location
+ * an instruction <i>of this list</i> before which insn must be
+ * inserted.
+ * @param insn
+ * the instruction to be inserted, <i>which must not belong to
+ * any {@link InsnList}</i>.
*/
- public void insertBefore(final AbstractInsnNode location, final AbstractInsnNode insn) {
+ public void insertBefore(final AbstractInsnNode location,
+ final AbstractInsnNode insn) {
++size;
AbstractInsnNode prev = location.prev;
if (prev == null) {
@@ -394,37 +413,39 @@ public class InsnList {
/**
* Inserts the given instructions before the specified instruction.
*
- * @param location an instruction <i>of this list</i> before which the instructions
- * must be inserted.
- * @param insns the instruction list to be inserted, which is cleared during
- * the process. This list must be different from 'this'.
+ * @param location
+ * an instruction <i>of this list</i> before which the
+ * instructions must be inserted.
+ * @param insns
+ * the instruction list to be inserted, which is cleared during
+ * the process. This list must be different from 'this'.
*/
- public void insertBefore(final AbstractInsnNode location, final InsnList insns) {
+ public void insertBefore(final AbstractInsnNode location,
+ final InsnList insns) {
if (insns.size == 0) {
return;
}
size += insns.size;
AbstractInsnNode ifirst = insns.first;
AbstractInsnNode ilast = insns.last;
- AbstractInsnNode prev = location .prev;
+ AbstractInsnNode prev = location.prev;
if (prev == null) {
first = ifirst;
} else {
prev.next = ifirst;
}
- location .prev = ilast;
- ilast.next = location ;
+ location.prev = ilast;
+ ilast.next = location;
ifirst.prev = prev;
cache = null;
insns.removeAll(false);
}
-
-
/**
* Removes the given instruction from this list.
*
- * @param insn the instruction <i>of this list</i> that must be removed.
+ * @param insn
+ * the instruction <i>of this list</i> that must be removed.
*/
public void remove(final AbstractInsnNode insn) {
--size;
@@ -456,8 +477,9 @@ public class InsnList {
/**
* Removes all of the instructions of this list.
*
- * @param mark if the instructions must be marked as no longer belonging to
- * any {@link InsnList}.
+ * @param mark
+ * if the instructions must be marked as no longer belonging to
+ * any {@link InsnList}.
*/
void removeAll(final boolean mark) {
if (mark) {
@@ -499,14 +521,14 @@ public class InsnList {
}
// this class is not generified because it will create bridges
- private final class InsnListIterator implements ListIterator/*<AbstractInsnNode>*/ {
+ private final class InsnListIterator implements ListIterator {
AbstractInsnNode next;
AbstractInsnNode prev;
InsnListIterator(int index) {
- if(index==size()) {
+ if (index == size()) {
next = null;
prev = getLast();
} else {
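
Since the InsnList operations reflowed above (add, insert, insertBefore, remove) are the core editing API, here is a brief hedged sketch of splicing instructions into an assumed method body (not part of this patch; `mn` is an existing MethodNode):

    // Prepend "ICONST_0; ISTORE 2" to the method body.
    InsnList prologue = new InsnList();
    prologue.add(new InsnNode(Opcodes.ICONST_0));
    prologue.add(new VarInsnNode(Opcodes.ISTORE, 2)); // int local in slot 2 (assumed free)
    mn.instructions.insert(prologue); // splice at the beginning; prologue is cleared by this call
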
diff --git a/src/asm/scala/tools/asm/tree/InsnNode.java b/src/asm/scala/tools/asm/tree/InsnNode.java
index d4664d23c2..4d5288cafa 100644
--- a/src/asm/scala/tools/asm/tree/InsnNode.java
+++ b/src/asm/scala/tools/asm/tree/InsnNode.java
@@ -43,20 +43,22 @@ public class InsnNode extends AbstractInsnNode {
/**
* Constructs a new {@link InsnNode}.
*
- * @param opcode the opcode of the instruction to be constructed. This
- * opcode must be NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1,
- * ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1,
- * FCONST_0, FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD, LALOAD,
- * FALOAD, DALOAD, AALOAD, BALOAD, CALOAD, SALOAD, IASTORE, LASTORE,
- * FASTORE, DASTORE, AASTORE, BASTORE, CASTORE, SASTORE, POP, POP2,
- * DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2, SWAP, IADD, LADD,
- * FADD, DADD, ISUB, LSUB, FSUB, DSUB, IMUL, LMUL, FMUL, DMUL, IDIV,
- * LDIV, FDIV, DDIV, IREM, LREM, FREM, DREM, INEG, LNEG, FNEG, DNEG,
- * ISHL, LSHL, ISHR, LSHR, IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR,
- * LXOR, I2L, I2F, I2D, L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F,
- * I2B, I2C, I2S, LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN,
- * FRETURN, DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW,
- * MONITORENTER, or MONITOREXIT.
+ * @param opcode
+ * the opcode of the instruction to be constructed. This opcode
+ * must be NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1,
+ * ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1,
+ * FCONST_0, FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD,
+ * LALOAD, FALOAD, DALOAD, AALOAD, BALOAD, CALOAD, SALOAD,
+ * IASTORE, LASTORE, FASTORE, DASTORE, AASTORE, BASTORE, CASTORE,
+ * SASTORE, POP, POP2, DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1,
+ * DUP2_X2, SWAP, IADD, LADD, FADD, DADD, ISUB, LSUB, FSUB, DSUB,
+ * IMUL, LMUL, FMUL, DMUL, IDIV, LDIV, FDIV, DDIV, IREM, LREM,
+ * FREM, DREM, INEG, LNEG, FNEG, DNEG, ISHL, LSHL, ISHR, LSHR,
+ * IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR, LXOR, I2L, I2F, I2D,
+ * L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F, I2B, I2C, I2S,
+ * LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN, FRETURN,
+ * DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW, MONITORENTER,
+ * or MONITOREXIT.
*/
public InsnNode(final int opcode) {
super(opcode);
@@ -70,7 +72,8 @@ public class InsnNode extends AbstractInsnNode {
/**
* Makes the given visitor visit this instruction.
*
- * @param mv a method visitor.
+ * @param mv
+ * a method visitor.
*/
@Override
public void accept(final MethodVisitor mv) {
diff --git a/src/asm/scala/tools/asm/tree/IntInsnNode.java b/src/asm/scala/tools/asm/tree/IntInsnNode.java
index b61270c786..e0aeed4bc8 100644
--- a/src/asm/scala/tools/asm/tree/IntInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/IntInsnNode.java
@@ -48,9 +48,11 @@ public class IntInsnNode extends AbstractInsnNode {
/**
* Constructs a new {@link IntInsnNode}.
*
- * @param opcode the opcode of the instruction to be constructed. This
- * opcode must be BIPUSH, SIPUSH or NEWARRAY.
- * @param operand the operand of the instruction to be constructed.
+ * @param opcode
+ * the opcode of the instruction to be constructed. This opcode
+ * must be BIPUSH, SIPUSH or NEWARRAY.
+ * @param operand
+ * the operand of the instruction to be constructed.
*/
public IntInsnNode(final int opcode, final int operand) {
super(opcode);
@@ -60,8 +62,9 @@ public class IntInsnNode extends AbstractInsnNode {
/**
* Sets the opcode of this instruction.
*
- * @param opcode the new instruction opcode. This opcode must be BIPUSH,
- * SIPUSH or NEWARRAY.
+ * @param opcode
+ * the new instruction opcode. This opcode must be BIPUSH, SIPUSH
+ * or NEWARRAY.
*/
public void setOpcode(final int opcode) {
this.opcode = opcode;
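
A minimal sketch for IntInsnNode (illustrative, `insns` assumed as before):

    insns.add(new IntInsnNode(Opcodes.BIPUSH, 42)); // push the byte-sized constant 42
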
diff --git a/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java b/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java
index d993b5a054..7ee84b875b 100644
--- a/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java
@@ -65,17 +65,17 @@ public class InvokeDynamicInsnNode extends AbstractInsnNode {
/**
* Constructs a new {@link InvokeDynamicInsnNode}.
*
- * @param name invokedynamic name.
- * @param desc invokedynamic descriptor (see {@link org.objectweb.asm.Type}).
- * @param bsm the bootstrap method.
- * @param bsmArgs the boostrap constant arguments.
+ * @param name
+ * invokedynamic name.
+ * @param desc
+ * invokedynamic descriptor (see {@link scala.tools.asm.Type}).
+ * @param bsm
+ * the bootstrap method.
+ * @param bsmArgs
+ *            the bootstrap constant arguments.
*/
- public InvokeDynamicInsnNode(
- final String name,
- final String desc,
- final Handle bsm,
- final Object... bsmArgs)
- {
+ public InvokeDynamicInsnNode(final String name, final String desc,
+ final Handle bsm, final Object... bsmArgs) {
super(Opcodes.INVOKEDYNAMIC);
this.name = name;
this.desc = desc;
@@ -97,4 +97,4 @@ public class InvokeDynamicInsnNode extends AbstractInsnNode {
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
return new InvokeDynamicInsnNode(name, desc, bsm, bsmArgs);
}
-} \ No newline at end of file
+}
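
A hedged sketch of building an invokedynamic node with the reformatted constructor (the bootstrap method, owner and descriptors here are hypothetical; the four-argument Handle constructor is the ASM 4 form):

    Handle bsm = new Handle(Opcodes.H_INVOKESTATIC, "p/Bootstraps", "bootstrap",
            "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;"
                    + "Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;");
    insns.add(new InvokeDynamicInsnNode("apply", "()Ljava/lang/Runnable;", bsm));
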
diff --git a/src/asm/scala/tools/asm/tree/JumpInsnNode.java b/src/asm/scala/tools/asm/tree/JumpInsnNode.java
index 339ebbd2d0..81e1e09deb 100644
--- a/src/asm/scala/tools/asm/tree/JumpInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/JumpInsnNode.java
@@ -50,13 +50,15 @@ public class JumpInsnNode extends AbstractInsnNode {
/**
* Constructs a new {@link JumpInsnNode}.
*
- * @param opcode the opcode of the type instruction to be constructed. This
- * opcode must be IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
- * IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ,
- * IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL.
- * @param label the operand of the instruction to be constructed. This
- * operand is a label that designates the instruction to which the
- * jump instruction may jump.
+ * @param opcode
+ * the opcode of the type instruction to be constructed. This
+ * opcode must be IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
+ * IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE,
+ * IF_ACMPEQ, IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL.
+ * @param label
+ * the operand of the instruction to be constructed. This operand
+ * is a label that designates the instruction to which the jump
+ * instruction may jump.
*/
public JumpInsnNode(final int opcode, final LabelNode label) {
super(opcode);
@@ -66,10 +68,11 @@ public class JumpInsnNode extends AbstractInsnNode {
/**
* Sets the opcode of this instruction.
*
- * @param opcode the new instruction opcode. This opcode must be IFEQ, IFNE,
- * IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ, IF_ICMPNE, IF_ICMPLT,
- * IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ, IF_ACMPNE, GOTO, JSR,
- * IFNULL or IFNONNULL.
+ * @param opcode
+ * the new instruction opcode. This opcode must be IFEQ, IFNE,
+ * IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ, IF_ICMPNE, IF_ICMPLT,
+ * IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ, IF_ACMPNE, GOTO,
+ * JSR, IFNULL or IFNONNULL.
*/
public void setOpcode(final int opcode) {
this.opcode = opcode;
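
A small sketch of the jump/label pairing described above (illustrative; `insns` assumed as before):

    LabelNode target = new LabelNode();
    insns.add(new JumpInsnNode(Opcodes.GOTO, target)); // unconditional forward jump
    // ... instructions that the jump skips ...
    insns.add(target);                                 // the label is itself an instruction node
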
diff --git a/src/asm/scala/tools/asm/tree/LabelNode.java b/src/asm/scala/tools/asm/tree/LabelNode.java
index 523a8d6442..44c48c1160 100644
--- a/src/asm/scala/tools/asm/tree/LabelNode.java
+++ b/src/asm/scala/tools/asm/tree/LabelNode.java
@@ -75,4 +75,4 @@ public class LabelNode extends AbstractInsnNode {
public void resetLabel() {
label = null;
}
-} \ No newline at end of file
+}
diff --git a/src/asm/scala/tools/asm/tree/LdcInsnNode.java b/src/asm/scala/tools/asm/tree/LdcInsnNode.java
index f8d115acd5..4e328f9b39 100644
--- a/src/asm/scala/tools/asm/tree/LdcInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/LdcInsnNode.java
@@ -44,16 +44,17 @@ public class LdcInsnNode extends AbstractInsnNode {
/**
* The constant to be loaded on the stack. This parameter must be a non null
* {@link Integer}, a {@link Float}, a {@link Long}, a {@link Double}, a
- * {@link String} or a {@link org.objectweb.asm.Type}.
+ * {@link String} or a {@link scala.tools.asm.Type}.
*/
public Object cst;
/**
* Constructs a new {@link LdcInsnNode}.
*
- * @param cst the constant to be loaded on the stack. This parameter must be
- * a non null {@link Integer}, a {@link Float}, a {@link Long}, a
- * {@link Double} or a {@link String}.
+ * @param cst
+ * the constant to be loaded on the stack. This parameter must be
+ * a non null {@link Integer}, a {@link Float}, a {@link Long}, a
+ * {@link Double} or a {@link String}.
*/
public LdcInsnNode(final Object cst) {
super(Opcodes.LDC);
@@ -74,4 +75,4 @@ public class LdcInsnNode extends AbstractInsnNode {
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
return new LdcInsnNode(cst);
}
-} \ No newline at end of file
+}
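
A one-line LdcInsnNode sketch (illustrative):

    insns.add(new LdcInsnNode("hello")); // push the String constant "hello"
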
diff --git a/src/asm/scala/tools/asm/tree/LineNumberNode.java b/src/asm/scala/tools/asm/tree/LineNumberNode.java
index acc83c8d30..9947aa70a9 100644
--- a/src/asm/scala/tools/asm/tree/LineNumberNode.java
+++ b/src/asm/scala/tools/asm/tree/LineNumberNode.java
@@ -55,9 +55,11 @@ public class LineNumberNode extends AbstractInsnNode {
/**
* Constructs a new {@link LineNumberNode}.
*
- * @param line a line number. This number refers to the source file from
- * which the class was compiled.
- * @param start the first instruction corresponding to this line number.
+ * @param line
+ * a line number. This number refers to the source file from
+ * which the class was compiled.
+ * @param start
+ * the first instruction corresponding to this line number.
*/
public LineNumberNode(final int line, final LabelNode start) {
super(-1);
diff --git a/src/asm/scala/tools/asm/tree/LocalVariableNode.java b/src/asm/scala/tools/asm/tree/LocalVariableNode.java
index 51cbd3ca00..0d8e27356f 100644
--- a/src/asm/scala/tools/asm/tree/LocalVariableNode.java
+++ b/src/asm/scala/tools/asm/tree/LocalVariableNode.java
@@ -73,24 +73,24 @@ public class LocalVariableNode {
/**
* Constructs a new {@link LocalVariableNode}.
*
- * @param name the name of a local variable.
- * @param desc the type descriptor of this local variable.
- * @param signature the signature of this local variable. May be
- * <tt>null</tt>.
- * @param start the first instruction corresponding to the scope of this
- * local variable (inclusive).
- * @param end the last instruction corresponding to the scope of this local
- * variable (exclusive).
- * @param index the local variable's index.
+ * @param name
+ * the name of a local variable.
+ * @param desc
+ * the type descriptor of this local variable.
+ * @param signature
+ * the signature of this local variable. May be <tt>null</tt>.
+ * @param start
+ * the first instruction corresponding to the scope of this local
+ * variable (inclusive).
+ * @param end
+ * the last instruction corresponding to the scope of this local
+ * variable (exclusive).
+ * @param index
+ * the local variable's index.
*/
- public LocalVariableNode(
- final String name,
- final String desc,
- final String signature,
- final LabelNode start,
- final LabelNode end,
- final int index)
- {
+ public LocalVariableNode(final String name, final String desc,
+ final String signature, final LabelNode start, final LabelNode end,
+ final int index) {
this.name = name;
this.desc = desc;
this.signature = signature;
@@ -102,14 +102,11 @@ public class LocalVariableNode {
/**
* Makes the given visitor visit this local variable declaration.
*
- * @param mv a method visitor.
+ * @param mv
+ * a method visitor.
*/
public void accept(final MethodVisitor mv) {
- mv.visitLocalVariable(name,
- desc,
- signature,
- start.getLabel(),
- end.getLabel(),
- index);
+ mv.visitLocalVariable(name, desc, signature, start.getLabel(),
+ end.getLabel(), index);
}
}
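
A sketch of registering a local-variable debug entry with the reformatted constructor (illustrative; `mn`, `startLabel` and `endLabel` are assumed, the labels already placed in mn.instructions):

    mn.localVariables.add(new LocalVariableNode(
            "count", "I", null, startLabel, endLabel, 2)); // int "count" in slot 2
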
diff --git a/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java b/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java
index 6d0f971c29..d2479b4814 100644
--- a/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java
@@ -64,20 +64,21 @@ public class LookupSwitchInsnNode extends AbstractInsnNode {
/**
* Constructs a new {@link LookupSwitchInsnNode}.
*
- * @param dflt beginning of the default handler block.
- * @param keys the values of the keys.
- * @param labels beginnings of the handler blocks. <tt>labels[i]</tt> is
- * the beginning of the handler block for the <tt>keys[i]</tt> key.
+ * @param dflt
+ * beginning of the default handler block.
+ * @param keys
+ * the values of the keys.
+ * @param labels
+ * beginnings of the handler blocks. <tt>labels[i]</tt> is the
+ * beginning of the handler block for the <tt>keys[i]</tt> key.
*/
- public LookupSwitchInsnNode(
- final LabelNode dflt,
- final int[] keys,
- final LabelNode[] labels)
- {
+ public LookupSwitchInsnNode(final LabelNode dflt, final int[] keys,
+ final LabelNode[] labels) {
super(Opcodes.LOOKUPSWITCH);
this.dflt = dflt;
this.keys = new ArrayList<Integer>(keys == null ? 0 : keys.length);
- this.labels = new ArrayList<LabelNode>(labels == null ? 0 : labels.length);
+ this.labels = new ArrayList<LabelNode>(labels == null ? 0
+ : labels.length);
if (keys != null) {
for (int i = 0; i < keys.length; ++i) {
this.keys.add(new Integer(keys[i]));
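
An illustrative LookupSwitchInsnNode sketch (labels and keys are hypothetical):

    LabelNode dflt = new LabelNode(), onTen = new LabelNode(), onTwenty = new LabelNode();
    insns.add(new LookupSwitchInsnNode(dflt,
            new int[] { 10, 20 },
            new LabelNode[] { onTen, onTwenty })); // labels[i] handles keys[i]
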
diff --git a/src/asm/scala/tools/asm/tree/MethodInsnNode.java b/src/asm/scala/tools/asm/tree/MethodInsnNode.java
index c3036bc6b4..bf09f556d8 100644
--- a/src/asm/scala/tools/asm/tree/MethodInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/MethodInsnNode.java
@@ -43,7 +43,7 @@ public class MethodInsnNode extends AbstractInsnNode {
/**
* The internal name of the method's owner class (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+ * {@link scala.tools.asm.Type#getInternalName() getInternalName}).
*/
public String owner;
@@ -53,27 +53,28 @@ public class MethodInsnNode extends AbstractInsnNode {
public String name;
/**
- * The method's descriptor (see {@link org.objectweb.asm.Type}).
+ * The method's descriptor (see {@link scala.tools.asm.Type}).
*/
public String desc;
/**
* Constructs a new {@link MethodInsnNode}.
*
- * @param opcode the opcode of the type instruction to be constructed. This
- * opcode must be INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or
- * INVOKEINTERFACE.
- * @param owner the internal name of the method's owner class (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
- * @param name the method's name.
- * @param desc the method's descriptor (see {@link org.objectweb.asm.Type}).
+ * @param opcode
+ * the opcode of the type instruction to be constructed. This
+ * opcode must be INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or
+ * INVOKEINTERFACE.
+ * @param owner
+ * the internal name of the method's owner class (see
+ * {@link scala.tools.asm.Type#getInternalName()
+ * getInternalName}).
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor (see {@link scala.tools.asm.Type}).
*/
- public MethodInsnNode(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public MethodInsnNode(final int opcode, final String owner,
+ final String name, final String desc) {
super(opcode);
this.owner = owner;
this.name = name;
@@ -83,8 +84,9 @@ public class MethodInsnNode extends AbstractInsnNode {
/**
* Sets the opcode of this instruction.
*
- * @param opcode the new instruction opcode. This opcode must be
- * INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or INVOKEINTERFACE.
+ * @param opcode
+ * the new instruction opcode. This opcode must be INVOKEVIRTUAL,
+ * INVOKESPECIAL, INVOKESTATIC or INVOKEINTERFACE.
*/
public void setOpcode(final int opcode) {
this.opcode = opcode;
@@ -104,4 +106,4 @@ public class MethodInsnNode extends AbstractInsnNode {
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
return new MethodInsnNode(opcode, owner, name, desc);
}
-} \ No newline at end of file
+}
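
A minimal sketch of the four-argument MethodInsnNode constructor reformatted above (illustrative only):

    // Invoke String.length() on the String currently on top of the stack.
    insns.add(new MethodInsnNode(Opcodes.INVOKEVIRTUAL,
            "java/lang/String", "length", "()I"));
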
diff --git a/src/asm/scala/tools/asm/tree/MethodNode.java b/src/asm/scala/tools/asm/tree/MethodNode.java
index 70ec39e058..5f9c778e0c 100644
--- a/src/asm/scala/tools/asm/tree/MethodNode.java
+++ b/src/asm/scala/tools/asm/tree/MethodNode.java
@@ -81,7 +81,7 @@ public class MethodNode extends MethodVisitor {
* The runtime visible annotations of this method. This list is a list of
* {@link AnnotationNode} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.tree.AnnotationNode
+ * @associates scala.tools.asm.tree.AnnotationNode
* @label visible
*/
public List<AnnotationNode> visibleAnnotations;
@@ -90,7 +90,7 @@ public class MethodNode extends MethodVisitor {
* The runtime invisible annotations of this method. This list is a list of
* {@link AnnotationNode} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.tree.AnnotationNode
+ * @associates scala.tools.asm.tree.AnnotationNode
* @label invisible
*/
public List<AnnotationNode> invisibleAnnotations;
@@ -99,7 +99,7 @@ public class MethodNode extends MethodVisitor {
* The non standard attributes of this method. This list is a list of
* {@link Attribute} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.Attribute
+ * @associates scala.tools.asm.Attribute
*/
public List<Attribute> attrs;
@@ -117,7 +117,7 @@ public class MethodNode extends MethodVisitor {
* The runtime visible parameter annotations of this method. These lists are
* lists of {@link AnnotationNode} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.tree.AnnotationNode
+ * @associates scala.tools.asm.tree.AnnotationNode
* @label invisible parameters
*/
public List<AnnotationNode>[] visibleParameterAnnotations;
@@ -126,7 +126,7 @@ public class MethodNode extends MethodVisitor {
* The runtime invisible parameter annotations of this method. These lists
* are lists of {@link AnnotationNode} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.tree.AnnotationNode
+ * @associates scala.tools.asm.tree.AnnotationNode
* @label visible parameters
*/
public List<AnnotationNode>[] invisibleParameterAnnotations;
@@ -135,7 +135,7 @@ public class MethodNode extends MethodVisitor {
* The instructions of this method. This list is a list of
* {@link AbstractInsnNode} objects.
*
- * @associates org.objectweb.asm.tree.AbstractInsnNode
+ * @associates scala.tools.asm.tree.AbstractInsnNode
* @label instructions
*/
public InsnList instructions;
@@ -144,7 +144,7 @@ public class MethodNode extends MethodVisitor {
* The try catch blocks of this method. This list is a list of
* {@link TryCatchBlockNode} objects.
*
- * @associates org.objectweb.asm.tree.TryCatchBlockNode
+ * @associates scala.tools.asm.tree.TryCatchBlockNode
*/
public List<TryCatchBlockNode> tryCatchBlocks;
@@ -162,7 +162,7 @@ public class MethodNode extends MethodVisitor {
* The local variables of this method. This list is a list of
* {@link LocalVariableNode} objects. May be <tt>null</tt>
*
- * @associates org.objectweb.asm.tree.LocalVariableNode
+ * @associates scala.tools.asm.tree.LocalVariableNode
*/
public List<LocalVariableNode> localVariables;
@@ -170,7 +170,7 @@ public class MethodNode extends MethodVisitor {
* If the accept method has been called on this object.
*/
private boolean visited;
-
+
/**
* Constructs an uninitialized {@link MethodNode}. <i>Subclasses must not
* use this constructor</i>. Instead, they must use the
@@ -183,8 +183,9 @@ public class MethodNode extends MethodVisitor {
/**
* Constructs an uninitialized {@link MethodNode}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
*/
public MethodNode(final int api) {
super(api);
@@ -196,56 +197,55 @@ public class MethodNode extends MethodVisitor {
* constructor</i>. Instead, they must use the
* {@link #MethodNode(int, int, String, String, String, String[])} version.
*
- * @param access the method's access flags (see {@link Opcodes}). This
- * parameter also indicates if the method is synthetic and/or
- * deprecated.
- * @param name the method's name.
- * @param desc the method's descriptor (see {@link Type}).
- * @param signature the method's signature. May be <tt>null</tt>.
- * @param exceptions the internal names of the method's exception classes
- * (see {@link Type#getInternalName() getInternalName}). May be
- * <tt>null</tt>.
+ * @param access
+ * the method's access flags (see {@link Opcodes}). This
+ * parameter also indicates if the method is synthetic and/or
+ * deprecated.
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor (see {@link Type}).
+ * @param signature
+ * the method's signature. May be <tt>null</tt>.
+ * @param exceptions
+ * the internal names of the method's exception classes (see
+ * {@link Type#getInternalName() getInternalName}). May be
+ * <tt>null</tt>.
*/
- public MethodNode(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final String[] exceptions)
- {
+ public MethodNode(final int access, final String name, final String desc,
+ final String signature, final String[] exceptions) {
this(Opcodes.ASM4, access, name, desc, signature, exceptions);
}
/**
* Constructs a new {@link MethodNode}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
- * @param access the method's access flags (see {@link Opcodes}). This
- * parameter also indicates if the method is synthetic and/or
- * deprecated.
- * @param name the method's name.
- * @param desc the method's descriptor (see {@link Type}).
- * @param signature the method's signature. May be <tt>null</tt>.
- * @param exceptions the internal names of the method's exception classes
- * (see {@link Type#getInternalName() getInternalName}). May be
- * <tt>null</tt>.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param access
+ * the method's access flags (see {@link Opcodes}). This
+ * parameter also indicates if the method is synthetic and/or
+ * deprecated.
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor (see {@link Type}).
+ * @param signature
+ * the method's signature. May be <tt>null</tt>.
+ * @param exceptions
+ * the internal names of the method's exception classes (see
+ * {@link Type#getInternalName() getInternalName}). May be
+ * <tt>null</tt>.
*/
- public MethodNode(
- final int api,
- final int access,
- final String name,
- final String desc,
- final String signature,
- final String[] exceptions)
- {
+ public MethodNode(final int api, final int access, final String name,
+ final String desc, final String signature, final String[] exceptions) {
super(api);
this.access = access;
this.name = name;
this.desc = desc;
this.signature = signature;
- this.exceptions = new ArrayList<String>(exceptions == null
- ? 0
+ this.exceptions = new ArrayList<String>(exceptions == null ? 0
: exceptions.length);
boolean isAbstract = (access & Opcodes.ACC_ABSTRACT) != 0;
if (!isAbstract) {
@@ -274,10 +274,8 @@ public class MethodNode extends MethodVisitor {
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
AnnotationNode an = new AnnotationNode(desc);
if (visible) {
if (visibleAnnotations == null) {
@@ -294,28 +292,27 @@ public class MethodNode extends MethodVisitor {
}
@Override
- public AnnotationVisitor visitParameterAnnotation(
- final int parameter,
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitParameterAnnotation(final int parameter,
+ final String desc, final boolean visible) {
AnnotationNode an = new AnnotationNode(desc);
if (visible) {
if (visibleParameterAnnotations == null) {
int params = Type.getArgumentTypes(this.desc).length;
- visibleParameterAnnotations = (List<AnnotationNode>[])new List<?>[params];
+ visibleParameterAnnotations = (List<AnnotationNode>[]) new List<?>[params];
}
if (visibleParameterAnnotations[parameter] == null) {
- visibleParameterAnnotations[parameter] = new ArrayList<AnnotationNode>(1);
+ visibleParameterAnnotations[parameter] = new ArrayList<AnnotationNode>(
+ 1);
}
visibleParameterAnnotations[parameter].add(an);
} else {
if (invisibleParameterAnnotations == null) {
int params = Type.getArgumentTypes(this.desc).length;
- invisibleParameterAnnotations = (List<AnnotationNode>[])new List<?>[params];
+ invisibleParameterAnnotations = (List<AnnotationNode>[]) new List<?>[params];
}
if (invisibleParameterAnnotations[parameter] == null) {
- invisibleParameterAnnotations[parameter] = new ArrayList<AnnotationNode>(1);
+ invisibleParameterAnnotations[parameter] = new ArrayList<AnnotationNode>(
+ 1);
}
invisibleParameterAnnotations[parameter].add(an);
}
@@ -335,17 +332,10 @@ public class MethodNode extends MethodVisitor {
}
@Override
- public void visitFrame(
- final int type,
- final int nLocal,
- final Object[] local,
- final int nStack,
- final Object[] stack)
- {
- instructions.add(new FrameNode(type, nLocal, local == null
- ? null
- : getLabelNodes(local), nStack, stack == null
- ? null
+ public void visitFrame(final int type, final int nLocal,
+ final Object[] local, final int nStack, final Object[] stack) {
+ instructions.add(new FrameNode(type, nLocal, local == null ? null
+ : getLabelNodes(local), nStack, stack == null ? null
: getLabelNodes(stack)));
}
@@ -370,32 +360,20 @@ public class MethodNode extends MethodVisitor {
}
@Override
- public void visitFieldInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitFieldInsn(final int opcode, final String owner,
+ final String name, final String desc) {
instructions.add(new FieldInsnNode(opcode, owner, name, desc));
}
@Override
- public void visitMethodInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc) {
instructions.add(new MethodInsnNode(opcode, owner, name, desc));
}
@Override
- public void visitInvokeDynamicInsn(
- String name,
- String desc,
- Handle bsm,
- Object... bsmArgs)
- {
+ public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
+ Object... bsmArgs) {
instructions.add(new InvokeDynamicInsnNode(name, desc, bsm, bsmArgs));
}
@@ -420,26 +398,16 @@ public class MethodNode extends MethodVisitor {
}
@Override
- public void visitTableSwitchInsn(
- final int min,
- final int max,
- final Label dflt,
- final Label... labels)
- {
- instructions.add(new TableSwitchInsnNode(min,
- max,
- getLabelNode(dflt),
+ public void visitTableSwitchInsn(final int min, final int max,
+ final Label dflt, final Label... labels) {
+ instructions.add(new TableSwitchInsnNode(min, max, getLabelNode(dflt),
getLabelNodes(labels)));
}
@Override
- public void visitLookupSwitchInsn(
- final Label dflt,
- final int[] keys,
- final Label[] labels)
- {
- instructions.add(new LookupSwitchInsnNode(getLabelNode(dflt),
- keys,
+ public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
+ final Label[] labels) {
+ instructions.add(new LookupSwitchInsnNode(getLabelNode(dflt), keys,
getLabelNodes(labels)));
}
@@ -449,33 +417,18 @@ public class MethodNode extends MethodVisitor {
}
@Override
- public void visitTryCatchBlock(
- final Label start,
- final Label end,
- final Label handler,
- final String type)
- {
+ public void visitTryCatchBlock(final Label start, final Label end,
+ final Label handler, final String type) {
tryCatchBlocks.add(new TryCatchBlockNode(getLabelNode(start),
- getLabelNode(end),
- getLabelNode(handler),
- type));
+ getLabelNode(end), getLabelNode(handler), type));
}
@Override
- public void visitLocalVariable(
- final String name,
- final String desc,
- final String signature,
- final Label start,
- final Label end,
- final int index)
- {
- localVariables.add(new LocalVariableNode(name,
- desc,
- signature,
- getLabelNode(start),
- getLabelNode(end),
- index));
+ public void visitLocalVariable(final String name, final String desc,
+ final String signature, final Label start, final Label end,
+ final int index) {
+ localVariables.add(new LocalVariableNode(name, desc, signature,
+ getLabelNode(start), getLabelNode(end), index));
}
@Override
@@ -499,12 +452,13 @@ public class MethodNode extends MethodVisitor {
* the {@link Label#info} field to store associations between labels and
* label nodes.
*
- * @param l a Label.
+ * @param l
+ * a Label.
* @return the LabelNode corresponding to l.
*/
protected LabelNode getLabelNode(final Label l) {
if (!(l.info instanceof LabelNode)) {
- l.info = new LabelNode(l);
+ l.info = new LabelNode();
}
return (LabelNode) l.info;
}
@@ -539,7 +493,8 @@ public class MethodNode extends MethodVisitor {
* recursively, do not contain elements that were introduced in more recent
* versions of the ASM API than the given version.
*
- * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}.
+ * @param api
+ * an ASM API version. Must be one of {@link Opcodes#ASM4}.
*/
public void check(final int api) {
// nothing to do
@@ -548,15 +503,13 @@ public class MethodNode extends MethodVisitor {
/**
* Makes the given class visitor visit this method.
*
- * @param cv a class visitor.
+ * @param cv
+ * a class visitor.
*/
public void accept(final ClassVisitor cv) {
String[] exceptions = new String[this.exceptions.size()];
this.exceptions.toArray(exceptions);
- MethodVisitor mv = cv.visitMethod(access,
- name,
- desc,
- signature,
+ MethodVisitor mv = cv.visitMethod(access, name, desc, signature,
exceptions);
if (mv != null) {
accept(mv);
@@ -566,7 +519,8 @@ public class MethodNode extends MethodVisitor {
/**
* Makes the given method visitor visit this method.
*
- * @param mv a method visitor.
+ * @param mv
+ * a method visitor.
*/
public void accept(final MethodVisitor mv) {
// visits the method attributes
@@ -588,8 +542,7 @@ public class MethodNode extends MethodVisitor {
AnnotationNode an = invisibleAnnotations.get(i);
an.accept(mv.visitAnnotation(an.desc, false));
}
- n = visibleParameterAnnotations == null
- ? 0
+ n = visibleParameterAnnotations == null ? 0
: visibleParameterAnnotations.length;
for (i = 0; i < n; ++i) {
List<?> l = visibleParameterAnnotations[i];
@@ -601,8 +554,7 @@ public class MethodNode extends MethodVisitor {
an.accept(mv.visitParameterAnnotation(i, an.desc, true));
}
}
- n = invisibleParameterAnnotations == null
- ? 0
+ n = invisibleParameterAnnotations == null ? 0
: invisibleParameterAnnotations.length;
for (i = 0; i < n; ++i) {
List<?> l = invisibleParameterAnnotations[i];
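
Pulling the pieces together, a hedged sketch of assembling a whole method with the reformatted MethodNode constructor (not part of this patch; `cn` is an assumed ClassNode, and the maxStack/maxLocals fields are set by hand here):

    // Hypothetical "public static int id(int x) { return x; }" built node by node.
    MethodNode mn = new MethodNode(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC,
            "id", "(I)I", null, null);
    mn.instructions.add(new VarInsnNode(Opcodes.ILOAD, 0));
    mn.instructions.add(new InsnNode(Opcodes.IRETURN));
    mn.maxStack = 1;
    mn.maxLocals = 1;
    cn.methods.add(mn);
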
diff --git a/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java b/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java
index 9dfba77335..fe5e8832b3 100644
--- a/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java
@@ -42,7 +42,7 @@ import scala.tools.asm.Opcodes;
public class MultiANewArrayInsnNode extends AbstractInsnNode {
/**
- * An array type descriptor (see {@link org.objectweb.asm.Type}).
+ * An array type descriptor (see {@link scala.tools.asm.Type}).
*/
public String desc;
@@ -54,8 +54,10 @@ public class MultiANewArrayInsnNode extends AbstractInsnNode {
/**
* Constructs a new {@link MultiANewArrayInsnNode}.
*
- * @param desc an array type descriptor (see {@link org.objectweb.asm.Type}).
- * @param dims number of dimensions of the array to allocate.
+ * @param desc
+ * an array type descriptor (see {@link scala.tools.asm.Type}).
+ * @param dims
+ * number of dimensions of the array to allocate.
*/
public MultiANewArrayInsnNode(final String desc, final int dims) {
super(Opcodes.MULTIANEWARRAY);
@@ -78,4 +80,4 @@ public class MultiANewArrayInsnNode extends AbstractInsnNode {
return new MultiANewArrayInsnNode(desc, dims);
}
-} \ No newline at end of file
+}
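
An illustrative MultiANewArrayInsnNode sketch (array type and sizes are hypothetical):

    // Allocate an int[3][4]: push both dimension sizes, then the array instruction.
    insns.add(new IntInsnNode(Opcodes.BIPUSH, 3));
    insns.add(new IntInsnNode(Opcodes.BIPUSH, 4));
    insns.add(new MultiANewArrayInsnNode("[[I", 2)); // array type descriptor, 2 dimensions
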
diff --git a/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java b/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java
index 929ad9b32b..9b3c2a3437 100644
--- a/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java
@@ -69,18 +69,18 @@ public class TableSwitchInsnNode extends AbstractInsnNode {
/**
* Constructs a new {@link TableSwitchInsnNode}.
*
- * @param min the minimum key value.
- * @param max the maximum key value.
- * @param dflt beginning of the default handler block.
- * @param labels beginnings of the handler blocks. <tt>labels[i]</tt> is
- * the beginning of the handler block for the <tt>min + i</tt> key.
+ * @param min
+ * the minimum key value.
+ * @param max
+ * the maximum key value.
+ * @param dflt
+ * beginning of the default handler block.
+ * @param labels
+ * beginnings of the handler blocks. <tt>labels[i]</tt> is the
+ * beginning of the handler block for the <tt>min + i</tt> key.
*/
- public TableSwitchInsnNode(
- final int min,
- final int max,
- final LabelNode dflt,
- final LabelNode... labels)
- {
+ public TableSwitchInsnNode(final int min, final int max,
+ final LabelNode dflt, final LabelNode... labels) {
super(Opcodes.TABLESWITCH);
this.min = min;
this.max = max;
@@ -107,9 +107,7 @@ public class TableSwitchInsnNode extends AbstractInsnNode {
@Override
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
- return new TableSwitchInsnNode(min,
- max,
- clone(dflt, labels),
- clone(this.labels, labels));
+ return new TableSwitchInsnNode(min, max, clone(dflt, labels), clone(
+ this.labels, labels));
}
-} \ No newline at end of file
+}
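
An illustrative TableSwitchInsnNode sketch (keys 0..2, labels hypothetical):

    LabelNode dflt = new LabelNode();
    LabelNode case0 = new LabelNode(), case1 = new LabelNode(), case2 = new LabelNode();
    insns.add(new TableSwitchInsnNode(0, 2, dflt, case0, case1, case2)); // labels[i] handles key min + i
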
diff --git a/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java b/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java
index 375b4cfcb9..ab4fa97c34 100644
--- a/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java
+++ b/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java
@@ -62,19 +62,19 @@ public class TryCatchBlockNode {
/**
* Constructs a new {@link TryCatchBlockNode}.
*
- * @param start beginning of the exception handler's scope (inclusive).
- * @param end end of the exception handler's scope (exclusive).
- * @param handler beginning of the exception handler's code.
- * @param type internal name of the type of exceptions handled by the
- * handler, or <tt>null</tt> to catch any exceptions (for "finally"
- * blocks).
+ * @param start
+ * beginning of the exception handler's scope (inclusive).
+ * @param end
+ * end of the exception handler's scope (exclusive).
+ * @param handler
+ * beginning of the exception handler's code.
+ * @param type
+ * internal name of the type of exceptions handled by the
+ * handler, or <tt>null</tt> to catch any exceptions (for
+ * "finally" blocks).
*/
- public TryCatchBlockNode(
- final LabelNode start,
- final LabelNode end,
- final LabelNode handler,
- final String type)
- {
+ public TryCatchBlockNode(final LabelNode start, final LabelNode end,
+ final LabelNode handler, final String type) {
this.start = start;
this.end = end;
this.handler = handler;
@@ -84,11 +84,11 @@ public class TryCatchBlockNode {
/**
* Makes the given visitor visit this try catch block.
*
- * @param mv a method visitor.
+ * @param mv
+ * a method visitor.
*/
public void accept(final MethodVisitor mv) {
- mv.visitTryCatchBlock(start.getLabel(), end.getLabel(), handler == null
- ? null
- : handler.getLabel(), type);
+ mv.visitTryCatchBlock(start.getLabel(), end.getLabel(),
+ handler == null ? null : handler.getLabel(), type);
}
}
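
A short sketch of registering an exception handler with the reformatted constructor (illustrative; `tryStart`, `tryEnd` and `handler` are assumed LabelNodes already placed in mn.instructions):

    mn.tryCatchBlocks.add(new TryCatchBlockNode(
            tryStart, tryEnd, handler, "java/io/IOException"));
    // Passing null instead of "java/io/IOException" would make this a "finally"-style handler.
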
diff --git a/src/asm/scala/tools/asm/tree/TypeInsnNode.java b/src/asm/scala/tools/asm/tree/TypeInsnNode.java
index 0b2666c498..3210dd60e6 100644
--- a/src/asm/scala/tools/asm/tree/TypeInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/TypeInsnNode.java
@@ -43,17 +43,19 @@ public class TypeInsnNode extends AbstractInsnNode {
/**
* The operand of this instruction. This operand is an internal name (see
- * {@link org.objectweb.asm.Type}).
+ * {@link scala.tools.asm.Type}).
*/
public String desc;
/**
* Constructs a new {@link TypeInsnNode}.
*
- * @param opcode the opcode of the type instruction to be constructed. This
- * opcode must be NEW, ANEWARRAY, CHECKCAST or INSTANCEOF.
- * @param desc the operand of the instruction to be constructed. This
- * operand is an internal name (see {@link org.objectweb.asm.Type}).
+ * @param opcode
+ * the opcode of the type instruction to be constructed. This
+ * opcode must be NEW, ANEWARRAY, CHECKCAST or INSTANCEOF.
+ * @param desc
+ * the operand of the instruction to be constructed. This operand
+ * is an internal name (see {@link scala.tools.asm.Type}).
*/
public TypeInsnNode(final int opcode, final String desc) {
super(opcode);
@@ -63,8 +65,9 @@ public class TypeInsnNode extends AbstractInsnNode {
/**
* Sets the opcode of this instruction.
*
- * @param opcode the new instruction opcode. This opcode must be NEW,
- * ANEWARRAY, CHECKCAST or INSTANCEOF.
+ * @param opcode
+ * the new instruction opcode. This opcode must be NEW,
+ * ANEWARRAY, CHECKCAST or INSTANCEOF.
*/
public void setOpcode(final int opcode) {
this.opcode = opcode;
@@ -84,4 +87,4 @@ public class TypeInsnNode extends AbstractInsnNode {
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
return new TypeInsnNode(opcode, desc);
}
-} \ No newline at end of file
+}
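
An illustrative TypeInsnNode sketch (class names hypothetical):

    insns.add(new TypeInsnNode(Opcodes.NEW, "java/lang/StringBuilder"));      // allocate, uninitialized
    insns.add(new TypeInsnNode(Opcodes.CHECKCAST, "java/lang/CharSequence")); // cast top of stack
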
diff --git a/src/asm/scala/tools/asm/tree/VarInsnNode.java b/src/asm/scala/tools/asm/tree/VarInsnNode.java
index 89f572db59..5dd9ef6726 100644
--- a/src/asm/scala/tools/asm/tree/VarInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/VarInsnNode.java
@@ -51,11 +51,13 @@ public class VarInsnNode extends AbstractInsnNode {
/**
* Constructs a new {@link VarInsnNode}.
*
- * @param opcode the opcode of the local variable instruction to be
- * constructed. This opcode must be ILOAD, LLOAD, FLOAD, DLOAD,
- * ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or RET.
- * @param var the operand of the instruction to be constructed. This operand
- * is the index of a local variable.
+ * @param opcode
+ * the opcode of the local variable instruction to be
+ * constructed. This opcode must be ILOAD, LLOAD, FLOAD, DLOAD,
+ * ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or RET.
+ * @param var
+ * the operand of the instruction to be constructed. This operand
+ * is the index of a local variable.
*/
public VarInsnNode(final int opcode, final int var) {
super(opcode);
@@ -65,9 +67,10 @@ public class VarInsnNode extends AbstractInsnNode {
/**
* Sets the opcode of this instruction.
*
- * @param opcode the new instruction opcode. This opcode must be ILOAD,
- * LLOAD, FLOAD, DLOAD, ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE
- * or RET.
+ * @param opcode
+ * the new instruction opcode. This opcode must be ILOAD, LLOAD,
+ * FLOAD, DLOAD, ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or
+ * RET.
*/
public void setOpcode(final int opcode) {
this.opcode = opcode;
@@ -87,4 +90,4 @@ public class VarInsnNode extends AbstractInsnNode {
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
return new VarInsnNode(opcode, var);
}
-}
\ No newline at end of file
+}
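For orientation, the two node classes reformatted above wrap single-operand instructions and are built directly from an opcode plus its operand. A minimal sketch of that usage (the class name, the chosen opcodes and the StringBuilder internal name are illustrative, not taken from this commit):

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.tree.InsnList;
    import scala.tools.asm.tree.TypeInsnNode;
    import scala.tools.asm.tree.VarInsnNode;

    class InsnNodeSketch {
        // Appends ALOAD 0 followed by NEW java/lang/StringBuilder, using the
        // constructors documented in the Javadoc above.
        static InsnList sample() {
            InsnList insns = new InsnList();
            insns.add(new VarInsnNode(Opcodes.ALOAD, 0));      // opcode: a *LOAD/*STORE opcode or RET
            insns.add(new TypeInsnNode(Opcodes.NEW,
                    "java/lang/StringBuilder"));               // operand: an internal name (see Type)
            return insns;
        }
    }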
diff --git a/src/asm/scala/tools/asm/tree/analysis/Analyzer.java b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
index df387b0b8e..0134555f10 100644
--- a/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
@@ -51,9 +51,10 @@ import scala.tools.asm.tree.VarInsnNode;
* A semantic bytecode analyzer. <i>This class does not fully check that JSR and
* RET instructions are valid.</i>
*
- * @param <V> type of the Value used for the analysis.
+ * @param <V>
+ * type of the Value used for the analysis.
*
- * @author Eric Bruneton
+ * @author Eric Bruneton
*/
public class Analyzer<V extends Value> implements Opcodes {
@@ -78,8 +79,9 @@ public class Analyzer<V extends Value> implements Opcodes {
/**
* Constructs a new {@link Analyzer}.
*
- * @param interpreter the interpreter to be used to symbolically interpret
- * the bytecode instructions.
+ * @param interpreter
+ * the interpreter to be used to symbolically interpret the
+ * bytecode instructions.
*/
public Analyzer(final Interpreter<V> interpreter) {
this.interpreter = interpreter;
@@ -88,26 +90,28 @@ public class Analyzer<V extends Value> implements Opcodes {
/**
* Analyzes the given method.
*
- * @param owner the internal name of the class to which the method belongs.
- * @param m the method to be analyzed.
+ * @param owner
+ * the internal name of the class to which the method belongs.
+ * @param m
+ * the method to be analyzed.
* @return the symbolic state of the execution stack frame at each bytecode
* instruction of the method. The size of the returned array is
* equal to the number of instructions (and labels) of the method. A
* given frame is <tt>null</tt> if and only if the corresponding
* instruction cannot be reached (dead code).
- * @throws AnalyzerException if a problem occurs during the analysis.
+ * @throws AnalyzerException
+ * if a problem occurs during the analysis.
*/
public Frame<V>[] analyze(final String owner, final MethodNode m)
- throws AnalyzerException
- {
+ throws AnalyzerException {
if ((m.access & (ACC_ABSTRACT | ACC_NATIVE)) != 0) {
- frames = (Frame<V>[])new Frame<?>[0];
+ frames = (Frame<V>[]) new Frame<?>[0];
return frames;
}
n = m.instructions.size();
insns = m.instructions;
- handlers = (List<TryCatchBlockNode>[])new List<?>[n];
- frames = (Frame<V>[])new Frame<?>[n];
+ handlers = (List<TryCatchBlockNode>[]) new List<?>[n];
+ frames = (Frame<V>[]) new Frame<?>[n];
subroutines = new Subroutine[n];
queued = new boolean[n];
queue = new int[n];
@@ -188,8 +192,7 @@ public class Analyzer<V extends Value> implements Opcodes {
if (insnType == AbstractInsnNode.LABEL
|| insnType == AbstractInsnNode.LINE
- || insnType == AbstractInsnNode.FRAME)
- {
+ || insnType == AbstractInsnNode.FRAME) {
merge(insn + 1, f, subroutine);
newControlFlowEdge(insn, insn + 1);
} else {
@@ -205,8 +208,7 @@ public class Analyzer<V extends Value> implements Opcodes {
int jump = insns.indexOf(j.label);
if (insnOpcode == JSR) {
merge(jump, current, new Subroutine(j.label,
- m.maxLocals,
- j));
+ m.maxLocals, j));
} else {
merge(jump, current, subroutine);
}
@@ -235,31 +237,27 @@ public class Analyzer<V extends Value> implements Opcodes {
}
} else if (insnOpcode == RET) {
if (subroutine == null) {
- throw new AnalyzerException(insnNode, "RET instruction outside of a sub routine");
+ throw new AnalyzerException(insnNode,
+ "RET instruction outside of a sub routine");
}
for (int i = 0; i < subroutine.callers.size(); ++i) {
JumpInsnNode caller = subroutine.callers.get(i);
int call = insns.indexOf(caller);
if (frames[call] != null) {
- merge(call + 1,
- frames[call],
- current,
- subroutines[call],
- subroutine.access);
+ merge(call + 1, frames[call], current,
+ subroutines[call], subroutine.access);
newControlFlowEdge(insn, call + 1);
}
}
} else if (insnOpcode != ATHROW
- && (insnOpcode < IRETURN || insnOpcode > RETURN))
- {
+ && (insnOpcode < IRETURN || insnOpcode > RETURN)) {
if (subroutine != null) {
if (insnNode instanceof VarInsnNode) {
int var = ((VarInsnNode) insnNode).var;
subroutine.access[var] = true;
if (insnOpcode == LLOAD || insnOpcode == DLOAD
|| insnOpcode == LSTORE
- || insnOpcode == DSTORE)
- {
+ || insnOpcode == DSTORE) {
subroutine.access[var + 1] = true;
}
} else if (insnNode instanceof IincInsnNode) {
@@ -292,23 +290,23 @@ public class Analyzer<V extends Value> implements Opcodes {
}
}
} catch (AnalyzerException e) {
- throw new AnalyzerException(e.node, "Error at instruction " + insn
- + ": " + e.getMessage(), e);
+ throw new AnalyzerException(e.node, "Error at instruction "
+ + insn + ": " + e.getMessage(), e);
} catch (Exception e) {
- throw new AnalyzerException(insnNode, "Error at instruction " + insn
- + ": " + e.getMessage(), e);
+ throw new AnalyzerException(insnNode, "Error at instruction "
+ + insn + ": " + e.getMessage(), e);
}
}
return frames;
}
- private void findSubroutine(int insn, final Subroutine sub, final List<AbstractInsnNode> calls)
- throws AnalyzerException
- {
+ private void findSubroutine(int insn, final Subroutine sub,
+ final List<AbstractInsnNode> calls) throws AnalyzerException {
while (true) {
if (insn < 0 || insn >= n) {
- throw new AnalyzerException(null, "Execution can fall off end of the code");
+ throw new AnalyzerException(null,
+ "Execution can fall off end of the code");
}
if (subroutines[insn] != null) {
return;
@@ -352,18 +350,18 @@ public class Analyzer<V extends Value> implements Opcodes {
// if insn does not falls through to the next instruction, return.
switch (node.getOpcode()) {
- case GOTO:
- case RET:
- case TABLESWITCH:
- case LOOKUPSWITCH:
- case IRETURN:
- case LRETURN:
- case FRETURN:
- case DRETURN:
- case ARETURN:
- case RETURN:
- case ATHROW:
- return;
+ case GOTO:
+ case RET:
+ case TABLESWITCH:
+ case LOOKUPSWITCH:
+ case IRETURN:
+ case LRETURN:
+ case FRETURN:
+ case DRETURN:
+ case ARETURN:
+ case RETURN:
+ case ATHROW:
+ return;
}
insn++;
}
@@ -387,8 +385,9 @@ public class Analyzer<V extends Value> implements Opcodes {
/**
* Returns the exception handlers for the given instruction.
*
- * @param insn the index of an instruction of the last recently analyzed
- * method.
+ * @param insn
+ * the index of an instruction of the last recently analyzed
+ * method.
* @return a list of {@link TryCatchBlockNode} objects.
*/
public List<TryCatchBlockNode> getHandlers(final int insn) {
@@ -400,9 +399,12 @@ public class Analyzer<V extends Value> implements Opcodes {
* execution of control flow analysis loop in #analyze. The default
* implementation of this method does nothing.
*
- * @param owner the internal name of the class to which the method belongs.
- * @param m the method to be analyzed.
- * @throws AnalyzerException if a problem occurs.
+ * @param owner
+ * the internal name of the class to which the method belongs.
+ * @param m
+ * the method to be analyzed.
+ * @throws AnalyzerException
+ * if a problem occurs.
*/
protected void init(String owner, MethodNode m) throws AnalyzerException {
}
@@ -410,8 +412,10 @@ public class Analyzer<V extends Value> implements Opcodes {
/**
* Constructs a new frame with the given size.
*
- * @param nLocals the maximum number of local variables of the frame.
- * @param nStack the maximum stack size of the frame.
+ * @param nLocals
+ * the maximum number of local variables of the frame.
+ * @param nStack
+ * the maximum stack size of the frame.
* @return the created frame.
*/
protected Frame<V> newFrame(final int nLocals, final int nStack) {
@@ -421,7 +425,8 @@ public class Analyzer<V extends Value> implements Opcodes {
/**
* Constructs a new frame that is identical to the given frame.
*
- * @param src a frame.
+ * @param src
+ * a frame.
* @return the created frame.
*/
protected Frame<V> newFrame(final Frame<? extends V> src) {
@@ -434,8 +439,10 @@ public class Analyzer<V extends Value> implements Opcodes {
* control flow graph of a method (this method is called by the
* {@link #analyze analyze} method during its visit of the method's code).
*
- * @param insn an instruction index.
- * @param successor index of a successor instruction.
+ * @param insn
+ * an instruction index.
+ * @param successor
+ * index of a successor instruction.
*/
protected void newControlFlowEdge(final int insn, final int successor) {
}
@@ -447,16 +454,16 @@ public class Analyzer<V extends Value> implements Opcodes {
* method is called by the {@link #analyze analyze} method during its visit
* of the method's code).
*
- * @param insn an instruction index.
- * @param successor index of a successor instruction.
+ * @param insn
+ * an instruction index.
+ * @param successor
+ * index of a successor instruction.
* @return true if this edge must be considered in the data flow analysis
* performed by this analyzer, or false otherwise. The default
* implementation of this method always returns true.
*/
- protected boolean newControlFlowExceptionEdge(
- final int insn,
- final int successor)
- {
+ protected boolean newControlFlowExceptionEdge(final int insn,
+ final int successor) {
return true;
}
@@ -469,28 +476,25 @@ public class Analyzer<V extends Value> implements Opcodes {
* the {@link #analyze analyze} method during its visit of the method's
* code).
*
- * @param insn an instruction index.
- * @param tcb TryCatchBlockNode corresponding to this edge.
+ * @param insn
+ * an instruction index.
+ * @param tcb
+ * TryCatchBlockNode corresponding to this edge.
* @return true if this edge must be considered in the data flow analysis
* performed by this analyzer, or false otherwise. The default
* implementation of this method delegates to
* {@link #newControlFlowExceptionEdge(int, int)
* newControlFlowExceptionEdge(int, int)}.
*/
- protected boolean newControlFlowExceptionEdge(
- final int insn,
- final TryCatchBlockNode tcb)
- {
+ protected boolean newControlFlowExceptionEdge(final int insn,
+ final TryCatchBlockNode tcb) {
return newControlFlowExceptionEdge(insn, insns.indexOf(tcb.handler));
}
// -------------------------------------------------------------------------
- private void merge(
- final int insn,
- final Frame<V> frame,
- final Subroutine subroutine) throws AnalyzerException
- {
+ private void merge(final int insn, final Frame<V> frame,
+ final Subroutine subroutine) throws AnalyzerException {
Frame<V> oldFrame = frames[insn];
Subroutine oldSubroutine = subroutines[insn];
boolean changes;
@@ -518,13 +522,9 @@ public class Analyzer<V extends Value> implements Opcodes {
}
}
- private void merge(
- final int insn,
- final Frame<V> beforeJSR,
- final Frame<V> afterRET,
- final Subroutine subroutineBeforeJSR,
- final boolean[] access) throws AnalyzerException
- {
+ private void merge(final int insn, final Frame<V> beforeJSR,
+ final Frame<V> afterRET, final Subroutine subroutineBeforeJSR,
+ final boolean[] access) throws AnalyzerException {
Frame<V> oldFrame = frames[insn];
Subroutine oldSubroutine = subroutines[insn];
boolean changes;
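To put the Analyzer changes above in context, the class is normally driven by handing an Interpreter to its constructor and calling analyze once per method; a null entry in the returned frame array marks dead code, as the reflowed Javadoc states. A minimal sketch, assuming the class bytes are already in hand (class and variable names here are illustrative):

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.tree.ClassNode;
    import scala.tools.asm.tree.MethodNode;
    import scala.tools.asm.tree.analysis.Analyzer;
    import scala.tools.asm.tree.analysis.AnalyzerException;
    import scala.tools.asm.tree.analysis.BasicInterpreter;
    import scala.tools.asm.tree.analysis.BasicValue;
    import scala.tools.asm.tree.analysis.Frame;

    class AnalyzerSketch {
        // Analyzes every method of a class and prints the indices of unreachable
        // instructions (frames[i] == null means instruction i is dead code).
        static void reportDeadCode(byte[] classBytes) throws AnalyzerException {
            ClassNode cn = new ClassNode();
            new ClassReader(classBytes).accept(cn, 0);
            for (MethodNode mn : cn.methods) {
                Frame<BasicValue>[] frames =
                        new Analyzer<BasicValue>(new BasicInterpreter()).analyze(cn.name, mn);
                for (int i = 0; i < frames.length; ++i) {
                    if (frames[i] == null) {
                        System.out.println(mn.name + ": unreachable instruction at index " + i);
                    }
                }
            }
        }
    }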
diff --git a/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java b/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java
index a89bb3513f..5e3f51f21a 100644
--- a/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java
+++ b/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java
@@ -46,17 +46,14 @@ public class AnalyzerException extends Exception {
this.node = node;
}
- public AnalyzerException(final AbstractInsnNode node, final String msg, final Throwable exception) {
+ public AnalyzerException(final AbstractInsnNode node, final String msg,
+ final Throwable exception) {
super(msg, exception);
this.node = node;
}
- public AnalyzerException(
- final AbstractInsnNode node,
- final String msg,
- final Object expected,
- final Value encountered)
- {
+ public AnalyzerException(final AbstractInsnNode node, final String msg,
+ final Object expected, final Value encountered) {
super((msg == null ? "Expected " : msg + ": expected ") + expected
+ ", but found " + encountered);
this.node = node;
diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java b/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java
index 64ddcc11e6..8d6653c1c5 100644
--- a/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java
@@ -50,8 +50,7 @@ import scala.tools.asm.tree.TypeInsnNode;
* @author Bing Ran
*/
public class BasicInterpreter extends Interpreter<BasicValue> implements
- Opcodes
-{
+ Opcodes {
public BasicInterpreter() {
super(ASM4);
@@ -67,292 +66,286 @@ public class BasicInterpreter extends Interpreter<BasicValue> implements
return BasicValue.UNINITIALIZED_VALUE;
}
switch (type.getSort()) {
- case Type.VOID:
- return null;
- case Type.BOOLEAN:
- case Type.CHAR:
- case Type.BYTE:
- case Type.SHORT:
- case Type.INT:
- return BasicValue.INT_VALUE;
- case Type.FLOAT:
- return BasicValue.FLOAT_VALUE;
- case Type.LONG:
- return BasicValue.LONG_VALUE;
- case Type.DOUBLE:
- return BasicValue.DOUBLE_VALUE;
- case Type.ARRAY:
- case Type.OBJECT:
- return BasicValue.REFERENCE_VALUE;
- default:
- throw new Error("Internal error");
+ case Type.VOID:
+ return null;
+ case Type.BOOLEAN:
+ case Type.CHAR:
+ case Type.BYTE:
+ case Type.SHORT:
+ case Type.INT:
+ return BasicValue.INT_VALUE;
+ case Type.FLOAT:
+ return BasicValue.FLOAT_VALUE;
+ case Type.LONG:
+ return BasicValue.LONG_VALUE;
+ case Type.DOUBLE:
+ return BasicValue.DOUBLE_VALUE;
+ case Type.ARRAY:
+ case Type.OBJECT:
+ return BasicValue.REFERENCE_VALUE;
+ default:
+ throw new Error("Internal error");
}
}
@Override
public BasicValue newOperation(final AbstractInsnNode insn)
- throws AnalyzerException
- {
+ throws AnalyzerException {
switch (insn.getOpcode()) {
- case ACONST_NULL:
- return newValue(Type.getObjectType("null"));
- case ICONST_M1:
- case ICONST_0:
- case ICONST_1:
- case ICONST_2:
- case ICONST_3:
- case ICONST_4:
- case ICONST_5:
+ case ACONST_NULL:
+ return newValue(Type.getObjectType("null"));
+ case ICONST_M1:
+ case ICONST_0:
+ case ICONST_1:
+ case ICONST_2:
+ case ICONST_3:
+ case ICONST_4:
+ case ICONST_5:
+ return BasicValue.INT_VALUE;
+ case LCONST_0:
+ case LCONST_1:
+ return BasicValue.LONG_VALUE;
+ case FCONST_0:
+ case FCONST_1:
+ case FCONST_2:
+ return BasicValue.FLOAT_VALUE;
+ case DCONST_0:
+ case DCONST_1:
+ return BasicValue.DOUBLE_VALUE;
+ case BIPUSH:
+ case SIPUSH:
+ return BasicValue.INT_VALUE;
+ case LDC:
+ Object cst = ((LdcInsnNode) insn).cst;
+ if (cst instanceof Integer) {
return BasicValue.INT_VALUE;
- case LCONST_0:
- case LCONST_1:
- return BasicValue.LONG_VALUE;
- case FCONST_0:
- case FCONST_1:
- case FCONST_2:
+ } else if (cst instanceof Float) {
return BasicValue.FLOAT_VALUE;
- case DCONST_0:
- case DCONST_1:
+ } else if (cst instanceof Long) {
+ return BasicValue.LONG_VALUE;
+ } else if (cst instanceof Double) {
return BasicValue.DOUBLE_VALUE;
- case BIPUSH:
- case SIPUSH:
- return BasicValue.INT_VALUE;
- case LDC:
- Object cst = ((LdcInsnNode) insn).cst;
- if (cst instanceof Integer) {
- return BasicValue.INT_VALUE;
- } else if (cst instanceof Float) {
- return BasicValue.FLOAT_VALUE;
- } else if (cst instanceof Long) {
- return BasicValue.LONG_VALUE;
- } else if (cst instanceof Double) {
- return BasicValue.DOUBLE_VALUE;
- } else if (cst instanceof String) {
- return newValue(Type.getObjectType("java/lang/String"));
- } else if (cst instanceof Type) {
- int sort = ((Type) cst).getSort();
- if (sort == Type.OBJECT || sort == Type.ARRAY) {
- return newValue(Type.getObjectType("java/lang/Class"));
- } else if (sort == Type.METHOD) {
- return newValue(Type.getObjectType("java/lang/invoke/MethodType"));
- } else {
- throw new IllegalArgumentException("Illegal LDC constant " + cst);
- }
- } else if (cst instanceof Handle) {
- return newValue(Type.getObjectType("java/lang/invoke/MethodHandle"));
+ } else if (cst instanceof String) {
+ return newValue(Type.getObjectType("java/lang/String"));
+ } else if (cst instanceof Type) {
+ int sort = ((Type) cst).getSort();
+ if (sort == Type.OBJECT || sort == Type.ARRAY) {
+ return newValue(Type.getObjectType("java/lang/Class"));
+ } else if (sort == Type.METHOD) {
+ return newValue(Type
+ .getObjectType("java/lang/invoke/MethodType"));
} else {
- throw new IllegalArgumentException("Illegal LDC constant " + cst);
+ throw new IllegalArgumentException("Illegal LDC constant "
+ + cst);
}
- case JSR:
- return BasicValue.RETURNADDRESS_VALUE;
- case GETSTATIC:
- return newValue(Type.getType(((FieldInsnNode) insn).desc));
- case NEW:
- return newValue(Type.getObjectType(((TypeInsnNode) insn).desc));
- default:
- throw new Error("Internal error.");
+ } else if (cst instanceof Handle) {
+ return newValue(Type
+ .getObjectType("java/lang/invoke/MethodHandle"));
+ } else {
+ throw new IllegalArgumentException("Illegal LDC constant "
+ + cst);
+ }
+ case JSR:
+ return BasicValue.RETURNADDRESS_VALUE;
+ case GETSTATIC:
+ return newValue(Type.getType(((FieldInsnNode) insn).desc));
+ case NEW:
+ return newValue(Type.getObjectType(((TypeInsnNode) insn).desc));
+ default:
+ throw new Error("Internal error.");
}
}
@Override
- public BasicValue copyOperation(final AbstractInsnNode insn, final BasicValue value)
- throws AnalyzerException
- {
+ public BasicValue copyOperation(final AbstractInsnNode insn,
+ final BasicValue value) throws AnalyzerException {
return value;
}
@Override
- public BasicValue unaryOperation(final AbstractInsnNode insn, final BasicValue value)
- throws AnalyzerException
- {
+ public BasicValue unaryOperation(final AbstractInsnNode insn,
+ final BasicValue value) throws AnalyzerException {
switch (insn.getOpcode()) {
- case INEG:
- case IINC:
- case L2I:
- case F2I:
- case D2I:
- case I2B:
- case I2C:
- case I2S:
- return BasicValue.INT_VALUE;
- case FNEG:
- case I2F:
- case L2F:
- case D2F:
- return BasicValue.FLOAT_VALUE;
- case LNEG:
- case I2L:
- case F2L:
- case D2L:
- return BasicValue.LONG_VALUE;
- case DNEG:
- case I2D:
- case L2D:
- case F2D:
- return BasicValue.DOUBLE_VALUE;
- case IFEQ:
- case IFNE:
- case IFLT:
- case IFGE:
- case IFGT:
- case IFLE:
- case TABLESWITCH:
- case LOOKUPSWITCH:
- case IRETURN:
- case LRETURN:
- case FRETURN:
- case DRETURN:
- case ARETURN:
- case PUTSTATIC:
- return null;
- case GETFIELD:
- return newValue(Type.getType(((FieldInsnNode) insn).desc));
- case NEWARRAY:
- switch (((IntInsnNode) insn).operand) {
- case T_BOOLEAN:
- return newValue(Type.getType("[Z"));
- case T_CHAR:
- return newValue(Type.getType("[C"));
- case T_BYTE:
- return newValue(Type.getType("[B"));
- case T_SHORT:
- return newValue(Type.getType("[S"));
- case T_INT:
- return newValue(Type.getType("[I"));
- case T_FLOAT:
- return newValue(Type.getType("[F"));
- case T_DOUBLE:
- return newValue(Type.getType("[D"));
- case T_LONG:
- return newValue(Type.getType("[J"));
- default:
- throw new AnalyzerException(insn, "Invalid array type");
- }
- case ANEWARRAY:
- String desc = ((TypeInsnNode) insn).desc;
- return newValue(Type.getType("[" + Type.getObjectType(desc)));
- case ARRAYLENGTH:
- return BasicValue.INT_VALUE;
- case ATHROW:
- return null;
- case CHECKCAST:
- desc = ((TypeInsnNode) insn).desc;
- return newValue(Type.getObjectType(desc));
- case INSTANCEOF:
- return BasicValue.INT_VALUE;
- case MONITORENTER:
- case MONITOREXIT:
- case IFNULL:
- case IFNONNULL:
- return null;
+ case INEG:
+ case IINC:
+ case L2I:
+ case F2I:
+ case D2I:
+ case I2B:
+ case I2C:
+ case I2S:
+ return BasicValue.INT_VALUE;
+ case FNEG:
+ case I2F:
+ case L2F:
+ case D2F:
+ return BasicValue.FLOAT_VALUE;
+ case LNEG:
+ case I2L:
+ case F2L:
+ case D2L:
+ return BasicValue.LONG_VALUE;
+ case DNEG:
+ case I2D:
+ case L2D:
+ case F2D:
+ return BasicValue.DOUBLE_VALUE;
+ case IFEQ:
+ case IFNE:
+ case IFLT:
+ case IFGE:
+ case IFGT:
+ case IFLE:
+ case TABLESWITCH:
+ case LOOKUPSWITCH:
+ case IRETURN:
+ case LRETURN:
+ case FRETURN:
+ case DRETURN:
+ case ARETURN:
+ case PUTSTATIC:
+ return null;
+ case GETFIELD:
+ return newValue(Type.getType(((FieldInsnNode) insn).desc));
+ case NEWARRAY:
+ switch (((IntInsnNode) insn).operand) {
+ case T_BOOLEAN:
+ return newValue(Type.getType("[Z"));
+ case T_CHAR:
+ return newValue(Type.getType("[C"));
+ case T_BYTE:
+ return newValue(Type.getType("[B"));
+ case T_SHORT:
+ return newValue(Type.getType("[S"));
+ case T_INT:
+ return newValue(Type.getType("[I"));
+ case T_FLOAT:
+ return newValue(Type.getType("[F"));
+ case T_DOUBLE:
+ return newValue(Type.getType("[D"));
+ case T_LONG:
+ return newValue(Type.getType("[J"));
default:
- throw new Error("Internal error.");
+ throw new AnalyzerException(insn, "Invalid array type");
+ }
+ case ANEWARRAY:
+ String desc = ((TypeInsnNode) insn).desc;
+ return newValue(Type.getType("[" + Type.getObjectType(desc)));
+ case ARRAYLENGTH:
+ return BasicValue.INT_VALUE;
+ case ATHROW:
+ return null;
+ case CHECKCAST:
+ desc = ((TypeInsnNode) insn).desc;
+ return newValue(Type.getObjectType(desc));
+ case INSTANCEOF:
+ return BasicValue.INT_VALUE;
+ case MONITORENTER:
+ case MONITOREXIT:
+ case IFNULL:
+ case IFNONNULL:
+ return null;
+ default:
+ throw new Error("Internal error.");
}
}
@Override
- public BasicValue binaryOperation(
- final AbstractInsnNode insn,
- final BasicValue value1,
- final BasicValue value2) throws AnalyzerException
- {
+ public BasicValue binaryOperation(final AbstractInsnNode insn,
+ final BasicValue value1, final BasicValue value2)
+ throws AnalyzerException {
switch (insn.getOpcode()) {
- case IALOAD:
- case BALOAD:
- case CALOAD:
- case SALOAD:
- case IADD:
- case ISUB:
- case IMUL:
- case IDIV:
- case IREM:
- case ISHL:
- case ISHR:
- case IUSHR:
- case IAND:
- case IOR:
- case IXOR:
- return BasicValue.INT_VALUE;
- case FALOAD:
- case FADD:
- case FSUB:
- case FMUL:
- case FDIV:
- case FREM:
- return BasicValue.FLOAT_VALUE;
- case LALOAD:
- case LADD:
- case LSUB:
- case LMUL:
- case LDIV:
- case LREM:
- case LSHL:
- case LSHR:
- case LUSHR:
- case LAND:
- case LOR:
- case LXOR:
- return BasicValue.LONG_VALUE;
- case DALOAD:
- case DADD:
- case DSUB:
- case DMUL:
- case DDIV:
- case DREM:
- return BasicValue.DOUBLE_VALUE;
- case AALOAD:
- return BasicValue.REFERENCE_VALUE;
- case LCMP:
- case FCMPL:
- case FCMPG:
- case DCMPL:
- case DCMPG:
- return BasicValue.INT_VALUE;
- case IF_ICMPEQ:
- case IF_ICMPNE:
- case IF_ICMPLT:
- case IF_ICMPGE:
- case IF_ICMPGT:
- case IF_ICMPLE:
- case IF_ACMPEQ:
- case IF_ACMPNE:
- case PUTFIELD:
- return null;
- default:
- throw new Error("Internal error.");
+ case IALOAD:
+ case BALOAD:
+ case CALOAD:
+ case SALOAD:
+ case IADD:
+ case ISUB:
+ case IMUL:
+ case IDIV:
+ case IREM:
+ case ISHL:
+ case ISHR:
+ case IUSHR:
+ case IAND:
+ case IOR:
+ case IXOR:
+ return BasicValue.INT_VALUE;
+ case FALOAD:
+ case FADD:
+ case FSUB:
+ case FMUL:
+ case FDIV:
+ case FREM:
+ return BasicValue.FLOAT_VALUE;
+ case LALOAD:
+ case LADD:
+ case LSUB:
+ case LMUL:
+ case LDIV:
+ case LREM:
+ case LSHL:
+ case LSHR:
+ case LUSHR:
+ case LAND:
+ case LOR:
+ case LXOR:
+ return BasicValue.LONG_VALUE;
+ case DALOAD:
+ case DADD:
+ case DSUB:
+ case DMUL:
+ case DDIV:
+ case DREM:
+ return BasicValue.DOUBLE_VALUE;
+ case AALOAD:
+ return BasicValue.REFERENCE_VALUE;
+ case LCMP:
+ case FCMPL:
+ case FCMPG:
+ case DCMPL:
+ case DCMPG:
+ return BasicValue.INT_VALUE;
+ case IF_ICMPEQ:
+ case IF_ICMPNE:
+ case IF_ICMPLT:
+ case IF_ICMPGE:
+ case IF_ICMPGT:
+ case IF_ICMPLE:
+ case IF_ACMPEQ:
+ case IF_ACMPNE:
+ case PUTFIELD:
+ return null;
+ default:
+ throw new Error("Internal error.");
}
}
@Override
- public BasicValue ternaryOperation(
- final AbstractInsnNode insn,
- final BasicValue value1,
- final BasicValue value2,
- final BasicValue value3) throws AnalyzerException
- {
+ public BasicValue ternaryOperation(final AbstractInsnNode insn,
+ final BasicValue value1, final BasicValue value2,
+ final BasicValue value3) throws AnalyzerException {
return null;
}
@Override
- public BasicValue naryOperation(final AbstractInsnNode insn, final List<? extends BasicValue> values)
- throws AnalyzerException
- {
+ public BasicValue naryOperation(final AbstractInsnNode insn,
+ final List<? extends BasicValue> values) throws AnalyzerException {
int opcode = insn.getOpcode();
if (opcode == MULTIANEWARRAY) {
return newValue(Type.getType(((MultiANewArrayInsnNode) insn).desc));
- } else if (opcode == INVOKEDYNAMIC){
- return newValue(Type.getReturnType(((InvokeDynamicInsnNode) insn).desc));
+ } else if (opcode == INVOKEDYNAMIC) {
+ return newValue(Type
+ .getReturnType(((InvokeDynamicInsnNode) insn).desc));
} else {
return newValue(Type.getReturnType(((MethodInsnNode) insn).desc));
}
}
@Override
- public void returnOperation(
- final AbstractInsnNode insn,
- final BasicValue value,
- final BasicValue expected) throws AnalyzerException
- {
+ public void returnOperation(final AbstractInsnNode insn,
+ final BasicValue value, final BasicValue expected)
+ throws AnalyzerException {
}
@Override
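One way to read the large switches reindented above: BasicInterpreter folds the whole JVM type system onto a handful of shared BasicValue constants, so an analysis built on it only distinguishes int-like, float, long, double, reference and return-address values. A small illustrative check (the class and method names are assumptions, not part of this commit):

    import scala.tools.asm.Type;
    import scala.tools.asm.tree.analysis.BasicInterpreter;
    import scala.tools.asm.tree.analysis.BasicValue;

    class BasicInterpreterSketch {
        // Shows the collapsing performed by newValue: booleans map to INT_VALUE,
        // object types to REFERENCE_VALUE, and void to null.
        static void demo() {
            BasicInterpreter interp = new BasicInterpreter();
            System.out.println(interp.newValue(Type.BOOLEAN_TYPE) == BasicValue.INT_VALUE);  // true
            System.out.println(interp.newValue(
                    Type.getObjectType("java/lang/String")) == BasicValue.REFERENCE_VALUE);  // true
            System.out.println(interp.newValue(Type.VOID_TYPE));                             // null
        }
    }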
diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicValue.java b/src/asm/scala/tools/asm/tree/analysis/BasicValue.java
index 6c449db9b0..439941fb9f 100644
--- a/src/asm/scala/tools/asm/tree/analysis/BasicValue.java
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicValue.java
@@ -48,11 +48,14 @@ public class BasicValue implements Value {
public static final BasicValue LONG_VALUE = new BasicValue(Type.LONG_TYPE);
- public static final BasicValue DOUBLE_VALUE = new BasicValue(Type.DOUBLE_TYPE);
+ public static final BasicValue DOUBLE_VALUE = new BasicValue(
+ Type.DOUBLE_TYPE);
- public static final BasicValue REFERENCE_VALUE = new BasicValue(Type.getObjectType("java/lang/Object"));
+ public static final BasicValue REFERENCE_VALUE = new BasicValue(
+ Type.getObjectType("java/lang/Object"));
- public static final BasicValue RETURNADDRESS_VALUE = new BasicValue(Type.VOID_TYPE);
+ public static final BasicValue RETURNADDRESS_VALUE = new BasicValue(
+ Type.VOID_TYPE);
private final Type type;
diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java b/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java
index 9297dd9294..71666edb74 100644
--- a/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java
@@ -55,47 +55,41 @@ public class BasicVerifier extends BasicInterpreter {
}
@Override
- public BasicValue copyOperation(final AbstractInsnNode insn, final BasicValue value)
- throws AnalyzerException
- {
+ public BasicValue copyOperation(final AbstractInsnNode insn,
+ final BasicValue value) throws AnalyzerException {
Value expected;
switch (insn.getOpcode()) {
- case ILOAD:
- case ISTORE:
- expected = BasicValue.INT_VALUE;
- break;
- case FLOAD:
- case FSTORE:
- expected = BasicValue.FLOAT_VALUE;
- break;
- case LLOAD:
- case LSTORE:
- expected = BasicValue.LONG_VALUE;
- break;
- case DLOAD:
- case DSTORE:
- expected = BasicValue.DOUBLE_VALUE;
- break;
- case ALOAD:
- if (!value.isReference()) {
- throw new AnalyzerException(insn,
- null,
- "an object reference",
- value);
- }
- return value;
- case ASTORE:
- if (!value.isReference()
- && !BasicValue.RETURNADDRESS_VALUE.equals(value))
- {
- throw new AnalyzerException(insn,
- null,
- "an object reference or a return address",
- value);
- }
- return value;
- default:
- return value;
+ case ILOAD:
+ case ISTORE:
+ expected = BasicValue.INT_VALUE;
+ break;
+ case FLOAD:
+ case FSTORE:
+ expected = BasicValue.FLOAT_VALUE;
+ break;
+ case LLOAD:
+ case LSTORE:
+ expected = BasicValue.LONG_VALUE;
+ break;
+ case DLOAD:
+ case DSTORE:
+ expected = BasicValue.DOUBLE_VALUE;
+ break;
+ case ALOAD:
+ if (!value.isReference()) {
+ throw new AnalyzerException(insn, null, "an object reference",
+ value);
+ }
+ return value;
+ case ASTORE:
+ if (!value.isReference()
+ && !BasicValue.RETURNADDRESS_VALUE.equals(value)) {
+ throw new AnalyzerException(insn, null,
+ "an object reference or a return address", value);
+ }
+ return value;
+ default:
+ return value;
}
if (!expected.equals(value)) {
throw new AnalyzerException(insn, null, expected, value);
@@ -104,91 +98,85 @@ public class BasicVerifier extends BasicInterpreter {
}
@Override
- public BasicValue unaryOperation(final AbstractInsnNode insn, final BasicValue value)
- throws AnalyzerException
- {
+ public BasicValue unaryOperation(final AbstractInsnNode insn,
+ final BasicValue value) throws AnalyzerException {
BasicValue expected;
switch (insn.getOpcode()) {
- case INEG:
- case IINC:
- case I2F:
- case I2L:
- case I2D:
- case I2B:
- case I2C:
- case I2S:
- case IFEQ:
- case IFNE:
- case IFLT:
- case IFGE:
- case IFGT:
- case IFLE:
- case TABLESWITCH:
- case LOOKUPSWITCH:
- case IRETURN:
- case NEWARRAY:
- case ANEWARRAY:
- expected = BasicValue.INT_VALUE;
- break;
- case FNEG:
- case F2I:
- case F2L:
- case F2D:
- case FRETURN:
- expected = BasicValue.FLOAT_VALUE;
- break;
- case LNEG:
- case L2I:
- case L2F:
- case L2D:
- case LRETURN:
- expected = BasicValue.LONG_VALUE;
- break;
- case DNEG:
- case D2I:
- case D2F:
- case D2L:
- case DRETURN:
- expected = BasicValue.DOUBLE_VALUE;
- break;
- case GETFIELD:
- expected = newValue(Type.getObjectType(((FieldInsnNode) insn).owner));
- break;
- case CHECKCAST:
- if (!value.isReference()) {
- throw new AnalyzerException(insn,
- null,
- "an object reference",
- value);
- }
- return super.unaryOperation(insn, value);
- case ARRAYLENGTH:
- if (!isArrayValue(value)) {
- throw new AnalyzerException(insn,
- null,
- "an array reference",
- value);
- }
- return super.unaryOperation(insn, value);
- case ARETURN:
- case ATHROW:
- case INSTANCEOF:
- case MONITORENTER:
- case MONITOREXIT:
- case IFNULL:
- case IFNONNULL:
- if (!value.isReference()) {
- throw new AnalyzerException(insn,
- null,
- "an object reference",
- value);
- }
- return super.unaryOperation(insn, value);
- case PUTSTATIC:
- expected = newValue(Type.getType(((FieldInsnNode) insn).desc));
- break;
- default:
- throw new Error("Internal error.");
+ case INEG:
+ case IINC:
+ case I2F:
+ case I2L:
+ case I2D:
+ case I2B:
+ case I2C:
+ case I2S:
+ case IFEQ:
+ case IFNE:
+ case IFLT:
+ case IFGE:
+ case IFGT:
+ case IFLE:
+ case TABLESWITCH:
+ case LOOKUPSWITCH:
+ case IRETURN:
+ case NEWARRAY:
+ case ANEWARRAY:
+ expected = BasicValue.INT_VALUE;
+ break;
+ case FNEG:
+ case F2I:
+ case F2L:
+ case F2D:
+ case FRETURN:
+ expected = BasicValue.FLOAT_VALUE;
+ break;
+ case LNEG:
+ case L2I:
+ case L2F:
+ case L2D:
+ case LRETURN:
+ expected = BasicValue.LONG_VALUE;
+ break;
+ case DNEG:
+ case D2I:
+ case D2F:
+ case D2L:
+ case DRETURN:
+ expected = BasicValue.DOUBLE_VALUE;
+ break;
+ case GETFIELD:
+ expected = newValue(Type
+ .getObjectType(((FieldInsnNode) insn).owner));
+ break;
+ case CHECKCAST:
+ if (!value.isReference()) {
+ throw new AnalyzerException(insn, null, "an object reference",
+ value);
+ }
+ return super.unaryOperation(insn, value);
+ case ARRAYLENGTH:
+ if (!isArrayValue(value)) {
+ throw new AnalyzerException(insn, null, "an array reference",
+ value);
+ }
+ return super.unaryOperation(insn, value);
+ case ARETURN:
+ case ATHROW:
+ case INSTANCEOF:
+ case MONITORENTER:
+ case MONITOREXIT:
+ case IFNULL:
+ case IFNONNULL:
+ if (!value.isReference()) {
+ throw new AnalyzerException(insn, null, "an object reference",
+ value);
+ }
+ return super.unaryOperation(insn, value);
+ case PUTSTATIC:
+ expected = newValue(Type.getType(((FieldInsnNode) insn).desc));
+ break;
+ default:
+ throw new Error("Internal error.");
}
if (!isSubTypeOf(value, expected)) {
throw new AnalyzerException(insn, null, expected, value);
@@ -197,125 +185,125 @@ public class BasicVerifier extends BasicInterpreter {
}
@Override
- public BasicValue binaryOperation(
- final AbstractInsnNode insn,
- final BasicValue value1,
- final BasicValue value2) throws AnalyzerException
- {
+ public BasicValue binaryOperation(final AbstractInsnNode insn,
+ final BasicValue value1, final BasicValue value2)
+ throws AnalyzerException {
BasicValue expected1;
BasicValue expected2;
switch (insn.getOpcode()) {
- case IALOAD:
- expected1 = newValue(Type.getType("[I"));
- expected2 = BasicValue.INT_VALUE;
- break;
- case BALOAD:
- if (isSubTypeOf(value1, newValue(Type.getType("[Z")))) {
- expected1 = newValue(Type.getType("[Z"));
- } else {
- expected1 = newValue(Type.getType("[B"));
- }
- expected2 = BasicValue.INT_VALUE;
- break;
- case CALOAD:
- expected1 = newValue(Type.getType("[C"));
- expected2 = BasicValue.INT_VALUE;
- break;
- case SALOAD:
- expected1 = newValue(Type.getType("[S"));
- expected2 = BasicValue.INT_VALUE;
- break;
- case LALOAD:
- expected1 = newValue(Type.getType("[J"));
- expected2 = BasicValue.INT_VALUE;
- break;
- case FALOAD:
- expected1 = newValue(Type.getType("[F"));
- expected2 = BasicValue.INT_VALUE;
- break;
- case DALOAD:
- expected1 = newValue(Type.getType("[D"));
- expected2 = BasicValue.INT_VALUE;
- break;
- case AALOAD:
- expected1 = newValue(Type.getType("[Ljava/lang/Object;"));
- expected2 = BasicValue.INT_VALUE;
- break;
- case IADD:
- case ISUB:
- case IMUL:
- case IDIV:
- case IREM:
- case ISHL:
- case ISHR:
- case IUSHR:
- case IAND:
- case IOR:
- case IXOR:
- case IF_ICMPEQ:
- case IF_ICMPNE:
- case IF_ICMPLT:
- case IF_ICMPGE:
- case IF_ICMPGT:
- case IF_ICMPLE:
- expected1 = BasicValue.INT_VALUE;
- expected2 = BasicValue.INT_VALUE;
- break;
- case FADD:
- case FSUB:
- case FMUL:
- case FDIV:
- case FREM:
- case FCMPL:
- case FCMPG:
- expected1 = BasicValue.FLOAT_VALUE;
- expected2 = BasicValue.FLOAT_VALUE;
- break;
- case LADD:
- case LSUB:
- case LMUL:
- case LDIV:
- case LREM:
- case LAND:
- case LOR:
- case LXOR:
- case LCMP:
- expected1 = BasicValue.LONG_VALUE;
- expected2 = BasicValue.LONG_VALUE;
- break;
- case LSHL:
- case LSHR:
- case LUSHR:
- expected1 = BasicValue.LONG_VALUE;
- expected2 = BasicValue.INT_VALUE;
- break;
- case DADD:
- case DSUB:
- case DMUL:
- case DDIV:
- case DREM:
- case DCMPL:
- case DCMPG:
- expected1 = BasicValue.DOUBLE_VALUE;
- expected2 = BasicValue.DOUBLE_VALUE;
- break;
- case IF_ACMPEQ:
- case IF_ACMPNE:
- expected1 = BasicValue.REFERENCE_VALUE;
- expected2 = BasicValue.REFERENCE_VALUE;
- break;
- case PUTFIELD:
- FieldInsnNode fin = (FieldInsnNode) insn;
- expected1 = newValue(Type.getObjectType(fin.owner));
- expected2 = newValue(Type.getType(fin.desc));
- break;
- default:
- throw new Error("Internal error.");
+ case IALOAD:
+ expected1 = newValue(Type.getType("[I"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case BALOAD:
+ if (isSubTypeOf(value1, newValue(Type.getType("[Z")))) {
+ expected1 = newValue(Type.getType("[Z"));
+ } else {
+ expected1 = newValue(Type.getType("[B"));
+ }
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case CALOAD:
+ expected1 = newValue(Type.getType("[C"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case SALOAD:
+ expected1 = newValue(Type.getType("[S"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case LALOAD:
+ expected1 = newValue(Type.getType("[J"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case FALOAD:
+ expected1 = newValue(Type.getType("[F"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case DALOAD:
+ expected1 = newValue(Type.getType("[D"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case AALOAD:
+ expected1 = newValue(Type.getType("[Ljava/lang/Object;"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case IADD:
+ case ISUB:
+ case IMUL:
+ case IDIV:
+ case IREM:
+ case ISHL:
+ case ISHR:
+ case IUSHR:
+ case IAND:
+ case IOR:
+ case IXOR:
+ case IF_ICMPEQ:
+ case IF_ICMPNE:
+ case IF_ICMPLT:
+ case IF_ICMPGE:
+ case IF_ICMPGT:
+ case IF_ICMPLE:
+ expected1 = BasicValue.INT_VALUE;
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case FADD:
+ case FSUB:
+ case FMUL:
+ case FDIV:
+ case FREM:
+ case FCMPL:
+ case FCMPG:
+ expected1 = BasicValue.FLOAT_VALUE;
+ expected2 = BasicValue.FLOAT_VALUE;
+ break;
+ case LADD:
+ case LSUB:
+ case LMUL:
+ case LDIV:
+ case LREM:
+ case LAND:
+ case LOR:
+ case LXOR:
+ case LCMP:
+ expected1 = BasicValue.LONG_VALUE;
+ expected2 = BasicValue.LONG_VALUE;
+ break;
+ case LSHL:
+ case LSHR:
+ case LUSHR:
+ expected1 = BasicValue.LONG_VALUE;
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case DADD:
+ case DSUB:
+ case DMUL:
+ case DDIV:
+ case DREM:
+ case DCMPL:
+ case DCMPG:
+ expected1 = BasicValue.DOUBLE_VALUE;
+ expected2 = BasicValue.DOUBLE_VALUE;
+ break;
+ case IF_ACMPEQ:
+ case IF_ACMPNE:
+ expected1 = BasicValue.REFERENCE_VALUE;
+ expected2 = BasicValue.REFERENCE_VALUE;
+ break;
+ case PUTFIELD:
+ FieldInsnNode fin = (FieldInsnNode) insn;
+ expected1 = newValue(Type.getObjectType(fin.owner));
+ expected2 = newValue(Type.getType(fin.desc));
+ break;
+ default:
+ throw new Error("Internal error.");
}
if (!isSubTypeOf(value1, expected1)) {
- throw new AnalyzerException(insn, "First argument", expected1, value1);
+ throw new AnalyzerException(insn, "First argument", expected1,
+ value1);
} else if (!isSubTypeOf(value2, expected2)) {
- throw new AnalyzerException(insn, "Second argument", expected2, value2);
+ throw new AnalyzerException(insn, "Second argument", expected2,
+ value2);
}
if (insn.getOpcode() == AALOAD) {
return getElementValue(value1);
@@ -325,79 +313,73 @@ public class BasicVerifier extends BasicInterpreter {
}
@Override
- public BasicValue ternaryOperation(
- final AbstractInsnNode insn,
- final BasicValue value1,
- final BasicValue value2,
- final BasicValue value3) throws AnalyzerException
- {
+ public BasicValue ternaryOperation(final AbstractInsnNode insn,
+ final BasicValue value1, final BasicValue value2,
+ final BasicValue value3) throws AnalyzerException {
BasicValue expected1;
BasicValue expected3;
switch (insn.getOpcode()) {
- case IASTORE:
- expected1 = newValue(Type.getType("[I"));
- expected3 = BasicValue.INT_VALUE;
- break;
- case BASTORE:
- if (isSubTypeOf(value1, newValue(Type.getType("[Z")))) {
- expected1 = newValue(Type.getType("[Z"));
- } else {
- expected1 = newValue(Type.getType("[B"));
- }
- expected3 = BasicValue.INT_VALUE;
- break;
- case CASTORE:
- expected1 = newValue(Type.getType("[C"));
- expected3 = BasicValue.INT_VALUE;
- break;
- case SASTORE:
- expected1 = newValue(Type.getType("[S"));
- expected3 = BasicValue.INT_VALUE;
- break;
- case LASTORE:
- expected1 = newValue(Type.getType("[J"));
- expected3 = BasicValue.LONG_VALUE;
- break;
- case FASTORE:
- expected1 = newValue(Type.getType("[F"));
- expected3 = BasicValue.FLOAT_VALUE;
- break;
- case DASTORE:
- expected1 = newValue(Type.getType("[D"));
- expected3 = BasicValue.DOUBLE_VALUE;
- break;
- case AASTORE:
- expected1 = value1;
- expected3 = BasicValue.REFERENCE_VALUE;
- break;
- default:
- throw new Error("Internal error.");
+ case IASTORE:
+ expected1 = newValue(Type.getType("[I"));
+ expected3 = BasicValue.INT_VALUE;
+ break;
+ case BASTORE:
+ if (isSubTypeOf(value1, newValue(Type.getType("[Z")))) {
+ expected1 = newValue(Type.getType("[Z"));
+ } else {
+ expected1 = newValue(Type.getType("[B"));
+ }
+ expected3 = BasicValue.INT_VALUE;
+ break;
+ case CASTORE:
+ expected1 = newValue(Type.getType("[C"));
+ expected3 = BasicValue.INT_VALUE;
+ break;
+ case SASTORE:
+ expected1 = newValue(Type.getType("[S"));
+ expected3 = BasicValue.INT_VALUE;
+ break;
+ case LASTORE:
+ expected1 = newValue(Type.getType("[J"));
+ expected3 = BasicValue.LONG_VALUE;
+ break;
+ case FASTORE:
+ expected1 = newValue(Type.getType("[F"));
+ expected3 = BasicValue.FLOAT_VALUE;
+ break;
+ case DASTORE:
+ expected1 = newValue(Type.getType("[D"));
+ expected3 = BasicValue.DOUBLE_VALUE;
+ break;
+ case AASTORE:
+ expected1 = value1;
+ expected3 = BasicValue.REFERENCE_VALUE;
+ break;
+ default:
+ throw new Error("Internal error.");
}
if (!isSubTypeOf(value1, expected1)) {
- throw new AnalyzerException(insn, "First argument", "a " + expected1
- + " array reference", value1);
+ throw new AnalyzerException(insn, "First argument", "a "
+ + expected1 + " array reference", value1);
} else if (!BasicValue.INT_VALUE.equals(value2)) {
throw new AnalyzerException(insn, "Second argument",
- BasicValue.INT_VALUE,
- value2);
+ BasicValue.INT_VALUE, value2);
} else if (!isSubTypeOf(value3, expected3)) {
- throw new AnalyzerException(insn, "Third argument", expected3, value3);
+ throw new AnalyzerException(insn, "Third argument", expected3,
+ value3);
}
return null;
}
@Override
- public BasicValue naryOperation(final AbstractInsnNode insn, final List<? extends BasicValue> values)
- throws AnalyzerException
- {
+ public BasicValue naryOperation(final AbstractInsnNode insn,
+ final List<? extends BasicValue> values) throws AnalyzerException {
int opcode = insn.getOpcode();
if (opcode == MULTIANEWARRAY) {
for (int i = 0; i < values.size(); ++i) {
if (!BasicValue.INT_VALUE.equals(values.get(i))) {
- throw new AnalyzerException(insn,
- null,
- BasicValue.INT_VALUE,
- values.get(i));
+ throw new AnalyzerException(insn, null,
+ BasicValue.INT_VALUE, values.get(i));
}
}
} else {
@@ -407,22 +389,18 @@ public class BasicVerifier extends BasicInterpreter {
Type owner = Type.getObjectType(((MethodInsnNode) insn).owner);
if (!isSubTypeOf(values.get(i++), newValue(owner))) {
throw new AnalyzerException(insn, "Method owner",
- newValue(owner),
- values.get(0));
+ newValue(owner), values.get(0));
}
}
- String desc = (opcode == INVOKEDYNAMIC)?
- ((InvokeDynamicInsnNode) insn).desc:
- ((MethodInsnNode) insn).desc;
+ String desc = (opcode == INVOKEDYNAMIC) ? ((InvokeDynamicInsnNode) insn).desc
+ : ((MethodInsnNode) insn).desc;
Type[] args = Type.getArgumentTypes(desc);
while (i < values.size()) {
BasicValue expected = newValue(args[j++]);
BasicValue encountered = values.get(i++);
if (!isSubTypeOf(encountered, expected)) {
- throw new AnalyzerException(insn,
- "Argument " + j,
- expected,
- encountered);
+ throw new AnalyzerException(insn, "Argument " + j,
+ expected, encountered);
}
}
}
@@ -430,16 +408,12 @@ public class BasicVerifier extends BasicInterpreter {
}
@Override
- public void returnOperation(
- final AbstractInsnNode insn,
- final BasicValue value,
- final BasicValue expected) throws AnalyzerException
- {
+ public void returnOperation(final AbstractInsnNode insn,
+ final BasicValue value, final BasicValue expected)
+ throws AnalyzerException {
if (!isSubTypeOf(value, expected)) {
- throw new AnalyzerException(insn,
- "Incompatible return type",
- expected,
- value);
+ throw new AnalyzerException(insn, "Incompatible return type",
+ expected, value);
}
}
@@ -448,12 +422,12 @@ public class BasicVerifier extends BasicInterpreter {
}
protected BasicValue getElementValue(final BasicValue objectArrayValue)
- throws AnalyzerException
- {
+ throws AnalyzerException {
return BasicValue.REFERENCE_VALUE;
}
- protected boolean isSubTypeOf(final BasicValue value, final BasicValue expected) {
+ protected boolean isSubTypeOf(final BasicValue value,
+ final BasicValue expected) {
return value.equals(expected);
}
}
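Relative to BasicInterpreter, the BasicVerifier reformatted above additionally checks that every instruction receives operands of the expected BasicValue, so plugging it into the Analyzer gives a lightweight structural verifier. A minimal sketch of that combination (the helper name and its return convention are illustrative):

    import scala.tools.asm.tree.MethodNode;
    import scala.tools.asm.tree.analysis.Analyzer;
    import scala.tools.asm.tree.analysis.AnalyzerException;
    import scala.tools.asm.tree.analysis.BasicValue;
    import scala.tools.asm.tree.analysis.BasicVerifier;

    class VerifierSketch {
        // Returns null when the method passes BasicVerifier's operand checks,
        // otherwise the AnalyzerException describing expected vs. encountered values.
        static AnalyzerException check(String ownerInternalName, MethodNode mn) {
            try {
                new Analyzer<BasicValue>(new BasicVerifier()).analyze(ownerInternalName, mn);
                return null;
            } catch (AnalyzerException e) {
                return e;
            }
        }
    }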
diff --git a/src/asm/scala/tools/asm/tree/analysis/Frame.java b/src/asm/scala/tools/asm/tree/analysis/Frame.java
index fe19c2c9ae..0d92edc4d6 100644
--- a/src/asm/scala/tools/asm/tree/analysis/Frame.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Frame.java
@@ -44,10 +44,11 @@ import scala.tools.asm.tree.VarInsnNode;
/**
* A symbolic execution stack frame. A stack frame contains a set of local
* variable slots, and an operand stack. Warning: long and double values are
- * represented by <i>two</i> slots in local variables, and by <i>one</i> slot
- * in the operand stack.
+ * represented by <i>two</i> slots in local variables, and by <i>one</i> slot in
+ * the operand stack.
*
- * @param <V> type of the Value used for the analysis.
+ * @param <V>
+ * type of the Value used for the analysis.
*
* @author Eric Bruneton
*/
@@ -77,8 +78,10 @@ public class Frame<V extends Value> {
/**
* Constructs a new frame with the given size.
*
- * @param nLocals the maximum number of local variables of the frame.
- * @param nStack the maximum stack size of the frame.
+ * @param nLocals
+ * the maximum number of local variables of the frame.
+ * @param nStack
+ * the maximum stack size of the frame.
*/
public Frame(final int nLocals, final int nStack) {
this.values = (V[]) new Value[nLocals + nStack];
@@ -88,7 +91,8 @@ public class Frame<V extends Value> {
/**
* Constructs a new frame that is identical to the given frame.
*
- * @param src a frame.
+ * @param src
+ * a frame.
*/
public Frame(final Frame<? extends V> src) {
this(src.locals, src.values.length - src.locals);
@@ -98,7 +102,8 @@ public class Frame<V extends Value> {
/**
* Copies the state of the given frame into this frame.
*
- * @param src a frame.
+ * @param src
+ * a frame.
* @return this frame.
*/
public Frame<V> init(final Frame<? extends V> src) {
@@ -111,8 +116,9 @@ public class Frame<V extends Value> {
/**
* Sets the expected return type of the analyzed method.
*
- * @param v the expected return type of the analyzed method, or
- * <tt>null</tt> if the method returns void.
+ * @param v
+ * the expected return type of the analyzed method, or
+ * <tt>null</tt> if the method returns void.
*/
public void setReturn(final V v) {
returnValue = v;
@@ -130,13 +136,16 @@ public class Frame<V extends Value> {
/**
* Returns the value of the given local variable.
*
- * @param i a local variable index.
+ * @param i
+ * a local variable index.
* @return the value of the given local variable.
- * @throws IndexOutOfBoundsException if the variable does not exist.
+ * @throws IndexOutOfBoundsException
+ * if the variable does not exist.
*/
public V getLocal(final int i) throws IndexOutOfBoundsException {
if (i >= locals) {
- throw new IndexOutOfBoundsException("Trying to access an inexistant local variable");
+ throw new IndexOutOfBoundsException(
+ "Trying to access an inexistant local variable");
}
return values[i];
}
@@ -144,15 +153,18 @@ public class Frame<V extends Value> {
/**
* Sets the value of the given local variable.
*
- * @param i a local variable index.
- * @param value the new value of this local variable.
- * @throws IndexOutOfBoundsException if the variable does not exist.
+ * @param i
+ * a local variable index.
+ * @param value
+ * the new value of this local variable.
+ * @throws IndexOutOfBoundsException
+ * if the variable does not exist.
*/
public void setLocal(final int i, final V value)
- throws IndexOutOfBoundsException
- {
+ throws IndexOutOfBoundsException {
if (i >= locals) {
- throw new IndexOutOfBoundsException("Trying to access an inexistant local variable "+i);
+ throw new IndexOutOfBoundsException(
+ "Trying to access an inexistant local variable " + i);
}
values[i] = value;
}
@@ -170,10 +182,11 @@ public class Frame<V extends Value> {
/**
* Returns the value of the given operand stack slot.
*
- * @param i the index of an operand stack slot.
+ * @param i
+ * the index of an operand stack slot.
* @return the value of the given operand stack slot.
- * @throws IndexOutOfBoundsException if the operand stack slot does not
- * exist.
+ * @throws IndexOutOfBoundsException
+ * if the operand stack slot does not exist.
*/
public V getStack(final int i) throws IndexOutOfBoundsException {
return values[i + locals];
@@ -190,11 +203,13 @@ public class Frame<V extends Value> {
* Pops a value from the operand stack of this frame.
*
* @return the value that has been popped from the stack.
- * @throws IndexOutOfBoundsException if the operand stack is empty.
+ * @throws IndexOutOfBoundsException
+ * if the operand stack is empty.
*/
public V pop() throws IndexOutOfBoundsException {
if (top == 0) {
- throw new IndexOutOfBoundsException("Cannot pop operand off an empty stack.");
+ throw new IndexOutOfBoundsException(
+ "Cannot pop operand off an empty stack.");
}
return values[--top + locals];
}
@@ -202,466 +217,469 @@ public class Frame<V extends Value> {
/**
* Pushes a value into the operand stack of this frame.
*
- * @param value the value that must be pushed into the stack.
- * @throws IndexOutOfBoundsException if the operand stack is full.
+ * @param value
+ * the value that must be pushed into the stack.
+ * @throws IndexOutOfBoundsException
+ * if the operand stack is full.
*/
public void push(final V value) throws IndexOutOfBoundsException {
if (top + locals >= values.length) {
- throw new IndexOutOfBoundsException("Insufficient maximum stack size.");
+ throw new IndexOutOfBoundsException(
+ "Insufficient maximum stack size.");
}
values[top++ + locals] = value;
}
- public void execute(
- final AbstractInsnNode insn,
- final Interpreter<V> interpreter) throws AnalyzerException
- {
+ public void execute(final AbstractInsnNode insn,
+ final Interpreter<V> interpreter) throws AnalyzerException {
V value1, value2, value3, value4;
List<V> values;
int var;
switch (insn.getOpcode()) {
- case Opcodes.NOP:
- break;
- case Opcodes.ACONST_NULL:
- case Opcodes.ICONST_M1:
- case Opcodes.ICONST_0:
- case Opcodes.ICONST_1:
- case Opcodes.ICONST_2:
- case Opcodes.ICONST_3:
- case Opcodes.ICONST_4:
- case Opcodes.ICONST_5:
- case Opcodes.LCONST_0:
- case Opcodes.LCONST_1:
- case Opcodes.FCONST_0:
- case Opcodes.FCONST_1:
- case Opcodes.FCONST_2:
- case Opcodes.DCONST_0:
- case Opcodes.DCONST_1:
- case Opcodes.BIPUSH:
- case Opcodes.SIPUSH:
- case Opcodes.LDC:
- push(interpreter.newOperation(insn));
- break;
- case Opcodes.ILOAD:
- case Opcodes.LLOAD:
- case Opcodes.FLOAD:
- case Opcodes.DLOAD:
- case Opcodes.ALOAD:
- push(interpreter.copyOperation(insn,
- getLocal(((VarInsnNode) insn).var)));
- break;
- case Opcodes.IALOAD:
- case Opcodes.LALOAD:
- case Opcodes.FALOAD:
- case Opcodes.DALOAD:
- case Opcodes.AALOAD:
- case Opcodes.BALOAD:
- case Opcodes.CALOAD:
- case Opcodes.SALOAD:
- value2 = pop();
- value1 = pop();
- push(interpreter.binaryOperation(insn, value1, value2));
- break;
- case Opcodes.ISTORE:
- case Opcodes.LSTORE:
- case Opcodes.FSTORE:
- case Opcodes.DSTORE:
- case Opcodes.ASTORE:
- value1 = interpreter.copyOperation(insn, pop());
- var = ((VarInsnNode) insn).var;
- setLocal(var, value1);
- if (value1.getSize() == 2) {
- setLocal(var + 1, interpreter.newValue(null));
+ case Opcodes.NOP:
+ break;
+ case Opcodes.ACONST_NULL:
+ case Opcodes.ICONST_M1:
+ case Opcodes.ICONST_0:
+ case Opcodes.ICONST_1:
+ case Opcodes.ICONST_2:
+ case Opcodes.ICONST_3:
+ case Opcodes.ICONST_4:
+ case Opcodes.ICONST_5:
+ case Opcodes.LCONST_0:
+ case Opcodes.LCONST_1:
+ case Opcodes.FCONST_0:
+ case Opcodes.FCONST_1:
+ case Opcodes.FCONST_2:
+ case Opcodes.DCONST_0:
+ case Opcodes.DCONST_1:
+ case Opcodes.BIPUSH:
+ case Opcodes.SIPUSH:
+ case Opcodes.LDC:
+ push(interpreter.newOperation(insn));
+ break;
+ case Opcodes.ILOAD:
+ case Opcodes.LLOAD:
+ case Opcodes.FLOAD:
+ case Opcodes.DLOAD:
+ case Opcodes.ALOAD:
+ push(interpreter.copyOperation(insn,
+ getLocal(((VarInsnNode) insn).var)));
+ break;
+ case Opcodes.IALOAD:
+ case Opcodes.LALOAD:
+ case Opcodes.FALOAD:
+ case Opcodes.DALOAD:
+ case Opcodes.AALOAD:
+ case Opcodes.BALOAD:
+ case Opcodes.CALOAD:
+ case Opcodes.SALOAD:
+ value2 = pop();
+ value1 = pop();
+ push(interpreter.binaryOperation(insn, value1, value2));
+ break;
+ case Opcodes.ISTORE:
+ case Opcodes.LSTORE:
+ case Opcodes.FSTORE:
+ case Opcodes.DSTORE:
+ case Opcodes.ASTORE:
+ value1 = interpreter.copyOperation(insn, pop());
+ var = ((VarInsnNode) insn).var;
+ setLocal(var, value1);
+ if (value1.getSize() == 2) {
+ setLocal(var + 1, interpreter.newValue(null));
+ }
+ if (var > 0) {
+ Value local = getLocal(var - 1);
+ if (local != null && local.getSize() == 2) {
+ setLocal(var - 1, interpreter.newValue(null));
}
- if (var > 0) {
- Value local = getLocal(var - 1);
- if (local != null && local.getSize() == 2) {
- setLocal(var - 1, interpreter.newValue(null));
- }
+ }
+ break;
+ case Opcodes.IASTORE:
+ case Opcodes.LASTORE:
+ case Opcodes.FASTORE:
+ case Opcodes.DASTORE:
+ case Opcodes.AASTORE:
+ case Opcodes.BASTORE:
+ case Opcodes.CASTORE:
+ case Opcodes.SASTORE:
+ value3 = pop();
+ value2 = pop();
+ value1 = pop();
+ interpreter.ternaryOperation(insn, value1, value2, value3);
+ break;
+ case Opcodes.POP:
+ if (pop().getSize() == 2) {
+ throw new AnalyzerException(insn, "Illegal use of POP");
+ }
+ break;
+ case Opcodes.POP2:
+ if (pop().getSize() == 1) {
+ if (pop().getSize() != 1) {
+ throw new AnalyzerException(insn, "Illegal use of POP2");
}
- break;
- case Opcodes.IASTORE:
- case Opcodes.LASTORE:
- case Opcodes.FASTORE:
- case Opcodes.DASTORE:
- case Opcodes.AASTORE:
- case Opcodes.BASTORE:
- case Opcodes.CASTORE:
- case Opcodes.SASTORE:
- value3 = pop();
+ }
+ break;
+ case Opcodes.DUP:
+ value1 = pop();
+ if (value1.getSize() != 1) {
+ throw new AnalyzerException(insn, "Illegal use of DUP");
+ }
+ push(value1);
+ push(interpreter.copyOperation(insn, value1));
+ break;
+ case Opcodes.DUP_X1:
+ value1 = pop();
+ value2 = pop();
+ if (value1.getSize() != 1 || value2.getSize() != 1) {
+ throw new AnalyzerException(insn, "Illegal use of DUP_X1");
+ }
+ push(interpreter.copyOperation(insn, value1));
+ push(value2);
+ push(value1);
+ break;
+ case Opcodes.DUP_X2:
+ value1 = pop();
+ if (value1.getSize() == 1) {
value2 = pop();
- value1 = pop();
- interpreter.ternaryOperation(insn, value1, value2, value3);
- break;
- case Opcodes.POP:
- if (pop().getSize() == 2) {
- throw new AnalyzerException(insn, "Illegal use of POP");
- }
- break;
- case Opcodes.POP2:
- if (pop().getSize() == 1) {
- if (pop().getSize() != 1) {
- throw new AnalyzerException(insn, "Illegal use of POP2");
+ if (value2.getSize() == 1) {
+ value3 = pop();
+ if (value3.getSize() == 1) {
+ push(interpreter.copyOperation(insn, value1));
+ push(value3);
+ push(value2);
+ push(value1);
+ break;
}
+ } else {
+ push(interpreter.copyOperation(insn, value1));
+ push(value2);
+ push(value1);
+ break;
}
- break;
- case Opcodes.DUP:
- value1 = pop();
- if (value1.getSize() != 1) {
- throw new AnalyzerException(insn, "Illegal use of DUP");
+ }
+ throw new AnalyzerException(insn, "Illegal use of DUP_X2");
+ case Opcodes.DUP2:
+ value1 = pop();
+ if (value1.getSize() == 1) {
+ value2 = pop();
+ if (value2.getSize() == 1) {
+ push(value2);
+ push(value1);
+ push(interpreter.copyOperation(insn, value2));
+ push(interpreter.copyOperation(insn, value1));
+ break;
}
+ } else {
push(value1);
push(interpreter.copyOperation(insn, value1));
break;
- case Opcodes.DUP_X1:
- value1 = pop();
+ }
+ throw new AnalyzerException(insn, "Illegal use of DUP2");
+ case Opcodes.DUP2_X1:
+ value1 = pop();
+ if (value1.getSize() == 1) {
value2 = pop();
- if (value1.getSize() != 1 || value2.getSize() != 1) {
- throw new AnalyzerException(insn, "Illegal use of DUP_X1");
- }
- push(interpreter.copyOperation(insn, value1));
- push(value2);
- push(value1);
- break;
- case Opcodes.DUP_X2:
- value1 = pop();
- if (value1.getSize() == 1) {
- value2 = pop();
- if (value2.getSize() == 1) {
- value3 = pop();
- if (value3.getSize() == 1) {
- push(interpreter.copyOperation(insn, value1));
- push(value3);
- push(value2);
- push(value1);
- break;
- }
- } else {
+ if (value2.getSize() == 1) {
+ value3 = pop();
+ if (value3.getSize() == 1) {
+ push(interpreter.copyOperation(insn, value2));
push(interpreter.copyOperation(insn, value1));
+ push(value3);
push(value2);
push(value1);
break;
}
}
- throw new AnalyzerException(insn, "Illegal use of DUP_X2");
- case Opcodes.DUP2:
- value1 = pop();
- if (value1.getSize() == 1) {
- value2 = pop();
- if (value2.getSize() == 1) {
- push(value2);
- push(value1);
- push(interpreter.copyOperation(insn, value2));
- push(interpreter.copyOperation(insn, value1));
- break;
- }
- } else {
- push(value1);
+ } else {
+ value2 = pop();
+ if (value2.getSize() == 1) {
push(interpreter.copyOperation(insn, value1));
+ push(value2);
+ push(value1);
break;
}
- throw new AnalyzerException(insn, "Illegal use of DUP2");
- case Opcodes.DUP2_X1:
- value1 = pop();
- if (value1.getSize() == 1) {
- value2 = pop();
- if (value2.getSize() == 1) {
- value3 = pop();
- if (value3.getSize() == 1) {
+ }
+ throw new AnalyzerException(insn, "Illegal use of DUP2_X1");
+ case Opcodes.DUP2_X2:
+ value1 = pop();
+ if (value1.getSize() == 1) {
+ value2 = pop();
+ if (value2.getSize() == 1) {
+ value3 = pop();
+ if (value3.getSize() == 1) {
+ value4 = pop();
+ if (value4.getSize() == 1) {
push(interpreter.copyOperation(insn, value2));
push(interpreter.copyOperation(insn, value1));
+ push(value4);
push(value3);
push(value2);
push(value1);
break;
}
- }
- } else {
- value2 = pop();
- if (value2.getSize() == 1) {
+ } else {
+ push(interpreter.copyOperation(insn, value2));
push(interpreter.copyOperation(insn, value1));
+ push(value3);
push(value2);
push(value1);
break;
}
}
- throw new AnalyzerException(insn, "Illegal use of DUP2_X1");
- case Opcodes.DUP2_X2:
- value1 = pop();
- if (value1.getSize() == 1) {
- value2 = pop();
- if (value2.getSize() == 1) {
- value3 = pop();
- if (value3.getSize() == 1) {
- value4 = pop();
- if (value4.getSize() == 1) {
- push(interpreter.copyOperation(insn, value2));
- push(interpreter.copyOperation(insn, value1));
- push(value4);
- push(value3);
- push(value2);
- push(value1);
- break;
- }
- } else {
- push(interpreter.copyOperation(insn, value2));
- push(interpreter.copyOperation(insn, value1));
- push(value3);
- push(value2);
- push(value1);
- break;
- }
- }
- } else {
- value2 = pop();
- if (value2.getSize() == 1) {
- value3 = pop();
- if (value3.getSize() == 1) {
- push(interpreter.copyOperation(insn, value1));
- push(value3);
- push(value2);
- push(value1);
- break;
- }
- } else {
+ } else {
+ value2 = pop();
+ if (value2.getSize() == 1) {
+ value3 = pop();
+ if (value3.getSize() == 1) {
push(interpreter.copyOperation(insn, value1));
+ push(value3);
push(value2);
push(value1);
break;
}
- }
- throw new AnalyzerException(insn, "Illegal use of DUP2_X2");
- case Opcodes.SWAP:
- value2 = pop();
- value1 = pop();
- if (value1.getSize() != 1 || value2.getSize() != 1) {
- throw new AnalyzerException(insn, "Illegal use of SWAP");
- }
- push(interpreter.copyOperation(insn, value2));
- push(interpreter.copyOperation(insn, value1));
- break;
- case Opcodes.IADD:
- case Opcodes.LADD:
- case Opcodes.FADD:
- case Opcodes.DADD:
- case Opcodes.ISUB:
- case Opcodes.LSUB:
- case Opcodes.FSUB:
- case Opcodes.DSUB:
- case Opcodes.IMUL:
- case Opcodes.LMUL:
- case Opcodes.FMUL:
- case Opcodes.DMUL:
- case Opcodes.IDIV:
- case Opcodes.LDIV:
- case Opcodes.FDIV:
- case Opcodes.DDIV:
- case Opcodes.IREM:
- case Opcodes.LREM:
- case Opcodes.FREM:
- case Opcodes.DREM:
- value2 = pop();
- value1 = pop();
- push(interpreter.binaryOperation(insn, value1, value2));
- break;
- case Opcodes.INEG:
- case Opcodes.LNEG:
- case Opcodes.FNEG:
- case Opcodes.DNEG:
- push(interpreter.unaryOperation(insn, pop()));
- break;
- case Opcodes.ISHL:
- case Opcodes.LSHL:
- case Opcodes.ISHR:
- case Opcodes.LSHR:
- case Opcodes.IUSHR:
- case Opcodes.LUSHR:
- case Opcodes.IAND:
- case Opcodes.LAND:
- case Opcodes.IOR:
- case Opcodes.LOR:
- case Opcodes.IXOR:
- case Opcodes.LXOR:
- value2 = pop();
- value1 = pop();
- push(interpreter.binaryOperation(insn, value1, value2));
- break;
- case Opcodes.IINC:
- var = ((IincInsnNode) insn).var;
- setLocal(var, interpreter.unaryOperation(insn, getLocal(var)));
- break;
- case Opcodes.I2L:
- case Opcodes.I2F:
- case Opcodes.I2D:
- case Opcodes.L2I:
- case Opcodes.L2F:
- case Opcodes.L2D:
- case Opcodes.F2I:
- case Opcodes.F2L:
- case Opcodes.F2D:
- case Opcodes.D2I:
- case Opcodes.D2L:
- case Opcodes.D2F:
- case Opcodes.I2B:
- case Opcodes.I2C:
- case Opcodes.I2S:
- push(interpreter.unaryOperation(insn, pop()));
- break;
- case Opcodes.LCMP:
- case Opcodes.FCMPL:
- case Opcodes.FCMPG:
- case Opcodes.DCMPL:
- case Opcodes.DCMPG:
- value2 = pop();
- value1 = pop();
- push(interpreter.binaryOperation(insn, value1, value2));
- break;
- case Opcodes.IFEQ:
- case Opcodes.IFNE:
- case Opcodes.IFLT:
- case Opcodes.IFGE:
- case Opcodes.IFGT:
- case Opcodes.IFLE:
- interpreter.unaryOperation(insn, pop());
- break;
- case Opcodes.IF_ICMPEQ:
- case Opcodes.IF_ICMPNE:
- case Opcodes.IF_ICMPLT:
- case Opcodes.IF_ICMPGE:
- case Opcodes.IF_ICMPGT:
- case Opcodes.IF_ICMPLE:
- case Opcodes.IF_ACMPEQ:
- case Opcodes.IF_ACMPNE:
- value2 = pop();
- value1 = pop();
- interpreter.binaryOperation(insn, value1, value2);
- break;
- case Opcodes.GOTO:
- break;
- case Opcodes.JSR:
- push(interpreter.newOperation(insn));
- break;
- case Opcodes.RET:
- break;
- case Opcodes.TABLESWITCH:
- case Opcodes.LOOKUPSWITCH:
- interpreter.unaryOperation(insn, pop());
- break;
- case Opcodes.IRETURN:
- case Opcodes.LRETURN:
- case Opcodes.FRETURN:
- case Opcodes.DRETURN:
- case Opcodes.ARETURN:
- value1 = pop();
- interpreter.unaryOperation(insn, value1);
- interpreter.returnOperation(insn, value1, returnValue);
- break;
- case Opcodes.RETURN:
- if (returnValue != null) {
- throw new AnalyzerException(insn, "Incompatible return type");
- }
- break;
- case Opcodes.GETSTATIC:
- push(interpreter.newOperation(insn));
- break;
- case Opcodes.PUTSTATIC:
- interpreter.unaryOperation(insn, pop());
- break;
- case Opcodes.GETFIELD:
- push(interpreter.unaryOperation(insn, pop()));
- break;
- case Opcodes.PUTFIELD:
- value2 = pop();
- value1 = pop();
- interpreter.binaryOperation(insn, value1, value2);
- break;
- case Opcodes.INVOKEVIRTUAL:
- case Opcodes.INVOKESPECIAL:
- case Opcodes.INVOKESTATIC:
- case Opcodes.INVOKEINTERFACE: {
- values = new ArrayList<V>();
- String desc = ((MethodInsnNode) insn).desc;
- for (int i = Type.getArgumentTypes(desc).length; i > 0; --i) {
- values.add(0, pop());
- }
- if (insn.getOpcode() != Opcodes.INVOKESTATIC) {
- values.add(0, pop());
- }
- if (Type.getReturnType(desc) == Type.VOID_TYPE) {
- interpreter.naryOperation(insn, values);
} else {
- push(interpreter.naryOperation(insn, values));
+ push(interpreter.copyOperation(insn, value1));
+ push(value2);
+ push(value1);
+ break;
}
- break;
}
- case Opcodes.INVOKEDYNAMIC: {
- values = new ArrayList<V>();
- String desc = ((InvokeDynamicInsnNode) insn).desc;
- for (int i = Type.getArgumentTypes(desc).length; i > 0; --i) {
- values.add(0, pop());
- }
- if (Type.getReturnType(desc) == Type.VOID_TYPE) {
- interpreter.naryOperation(insn, values);
- } else {
- push(interpreter.naryOperation(insn, values));
- }
- break;
+ throw new AnalyzerException(insn, "Illegal use of DUP2_X2");
+ case Opcodes.SWAP:
+ value2 = pop();
+ value1 = pop();
+ if (value1.getSize() != 1 || value2.getSize() != 1) {
+ throw new AnalyzerException(insn, "Illegal use of SWAP");
}
- case Opcodes.NEW:
- push(interpreter.newOperation(insn));
- break;
- case Opcodes.NEWARRAY:
- case Opcodes.ANEWARRAY:
- case Opcodes.ARRAYLENGTH:
- push(interpreter.unaryOperation(insn, pop()));
- break;
- case Opcodes.ATHROW:
- interpreter.unaryOperation(insn, pop());
- break;
- case Opcodes.CHECKCAST:
- case Opcodes.INSTANCEOF:
- push(interpreter.unaryOperation(insn, pop()));
- break;
- case Opcodes.MONITORENTER:
- case Opcodes.MONITOREXIT:
- interpreter.unaryOperation(insn, pop());
- break;
- case Opcodes.MULTIANEWARRAY:
- values = new ArrayList<V>();
- for (int i = ((MultiANewArrayInsnNode) insn).dims; i > 0; --i) {
- values.add(0, pop());
- }
+ push(interpreter.copyOperation(insn, value2));
+ push(interpreter.copyOperation(insn, value1));
+ break;
+ case Opcodes.IADD:
+ case Opcodes.LADD:
+ case Opcodes.FADD:
+ case Opcodes.DADD:
+ case Opcodes.ISUB:
+ case Opcodes.LSUB:
+ case Opcodes.FSUB:
+ case Opcodes.DSUB:
+ case Opcodes.IMUL:
+ case Opcodes.LMUL:
+ case Opcodes.FMUL:
+ case Opcodes.DMUL:
+ case Opcodes.IDIV:
+ case Opcodes.LDIV:
+ case Opcodes.FDIV:
+ case Opcodes.DDIV:
+ case Opcodes.IREM:
+ case Opcodes.LREM:
+ case Opcodes.FREM:
+ case Opcodes.DREM:
+ value2 = pop();
+ value1 = pop();
+ push(interpreter.binaryOperation(insn, value1, value2));
+ break;
+ case Opcodes.INEG:
+ case Opcodes.LNEG:
+ case Opcodes.FNEG:
+ case Opcodes.DNEG:
+ push(interpreter.unaryOperation(insn, pop()));
+ break;
+ case Opcodes.ISHL:
+ case Opcodes.LSHL:
+ case Opcodes.ISHR:
+ case Opcodes.LSHR:
+ case Opcodes.IUSHR:
+ case Opcodes.LUSHR:
+ case Opcodes.IAND:
+ case Opcodes.LAND:
+ case Opcodes.IOR:
+ case Opcodes.LOR:
+ case Opcodes.IXOR:
+ case Opcodes.LXOR:
+ value2 = pop();
+ value1 = pop();
+ push(interpreter.binaryOperation(insn, value1, value2));
+ break;
+ case Opcodes.IINC:
+ var = ((IincInsnNode) insn).var;
+ setLocal(var, interpreter.unaryOperation(insn, getLocal(var)));
+ break;
+ case Opcodes.I2L:
+ case Opcodes.I2F:
+ case Opcodes.I2D:
+ case Opcodes.L2I:
+ case Opcodes.L2F:
+ case Opcodes.L2D:
+ case Opcodes.F2I:
+ case Opcodes.F2L:
+ case Opcodes.F2D:
+ case Opcodes.D2I:
+ case Opcodes.D2L:
+ case Opcodes.D2F:
+ case Opcodes.I2B:
+ case Opcodes.I2C:
+ case Opcodes.I2S:
+ push(interpreter.unaryOperation(insn, pop()));
+ break;
+ case Opcodes.LCMP:
+ case Opcodes.FCMPL:
+ case Opcodes.FCMPG:
+ case Opcodes.DCMPL:
+ case Opcodes.DCMPG:
+ value2 = pop();
+ value1 = pop();
+ push(interpreter.binaryOperation(insn, value1, value2));
+ break;
+ case Opcodes.IFEQ:
+ case Opcodes.IFNE:
+ case Opcodes.IFLT:
+ case Opcodes.IFGE:
+ case Opcodes.IFGT:
+ case Opcodes.IFLE:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ case Opcodes.IF_ICMPEQ:
+ case Opcodes.IF_ICMPNE:
+ case Opcodes.IF_ICMPLT:
+ case Opcodes.IF_ICMPGE:
+ case Opcodes.IF_ICMPGT:
+ case Opcodes.IF_ICMPLE:
+ case Opcodes.IF_ACMPEQ:
+ case Opcodes.IF_ACMPNE:
+ value2 = pop();
+ value1 = pop();
+ interpreter.binaryOperation(insn, value1, value2);
+ break;
+ case Opcodes.GOTO:
+ break;
+ case Opcodes.JSR:
+ push(interpreter.newOperation(insn));
+ break;
+ case Opcodes.RET:
+ break;
+ case Opcodes.TABLESWITCH:
+ case Opcodes.LOOKUPSWITCH:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ case Opcodes.IRETURN:
+ case Opcodes.LRETURN:
+ case Opcodes.FRETURN:
+ case Opcodes.DRETURN:
+ case Opcodes.ARETURN:
+ value1 = pop();
+ interpreter.unaryOperation(insn, value1);
+ interpreter.returnOperation(insn, value1, returnValue);
+ break;
+ case Opcodes.RETURN:
+ if (returnValue != null) {
+ throw new AnalyzerException(insn, "Incompatible return type");
+ }
+ break;
+ case Opcodes.GETSTATIC:
+ push(interpreter.newOperation(insn));
+ break;
+ case Opcodes.PUTSTATIC:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ case Opcodes.GETFIELD:
+ push(interpreter.unaryOperation(insn, pop()));
+ break;
+ case Opcodes.PUTFIELD:
+ value2 = pop();
+ value1 = pop();
+ interpreter.binaryOperation(insn, value1, value2);
+ break;
+ case Opcodes.INVOKEVIRTUAL:
+ case Opcodes.INVOKESPECIAL:
+ case Opcodes.INVOKESTATIC:
+ case Opcodes.INVOKEINTERFACE: {
+ values = new ArrayList<V>();
+ String desc = ((MethodInsnNode) insn).desc;
+ for (int i = Type.getArgumentTypes(desc).length; i > 0; --i) {
+ values.add(0, pop());
+ }
+ if (insn.getOpcode() != Opcodes.INVOKESTATIC) {
+ values.add(0, pop());
+ }
+ if (Type.getReturnType(desc) == Type.VOID_TYPE) {
+ interpreter.naryOperation(insn, values);
+ } else {
push(interpreter.naryOperation(insn, values));
- break;
- case Opcodes.IFNULL:
- case Opcodes.IFNONNULL:
- interpreter.unaryOperation(insn, pop());
- break;
- default:
- throw new RuntimeException("Illegal opcode "+insn.getOpcode());
+ }
+ break;
+ }
+ case Opcodes.INVOKEDYNAMIC: {
+ values = new ArrayList<V>();
+ String desc = ((InvokeDynamicInsnNode) insn).desc;
+ for (int i = Type.getArgumentTypes(desc).length; i > 0; --i) {
+ values.add(0, pop());
+ }
+ if (Type.getReturnType(desc) == Type.VOID_TYPE) {
+ interpreter.naryOperation(insn, values);
+ } else {
+ push(interpreter.naryOperation(insn, values));
+ }
+ break;
+ }
+ case Opcodes.NEW:
+ push(interpreter.newOperation(insn));
+ break;
+ case Opcodes.NEWARRAY:
+ case Opcodes.ANEWARRAY:
+ case Opcodes.ARRAYLENGTH:
+ push(interpreter.unaryOperation(insn, pop()));
+ break;
+ case Opcodes.ATHROW:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ case Opcodes.CHECKCAST:
+ case Opcodes.INSTANCEOF:
+ push(interpreter.unaryOperation(insn, pop()));
+ break;
+ case Opcodes.MONITORENTER:
+ case Opcodes.MONITOREXIT:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ case Opcodes.MULTIANEWARRAY:
+ values = new ArrayList<V>();
+ for (int i = ((MultiANewArrayInsnNode) insn).dims; i > 0; --i) {
+ values.add(0, pop());
+ }
+ push(interpreter.naryOperation(insn, values));
+ break;
+ case Opcodes.IFNULL:
+ case Opcodes.IFNONNULL:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ default:
+ throw new RuntimeException("Illegal opcode " + insn.getOpcode());
}
}
/**
* Merges this frame with the given frame.
*
- * @param frame a frame.
- * @param interpreter the interpreter used to merge values.
+ * @param frame
+ * a frame.
+ * @param interpreter
+ * the interpreter used to merge values.
* @return <tt>true</tt> if this frame has been changed as a result of the
* merge operation, or <tt>false</tt> otherwise.
- * @throws AnalyzerException if the frames have incompatible sizes.
+ * @throws AnalyzerException
+ * if the frames have incompatible sizes.
*/
- public boolean merge(final Frame<? extends V> frame, final Interpreter<V> interpreter)
- throws AnalyzerException
- {
+ public boolean merge(final Frame<? extends V> frame,
+ final Interpreter<V> interpreter) throws AnalyzerException {
if (top != frame.top) {
throw new AnalyzerException(null, "Incompatible stack heights");
}
boolean changes = false;
for (int i = 0; i < locals + top; ++i) {
V v = interpreter.merge(values[i], frame.values[i]);
- if (v != values[i]) {
+ if (!v.equals(values[i])) {
values[i] = v;
changes = true;
}
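
The execute branches above mirror the JVM's stack effect for each opcode, and Frame exposes push, pop and execute publicly, so a single instruction can be simulated by hand. A minimal sketch, assuming the shaded scala.tools.asm packages bundled in this repository and the stock BasicInterpreter/BasicValue classes (the SwapByHand class name is invented for illustration):

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.tree.InsnNode;
    import scala.tools.asm.tree.analysis.AnalyzerException;
    import scala.tools.asm.tree.analysis.BasicInterpreter;
    import scala.tools.asm.tree.analysis.BasicValue;
    import scala.tools.asm.tree.analysis.Frame;

    class SwapByHand {
        public static void main(String[] args) throws AnalyzerException {
            // A frame with no locals and room for two stack slots.
            Frame<BasicValue> f = new Frame<BasicValue>(0, 2);
            f.push(BasicValue.INT_VALUE);
            f.push(BasicValue.FLOAT_VALUE);
            // Drives the SWAP branch of execute() shown above: the two
            // category-1 values trade places on the simulated stack.
            f.execute(new InsnNode(Opcodes.SWAP), new BasicInterpreter());
            System.out.println(f.getStack(0) + " " + f.getStack(1)); // prints "F I"
        }
    }
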
@@ -672,9 +690,11 @@ public class Frame<V extends Value> {
/**
* Merges this frame with the given frame (case of a RET instruction).
*
- * @param frame a frame
- * @param access the local variables that have been accessed by the
- * subroutine to which the RET instruction corresponds.
+ * @param frame
+ * a frame
+ * @param access
+ * the local variables that have been accessed by the subroutine
+ * to which the RET instruction corresponds.
* @return <tt>true</tt> if this frame has been changed as a result of the
* merge operation, or <tt>false</tt> otherwise.
*/
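
In practice, execute and merge are driven by the data-flow Analyzer of this package rather than called directly: the analyzer executes every instruction against an Interpreter and merges frames at control-flow joins until a fix point is reached. A hedged usage sketch (Analyzer, BasicInterpreter and Frame are the standard ASM tree-analysis classes bundled here; the FrameDemo driver itself is hypothetical):

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.tree.ClassNode;
    import scala.tools.asm.tree.MethodNode;
    import scala.tools.asm.tree.analysis.Analyzer;
    import scala.tools.asm.tree.analysis.BasicInterpreter;
    import scala.tools.asm.tree.analysis.BasicValue;
    import scala.tools.asm.tree.analysis.Frame;

    public class FrameDemo {
        public static void main(String[] args) throws Exception {
            // Load a class from the classpath into the tree API.
            ClassNode cn = new ClassNode();
            new ClassReader("java.lang.String").accept(cn, ClassReader.SKIP_DEBUG);

            for (Object m : cn.methods) { // raw-list friendly iteration
                MethodNode mn = (MethodNode) m;
                // analyze() calls Frame.execute and Frame.merge internally.
                Frame<BasicValue>[] frames =
                        new Analyzer<BasicValue>(new BasicInterpreter()).analyze(cn.name, mn);
                System.out.println(mn.name + ": " + frames.length + " frames");
            }
        }
    }
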
diff --git a/src/asm/scala/tools/asm/tree/analysis/Interpreter.java b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
index 930c8f4af8..56f4bedc00 100644
--- a/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
@@ -42,7 +42,8 @@ import scala.tools.asm.tree.AbstractInsnNode;
* various semantic interpreters, without needing to duplicate the code to
* simulate the transfer of values.
*
- * @param <V> type of the Value used for the analysis.
+ * @param <V>
+ * type of the Value used for the analysis.
*
* @author Eric Bruneton
*/
@@ -57,12 +58,13 @@ public abstract class Interpreter<V extends Value> {
/**
* Creates a new value that represents the given type.
*
- * Called for method parameters (including <code>this</code>),
- * exception handler variable and with <code>null</code> type
- * for variables reserved by long and double types.
+ * Called for method parameters (including <code>this</code>), exception
+ * handler variable and with <code>null</code> type for variables reserved
+ * by long and double types.
*
- * @param type a primitive or reference type, or <tt>null</tt> to
- * represent an uninitialized value.
+ * @param type
+ * a primitive or reference type, or <tt>null</tt> to represent
+ * an uninitialized value.
* @return a value that represents the given type. The size of the returned
* value must be equal to the size of the given type.
*/
@@ -76,9 +78,11 @@ public abstract class Interpreter<V extends Value> {
* ICONST_5, LCONST_0, LCONST_1, FCONST_0, FCONST_1, FCONST_2, DCONST_0,
* DCONST_1, BIPUSH, SIPUSH, LDC, JSR, GETSTATIC, NEW
*
- * @param insn the bytecode instruction to be interpreted.
+ * @param insn
+ * the bytecode instruction to be interpreted.
* @return the result of the interpretation of the given instruction.
- * @throws AnalyzerException if an error occured during the interpretation.
+ * @throws AnalyzerException
+ *             if an error occurred during the interpretation.
*/
public abstract V newOperation(AbstractInsnNode insn)
throws AnalyzerException;
@@ -90,11 +94,14 @@ public abstract class Interpreter<V extends Value> {
* ILOAD, LLOAD, FLOAD, DLOAD, ALOAD, ISTORE, LSTORE, FSTORE, DSTORE,
* ASTORE, DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2, SWAP
*
- * @param insn the bytecode instruction to be interpreted.
- * @param value the value that must be moved by the instruction.
+ * @param insn
+ * the bytecode instruction to be interpreted.
+ * @param value
+ * the value that must be moved by the instruction.
* @return the result of the interpretation of the given instruction. The
* returned value must be <tt>equal</tt> to the given value.
- * @throws AnalyzerException if an error occured during the interpretation.
+ * @throws AnalyzerException
+ *             if an error occurred during the interpretation.
*/
public abstract V copyOperation(AbstractInsnNode insn, V value)
throws AnalyzerException;
@@ -109,10 +116,13 @@ public abstract class Interpreter<V extends Value> {
* PUTSTATIC, GETFIELD, NEWARRAY, ANEWARRAY, ARRAYLENGTH, ATHROW, CHECKCAST,
* INSTANCEOF, MONITORENTER, MONITOREXIT, IFNULL, IFNONNULL
*
- * @param insn the bytecode instruction to be interpreted.
- * @param value the argument of the instruction to be interpreted.
+ * @param insn
+ * the bytecode instruction to be interpreted.
+ * @param value
+ * the argument of the instruction to be interpreted.
* @return the result of the interpretation of the given instruction.
- * @throws AnalyzerException if an error occured during the interpretation.
+ * @throws AnalyzerException
+ *             if an error occurred during the interpretation.
*/
public abstract V unaryOperation(AbstractInsnNode insn, V value)
throws AnalyzerException;
@@ -128,11 +138,15 @@ public abstract class Interpreter<V extends Value> {
* DCMPG, IF_ICMPEQ, IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE,
* IF_ACMPEQ, IF_ACMPNE, PUTFIELD
*
- * @param insn the bytecode instruction to be interpreted.
- * @param value1 the first argument of the instruction to be interpreted.
- * @param value2 the second argument of the instruction to be interpreted.
+ * @param insn
+ * the bytecode instruction to be interpreted.
+ * @param value1
+ * the first argument of the instruction to be interpreted.
+ * @param value2
+ * the second argument of the instruction to be interpreted.
* @return the result of the interpretation of the given instruction.
- * @throws AnalyzerException if an error occured during the interpretation.
+ * @throws AnalyzerException
+ *             if an error occurred during the interpretation.
*/
public abstract V binaryOperation(AbstractInsnNode insn, V value1, V value2)
throws AnalyzerException;
@@ -143,18 +157,20 @@ public abstract class Interpreter<V extends Value> {
*
* IASTORE, LASTORE, FASTORE, DASTORE, AASTORE, BASTORE, CASTORE, SASTORE
*
- * @param insn the bytecode instruction to be interpreted.
- * @param value1 the first argument of the instruction to be interpreted.
- * @param value2 the second argument of the instruction to be interpreted.
- * @param value3 the third argument of the instruction to be interpreted.
+ * @param insn
+ * the bytecode instruction to be interpreted.
+ * @param value1
+ * the first argument of the instruction to be interpreted.
+ * @param value2
+ * the second argument of the instruction to be interpreted.
+ * @param value3
+ * the third argument of the instruction to be interpreted.
* @return the result of the interpretation of the given instruction.
- * @throws AnalyzerException if an error occured during the interpretation.
+ * @throws AnalyzerException
+ *             if an error occurred during the interpretation.
*/
- public abstract V ternaryOperation(
- AbstractInsnNode insn,
- V value1,
- V value2,
- V value3) throws AnalyzerException;
+ public abstract V ternaryOperation(AbstractInsnNode insn, V value1,
+ V value2, V value3) throws AnalyzerException;
/**
* Interprets a bytecode instruction with a variable number of arguments.
@@ -163,14 +179,16 @@ public abstract class Interpreter<V extends Value> {
* INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC, INVOKEINTERFACE,
* MULTIANEWARRAY and INVOKEDYNAMIC
*
- * @param insn the bytecode instruction to be interpreted.
- * @param values the arguments of the instruction to be interpreted.
+ * @param insn
+ * the bytecode instruction to be interpreted.
+ * @param values
+ * the arguments of the instruction to be interpreted.
* @return the result of the interpretation of the given instruction.
- * @throws AnalyzerException if an error occured during the interpretation.
+ * @throws AnalyzerException
+ *             if an error occurred during the interpretation.
*/
- public abstract V naryOperation(
- AbstractInsnNode insn,
- List< ? extends V> values) throws AnalyzerException;
+ public abstract V naryOperation(AbstractInsnNode insn,
+ List<? extends V> values) throws AnalyzerException;
/**
* Interprets a bytecode return instruction. This method is called for the
@@ -178,15 +196,17 @@ public abstract class Interpreter<V extends Value> {
*
* IRETURN, LRETURN, FRETURN, DRETURN, ARETURN
*
- * @param insn the bytecode instruction to be interpreted.
- * @param value the argument of the instruction to be interpreted.
- * @param expected the expected return type of the analyzed method.
- * @throws AnalyzerException if an error occured during the interpretation.
+ * @param insn
+ * the bytecode instruction to be interpreted.
+ * @param value
+ * the argument of the instruction to be interpreted.
+ * @param expected
+ * the expected return type of the analyzed method.
+ * @throws AnalyzerException
+ *             if an error occurred during the interpretation.
*/
- public abstract void returnOperation(
- AbstractInsnNode insn,
- V value,
- V expected) throws AnalyzerException;
+ public abstract void returnOperation(AbstractInsnNode insn, V value,
+ V expected) throws AnalyzerException;
/**
* Merges two values. The merge operation must return a value that
@@ -195,8 +215,10 @@ public abstract class Interpreter<V extends Value> {
* values are integer intervals, the merged value must be an interval that
* contains the previous ones. Likewise for other types of values).
*
- * @param v a value.
- * @param w another value.
+ * @param v
+ * a value.
+ * @param w
+ * another value.
* @return the merged value. If the merged value is equal to <tt>v</tt>,
* this method <i>must</i> return <tt>v</tt>.
*/
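
Interpreter is the extension point of this analysis package: a concrete analysis supplies the *Operation callbacks documented above and the Analyzer handles all stack and local-variable bookkeeping. As a hedged illustration, a subclass of the bundled BasicInterpreter could record every call site it is asked to simulate (only the overridden signature comes from the ASM API; the class name is invented for this sketch):

    import java.util.ArrayList;
    import java.util.List;
    import scala.tools.asm.tree.AbstractInsnNode;
    import scala.tools.asm.tree.MethodInsnNode;
    import scala.tools.asm.tree.analysis.AnalyzerException;
    import scala.tools.asm.tree.analysis.BasicInterpreter;
    import scala.tools.asm.tree.analysis.BasicValue;

    // Hypothetical: collect the invocations seen during an analysis; pass an
    // instance to new Analyzer<BasicValue>(...) and read 'calls' afterwards.
    class CallCollectingInterpreter extends BasicInterpreter {
        final List<MethodInsnNode> calls = new ArrayList<MethodInsnNode>();

        @Override
        public BasicValue naryOperation(final AbstractInsnNode insn,
                final List<? extends BasicValue> values) throws AnalyzerException {
            if (insn instanceof MethodInsnNode) {
                calls.add((MethodInsnNode) insn);
            }
            return super.naryOperation(insn, values);
        }
    }
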
diff --git a/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java b/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java
index c4f515d328..eaecd057ea 100644
--- a/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java
+++ b/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java
@@ -79,15 +79,15 @@ public class SimpleVerifier extends BasicVerifier {
* Constructs a new {@link SimpleVerifier} to verify a specific class. This
* class will not be loaded into the JVM since it may be incorrect.
*
- * @param currentClass the class that is verified.
- * @param currentSuperClass the super class of the class that is verified.
- * @param isInterface if the class that is verified is an interface.
+ * @param currentClass
+ * the class that is verified.
+ * @param currentSuperClass
+ * the super class of the class that is verified.
+ * @param isInterface
+ * if the class that is verified is an interface.
*/
- public SimpleVerifier(
- final Type currentClass,
- final Type currentSuperClass,
- final boolean isInterface)
- {
+ public SimpleVerifier(final Type currentClass,
+ final Type currentSuperClass, final boolean isInterface) {
this(currentClass, currentSuperClass, null, isInterface);
}
@@ -95,32 +95,25 @@ public class SimpleVerifier extends BasicVerifier {
* Constructs a new {@link SimpleVerifier} to verify a specific class. This
* class will not be loaded into the JVM since it may be incorrect.
*
- * @param currentClass the class that is verified.
- * @param currentSuperClass the super class of the class that is verified.
- * @param currentClassInterfaces the interfaces implemented by the class
- * that is verified.
- * @param isInterface if the class that is verified is an interface.
+ * @param currentClass
+ * the class that is verified.
+ * @param currentSuperClass
+ * the super class of the class that is verified.
+ * @param currentClassInterfaces
+ * the interfaces implemented by the class that is verified.
+ * @param isInterface
+ * if the class that is verified is an interface.
*/
- public SimpleVerifier(
- final Type currentClass,
- final Type currentSuperClass,
- final List<Type> currentClassInterfaces,
- final boolean isInterface)
- {
- this(ASM4,
- currentClass,
- currentSuperClass,
- currentClassInterfaces,
+ public SimpleVerifier(final Type currentClass,
+ final Type currentSuperClass,
+ final List<Type> currentClassInterfaces, final boolean isInterface) {
+ this(ASM4, currentClass, currentSuperClass, currentClassInterfaces,
isInterface);
}
- protected SimpleVerifier(
- final int api,
- final Type currentClass,
- final Type currentSuperClass,
- final List<Type> currentClassInterfaces,
- final boolean isInterface)
- {
+ protected SimpleVerifier(final int api, final Type currentClass,
+ final Type currentSuperClass,
+ final List<Type> currentClassInterfaces, final boolean isInterface) {
super(api);
this.currentClass = currentClass;
this.currentSuperClass = currentSuperClass;
@@ -133,7 +126,8 @@ public class SimpleVerifier extends BasicVerifier {
* classes. This is useful if you are verifying multiple interdependent
* classes.
*
- * @param loader a <code>ClassLoader</code> to use
+ * @param loader
+ * a <code>ClassLoader</code> to use
*/
public void setClassLoader(final ClassLoader loader) {
this.loader = loader;
@@ -148,11 +142,11 @@ public class SimpleVerifier extends BasicVerifier {
boolean isArray = type.getSort() == Type.ARRAY;
if (isArray) {
switch (type.getElementType().getSort()) {
- case Type.BOOLEAN:
- case Type.CHAR:
- case Type.BYTE:
- case Type.SHORT:
- return new BasicValue(type);
+ case Type.BOOLEAN:
+ case Type.CHAR:
+ case Type.BYTE:
+ case Type.SHORT:
+ return new BasicValue(type);
}
}
@@ -181,8 +175,7 @@ public class SimpleVerifier extends BasicVerifier {
@Override
protected BasicValue getElementValue(final BasicValue objectArrayValue)
- throws AnalyzerException
- {
+ throws AnalyzerException {
Type arrayType = objectArrayValue.getType();
if (arrayType != null) {
if (arrayType.getSort() == Type.ARRAY) {
@@ -196,28 +189,28 @@ public class SimpleVerifier extends BasicVerifier {
}
@Override
- protected boolean isSubTypeOf(final BasicValue value, final BasicValue expected) {
+ protected boolean isSubTypeOf(final BasicValue value,
+ final BasicValue expected) {
Type expectedType = expected.getType();
Type type = value.getType();
switch (expectedType.getSort()) {
- case Type.INT:
- case Type.FLOAT:
- case Type.LONG:
- case Type.DOUBLE:
- return type.equals(expectedType);
- case Type.ARRAY:
- case Type.OBJECT:
- if ("Lnull;".equals(type.getDescriptor())) {
- return true;
- } else if (type.getSort() == Type.OBJECT
- || type.getSort() == Type.ARRAY)
- {
- return isAssignableFrom(expectedType, type);
- } else {
- return false;
- }
- default:
- throw new Error("Internal error");
+ case Type.INT:
+ case Type.FLOAT:
+ case Type.LONG:
+ case Type.DOUBLE:
+ return type.equals(expectedType);
+ case Type.ARRAY:
+ case Type.OBJECT:
+ if ("Lnull;".equals(type.getDescriptor())) {
+ return true;
+ } else if (type.getSort() == Type.OBJECT
+ || type.getSort() == Type.ARRAY) {
+ return isAssignableFrom(expectedType, type);
+ } else {
+ return false;
+ }
+ default:
+ throw new Error("Internal error");
}
}
@@ -227,11 +220,9 @@ public class SimpleVerifier extends BasicVerifier {
Type t = v.getType();
Type u = w.getType();
if (t != null
- && (t.getSort() == Type.OBJECT || t.getSort() == Type.ARRAY))
- {
+ && (t.getSort() == Type.OBJECT || t.getSort() == Type.ARRAY)) {
if (u != null
- && (u.getSort() == Type.OBJECT || u.getSort() == Type.ARRAY))
- {
+ && (u.getSort() == Type.OBJECT || u.getSort() == Type.ARRAY)) {
if ("Lnull;".equals(t.getDescriptor())) {
return w;
}
@@ -288,7 +279,8 @@ public class SimpleVerifier extends BasicVerifier {
return false;
} else {
if (isInterface) {
- return u.getSort() == Type.OBJECT || u.getSort() == Type.ARRAY;
+ return u.getSort() == Type.OBJECT
+ || u.getSort() == Type.ARRAY;
}
return isAssignableFrom(t, getSuperClass(u));
}
@@ -318,8 +310,7 @@ public class SimpleVerifier extends BasicVerifier {
try {
if (t.getSort() == Type.ARRAY) {
return Class.forName(t.getDescriptor().replace('/', '.'),
- false,
- loader);
+ false, loader);
}
return Class.forName(t.getClassName(), false, loader);
} catch (ClassNotFoundException e) {
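
SimpleVerifier plugs the subtype checks above into the same Analyzer machinery; setClassLoader lets those checks resolve application classes instead of only the boot class path. A sketch of typical use, assuming the analyzed class is reachable on the classpath (VerifyDemo is a made-up driver):

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.tree.ClassNode;
    import scala.tools.asm.tree.MethodNode;
    import scala.tools.asm.tree.analysis.Analyzer;
    import scala.tools.asm.tree.analysis.AnalyzerException;
    import scala.tools.asm.tree.analysis.BasicValue;
    import scala.tools.asm.tree.analysis.SimpleVerifier;

    public class VerifyDemo {
        public static void main(String[] args) throws Exception {
            ClassNode cn = new ClassNode();
            new ClassReader(args[0]).accept(cn, ClassReader.SKIP_DEBUG);

            SimpleVerifier verifier = new SimpleVerifier();
            // Resolve isAssignableFrom checks with the application loader.
            verifier.setClassLoader(VerifyDemo.class.getClassLoader());

            for (Object m : cn.methods) {
                MethodNode mn = (MethodNode) m;
                try {
                    new Analyzer<BasicValue>(verifier).analyze(cn.name, mn);
                } catch (AnalyzerException e) {
                    System.err.println(cn.name + '.' + mn.name + ": " + e.getMessage());
                }
            }
        }
    }
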
diff --git a/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java b/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java
index 067200b51e..a68086c073 100644
--- a/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java
+++ b/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java
@@ -47,8 +47,7 @@ import scala.tools.asm.tree.MethodInsnNode;
* @author Eric Bruneton
*/
public class SourceInterpreter extends Interpreter<SourceValue> implements
- Opcodes
-{
+ Opcodes {
public SourceInterpreter() {
super(ASM4);
@@ -70,125 +69,118 @@ public class SourceInterpreter extends Interpreter<SourceValue> implements
public SourceValue newOperation(final AbstractInsnNode insn) {
int size;
switch (insn.getOpcode()) {
- case LCONST_0:
- case LCONST_1:
- case DCONST_0:
- case DCONST_1:
- size = 2;
- break;
- case LDC:
- Object cst = ((LdcInsnNode) insn).cst;
- size = cst instanceof Long || cst instanceof Double ? 2 : 1;
- break;
- case GETSTATIC:
- size = Type.getType(((FieldInsnNode) insn).desc).getSize();
- break;
- default:
- size = 1;
+ case LCONST_0:
+ case LCONST_1:
+ case DCONST_0:
+ case DCONST_1:
+ size = 2;
+ break;
+ case LDC:
+ Object cst = ((LdcInsnNode) insn).cst;
+ size = cst instanceof Long || cst instanceof Double ? 2 : 1;
+ break;
+ case GETSTATIC:
+ size = Type.getType(((FieldInsnNode) insn).desc).getSize();
+ break;
+ default:
+ size = 1;
}
return new SourceValue(size, insn);
}
@Override
- public SourceValue copyOperation(final AbstractInsnNode insn, final SourceValue value) {
+ public SourceValue copyOperation(final AbstractInsnNode insn,
+ final SourceValue value) {
return new SourceValue(value.getSize(), insn);
}
@Override
- public SourceValue unaryOperation(final AbstractInsnNode insn, final SourceValue value)
- {
+ public SourceValue unaryOperation(final AbstractInsnNode insn,
+ final SourceValue value) {
int size;
switch (insn.getOpcode()) {
- case LNEG:
- case DNEG:
- case I2L:
- case I2D:
- case L2D:
- case F2L:
- case F2D:
- case D2L:
- size = 2;
- break;
- case GETFIELD:
- size = Type.getType(((FieldInsnNode) insn).desc).getSize();
- break;
- default:
- size = 1;
+ case LNEG:
+ case DNEG:
+ case I2L:
+ case I2D:
+ case L2D:
+ case F2L:
+ case F2D:
+ case D2L:
+ size = 2;
+ break;
+ case GETFIELD:
+ size = Type.getType(((FieldInsnNode) insn).desc).getSize();
+ break;
+ default:
+ size = 1;
}
return new SourceValue(size, insn);
}
@Override
- public SourceValue binaryOperation(
- final AbstractInsnNode insn,
- final SourceValue value1,
- final SourceValue value2)
- {
+ public SourceValue binaryOperation(final AbstractInsnNode insn,
+ final SourceValue value1, final SourceValue value2) {
int size;
switch (insn.getOpcode()) {
- case LALOAD:
- case DALOAD:
- case LADD:
- case DADD:
- case LSUB:
- case DSUB:
- case LMUL:
- case DMUL:
- case LDIV:
- case DDIV:
- case LREM:
- case DREM:
- case LSHL:
- case LSHR:
- case LUSHR:
- case LAND:
- case LOR:
- case LXOR:
- size = 2;
- break;
- default:
- size = 1;
+ case LALOAD:
+ case DALOAD:
+ case LADD:
+ case DADD:
+ case LSUB:
+ case DSUB:
+ case LMUL:
+ case DMUL:
+ case LDIV:
+ case DDIV:
+ case LREM:
+ case DREM:
+ case LSHL:
+ case LSHR:
+ case LUSHR:
+ case LAND:
+ case LOR:
+ case LXOR:
+ size = 2;
+ break;
+ default:
+ size = 1;
}
return new SourceValue(size, insn);
}
@Override
- public SourceValue ternaryOperation(
- final AbstractInsnNode insn,
- final SourceValue value1,
- final SourceValue value2,
- final SourceValue value3)
- {
+ public SourceValue ternaryOperation(final AbstractInsnNode insn,
+ final SourceValue value1, final SourceValue value2,
+ final SourceValue value3) {
return new SourceValue(1, insn);
}
@Override
- public SourceValue naryOperation(final AbstractInsnNode insn, final List<? extends SourceValue> values) {
+ public SourceValue naryOperation(final AbstractInsnNode insn,
+ final List<? extends SourceValue> values) {
int size;
int opcode = insn.getOpcode();
if (opcode == MULTIANEWARRAY) {
size = 1;
} else {
- String desc = (opcode == INVOKEDYNAMIC)?
- ((InvokeDynamicInsnNode) insn).desc:
- ((MethodInsnNode) insn).desc;
+ String desc = (opcode == INVOKEDYNAMIC) ? ((InvokeDynamicInsnNode) insn).desc
+ : ((MethodInsnNode) insn).desc;
size = Type.getReturnType(desc).getSize();
}
return new SourceValue(size, insn);
}
@Override
- public void returnOperation(
- final AbstractInsnNode insn,
- final SourceValue value,
- final SourceValue expected)
- {
+ public void returnOperation(final AbstractInsnNode insn,
+ final SourceValue value, final SourceValue expected) {
}
@Override
public SourceValue merge(final SourceValue d, final SourceValue w) {
if (d.insns instanceof SmallSet && w.insns instanceof SmallSet) {
- Set<AbstractInsnNode> s = ((SmallSet<AbstractInsnNode>) d.insns).union((SmallSet<AbstractInsnNode>) w.insns);
+ Set<AbstractInsnNode> s = ((SmallSet<AbstractInsnNode>) d.insns)
+ .union((SmallSet<AbstractInsnNode>) w.insns);
if (s == d.insns && d.size == w.size) {
return d;
} else {
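
SourceInterpreter performs no type checking at all; it only tracks, for every stack slot and local, the set of instructions that may have produced the value (see SourceValue below). A hedged sketch of how that is commonly used to find the producers of the value on top of the operand stack (printProducers is an invented helper name):

    import scala.tools.asm.tree.MethodNode;
    import scala.tools.asm.tree.analysis.Analyzer;
    import scala.tools.asm.tree.analysis.AnalyzerException;
    import scala.tools.asm.tree.analysis.Frame;
    import scala.tools.asm.tree.analysis.SourceInterpreter;
    import scala.tools.asm.tree.analysis.SourceValue;

    class ProducerDemo {
        // For each instruction, report how many instructions may have produced
        // the value currently on top of the operand stack.
        static void printProducers(String owner, MethodNode mn) throws AnalyzerException {
            Frame<SourceValue>[] frames =
                    new Analyzer<SourceValue>(new SourceInterpreter()).analyze(owner, mn);
            for (int i = 0; i < frames.length; i++) {
                Frame<SourceValue> f = frames[i];
                if (f == null || f.getStackSize() == 0) {
                    continue; // unreachable instruction or empty stack
                }
                SourceValue top = f.getStack(f.getStackSize() - 1);
                System.out.println("insn " + i + ": " + top.insns.size() + " possible producer(s)");
            }
        }
    }
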
diff --git a/src/asm/scala/tools/asm/tree/analysis/SourceValue.java b/src/asm/scala/tools/asm/tree/analysis/SourceValue.java
index 57ff212fb2..40d6b68180 100644
--- a/src/asm/scala/tools/asm/tree/analysis/SourceValue.java
+++ b/src/asm/scala/tools/asm/tree/analysis/SourceValue.java
@@ -48,8 +48,8 @@ public class SourceValue implements Value {
/**
* The instructions that can produce this value. For example, for the Java
- * code below, the instructions that can produce the value of <tt>i</tt>
- * at line 5 are the txo ISTORE instructions at line 1 and 3:
+ * code below, the instructions that can produce the value of <tt>i</tt> at
+ * line 5 are the two ISTORE instructions at line 1 and 3:
*
* <pre>
* 1: i = 0;
@@ -64,7 +64,7 @@ public class SourceValue implements Value {
public final Set<AbstractInsnNode> insns;
public SourceValue(final int size) {
- this(size, SmallSet.<AbstractInsnNode>emptySet());
+ this(size, SmallSet.<AbstractInsnNode> emptySet());
}
public SourceValue(final int size, final AbstractInsnNode insn) {
@@ -84,7 +84,7 @@ public class SourceValue implements Value {
@Override
public boolean equals(final Object value) {
if (!(value instanceof SourceValue)) {
- return false;
+ return false;
}
SourceValue v = (SourceValue) value;
return size == v.size && insns.equals(v.insns);
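
The equals method touched above is purely structural: two SourceValues are equal when they have the same size and the same set of producer instructions. A tiny sketch of that behaviour (the class name is invented):

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.tree.InsnNode;
    import scala.tools.asm.tree.analysis.SourceValue;

    class SourceValueEquality {
        public static void main(String[] args) {
            InsnNode dup = new InsnNode(Opcodes.DUP);
            // same size, same single producer -> equal
            System.out.println(new SourceValue(1, dup).equals(new SourceValue(1, dup)));
            // different sizes -> not equal
            System.out.println(new SourceValue(1).equals(new SourceValue(2)));
        }
    }
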
diff --git a/src/asm/scala/tools/asm/tree/analysis/Subroutine.java b/src/asm/scala/tools/asm/tree/analysis/Subroutine.java
index 038880ddcd..d734bbd499 100644
--- a/src/asm/scala/tools/asm/tree/analysis/Subroutine.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Subroutine.java
@@ -51,11 +51,8 @@ class Subroutine {
private Subroutine() {
}
- Subroutine(
- final LabelNode start,
- final int maxLocals,
- final JumpInsnNode caller)
- {
+ Subroutine(final LabelNode start, final int maxLocals,
+ final JumpInsnNode caller) {
this.start = start;
this.access = new boolean[maxLocals];
this.callers = new ArrayList<JumpInsnNode>();
@@ -90,4 +87,4 @@ class Subroutine {
}
return changes;
}
-}
\ No newline at end of file
+}
diff --git a/src/asm/scala/tools/asm/util/ASMifiable.java b/src/asm/scala/tools/asm/util/ASMifiable.java
index 6a31dd508f..95cc6e3a74 100644
--- a/src/asm/scala/tools/asm/util/ASMifiable.java
+++ b/src/asm/scala/tools/asm/util/ASMifiable.java
@@ -34,7 +34,7 @@ import java.util.Map;
import scala.tools.asm.Label;
/**
- * An {@link org.objectweb.asm.Attribute Attribute} that can print the ASM code
+ * An {@link scala.tools.asm.Attribute Attribute} that can print the ASM code
* to create an equivalent attribute.
*
* @author Eugene Kuleshov
@@ -44,10 +44,13 @@ public interface ASMifiable {
/**
* Prints the ASM code to create an attribute equal to this attribute.
*
- * @param buf a buffer used for printing Java code.
- * @param varName name of the variable in a printed code used to store
- * attribute instance.
- * @param labelNames map of label instances to their names.
+ * @param buf
+ * a buffer used for printing Java code.
+ * @param varName
+ * name of the variable in a printed code used to store attribute
+ * instance.
+ * @param labelNames
+ * map of label instances to their names.
*/
void asmify(StringBuffer buf, String varName, Map<Label, String> labelNames);
}
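
ASMifiable lets a non-standard attribute describe how to print Java code that recreates it, so ASMifier output stays compilable. A hedged sketch of an attribute implementing it (CommentAttribute is invented; only Attribute and the asmify signature above are ASM API):

    import java.util.Map;
    import scala.tools.asm.Attribute;
    import scala.tools.asm.Label;
    import scala.tools.asm.util.ASMifiable;

    class CommentAttribute extends Attribute implements ASMifiable {
        final String comment;

        CommentAttribute(final String comment) {
            super("Comment");
            this.comment = comment;
        }

        public void asmify(final StringBuffer buf, final String varName,
                final Map<Label, String> labelNames) {
            // Emit Java source that would rebuild this attribute instance.
            buf.append("Attribute ").append(varName)
                    .append(" = new CommentAttribute(\"").append(comment)
                    .append("\");\n");
        }
    }
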
diff --git a/src/asm/scala/tools/asm/util/ASMifier.java b/src/asm/scala/tools/asm/util/ASMifier.java
index 5967c877d1..7e6b223853 100644
--- a/src/asm/scala/tools/asm/util/ASMifier.java
+++ b/src/asm/scala/tools/asm/util/ASMifier.java
@@ -91,11 +91,14 @@ public class ASMifier extends Printer {
/**
* Constructs a new {@link ASMifier}.
*
- * @param api the ASM API version implemented by this class. Must be one of
- * {@link Opcodes#ASM4}.
- * @param name the name of the visitor variable in the produced code.
- * @param id identifier of the annotation visitor variable in the produced
- * code.
+ * @param api
+ * the ASM API version implemented by this class. Must be one of
+ * {@link Opcodes#ASM4}.
+ * @param name
+ * the name of the visitor variable in the produced code.
+ * @param id
+ * identifier of the annotation visitor variable in the produced
+ * code.
*/
protected ASMifier(final int api, final String name, final int id) {
super(api);
@@ -105,13 +108,15 @@ public class ASMifier extends Printer {
/**
* Prints the ASM source code to generate the given class to the standard
- * output. <p> Usage: ASMifier [-debug] &lt;binary
- * class name or class file name&gt;
+ * output.
+ * <p>
+ * Usage: ASMifier [-debug] &lt;binary class name or class file name&gt;
*
- * @param args the command line arguments.
+ * @param args
+ * the command line arguments.
*
- * @throws Exception if the class cannot be found, or if an IO exception
- * occurs.
+ * @throws Exception
+ * if the class cannot be found, or if an IO exception occurs.
*/
public static void main(final String[] args) throws Exception {
int i = 0;
@@ -129,22 +134,21 @@ public class ASMifier extends Printer {
}
}
if (!ok) {
- System.err.println("Prints the ASM code to generate the given class.");
+ System.err
+ .println("Prints the ASM code to generate the given class.");
System.err.println("Usage: ASMifier [-debug] "
+ "<fully qualified class name or class file name>");
return;
}
ClassReader cr;
if (args[i].endsWith(".class") || args[i].indexOf('\\') > -1
- || args[i].indexOf('/') > -1)
- {
+ || args[i].indexOf('/') > -1) {
cr = new ClassReader(new FileInputStream(args[i]));
} else {
cr = new ClassReader(args[i]);
}
- cr.accept(new TraceClassVisitor(null,
- new ASMifier(),
- new PrintWriter(System.out)), flags);
+ cr.accept(new TraceClassVisitor(null, new ASMifier(), new PrintWriter(
+ System.out)), flags);
}
// ------------------------------------------------------------------------
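
Besides the command-line entry point above, the same pipeline can be driven from code by handing an ASMifier to a TraceClassVisitor, exactly as the accept call in main does. A minimal sketch, assuming the class to dump is visible on the classpath (AsmifyRunnable is a made-up name):

    import java.io.PrintWriter;
    import scala.tools.asm.ClassReader;
    import scala.tools.asm.util.ASMifier;
    import scala.tools.asm.util.TraceClassVisitor;

    public class AsmifyRunnable {
        public static void main(String[] args) throws Exception {
            // Prints the ASM calls needed to regenerate java.lang.Runnable.
            ClassReader cr = new ClassReader("java.lang.Runnable");
            cr.accept(new TraceClassVisitor(null, new ASMifier(),
                    new PrintWriter(System.out)), ClassReader.SKIP_DEBUG);
        }
    }
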
@@ -152,14 +156,9 @@ public class ASMifier extends Printer {
// ------------------------------------------------------------------------
@Override
- public void visit(
- final int version,
- final int access,
- final String name,
- final String signature,
- final String superName,
- final String[] interfaces)
- {
+ public void visit(final int version, final int access, final String name,
+ final String signature, final String superName,
+ final String[] interfaces) {
String simpleName;
int n = name.lastIndexOf('/');
if (n == -1) {
@@ -170,8 +169,8 @@ public class ASMifier extends Printer {
simpleName = name.substring(n + 1);
}
text.add("import java.util.*;\n");
- text.add("import org.objectweb.asm.*;\n");
- text.add("import org.objectweb.asm.attrs.*;\n");
+ text.add("import scala.tools.asm.*;\n");
+ text.add("import scala.tools.asm.attrs.*;\n");
text.add("public class " + simpleName + "Dump implements Opcodes {\n\n");
text.add("public static byte[] dump () throws Exception {\n\n");
text.add("ClassWriter cw = new ClassWriter(0);\n");
@@ -182,30 +181,30 @@ public class ASMifier extends Printer {
buf.setLength(0);
buf.append("cw.visit(");
switch (version) {
- case Opcodes.V1_1:
- buf.append("V1_1");
- break;
- case Opcodes.V1_2:
- buf.append("V1_2");
- break;
- case Opcodes.V1_3:
- buf.append("V1_3");
- break;
- case Opcodes.V1_4:
- buf.append("V1_4");
- break;
- case Opcodes.V1_5:
- buf.append("V1_5");
- break;
- case Opcodes.V1_6:
- buf.append("V1_6");
- break;
- case Opcodes.V1_7:
- buf.append("V1_7");
- break;
- default:
- buf.append(version);
- break;
+ case Opcodes.V1_1:
+ buf.append("V1_1");
+ break;
+ case Opcodes.V1_2:
+ buf.append("V1_2");
+ break;
+ case Opcodes.V1_3:
+ buf.append("V1_3");
+ break;
+ case Opcodes.V1_4:
+ buf.append("V1_4");
+ break;
+ case Opcodes.V1_5:
+ buf.append("V1_5");
+ break;
+ case Opcodes.V1_6:
+ buf.append("V1_6");
+ break;
+ case Opcodes.V1_7:
+ buf.append("V1_7");
+ break;
+ default:
+ buf.append(version);
+ break;
}
buf.append(", ");
appendAccess(access | ACCESS_CLASS);
@@ -242,11 +241,8 @@ public class ASMifier extends Printer {
}
@Override
- public void visitOuterClass(
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitOuterClass(final String owner, final String name,
+ final String desc) {
buf.setLength(0);
buf.append("cw.visitOuterClass(");
appendConstant(owner);
@@ -259,10 +255,8 @@ public class ASMifier extends Printer {
}
@Override
- public ASMifier visitClassAnnotation(
- final String desc,
- final boolean visible)
- {
+ public ASMifier visitClassAnnotation(final String desc,
+ final boolean visible) {
return visitAnnotation(desc, visible);
}
@@ -272,12 +266,8 @@ public class ASMifier extends Printer {
}
@Override
- public void visitInnerClass(
- final String name,
- final String outerName,
- final String innerName,
- final int access)
- {
+ public void visitInnerClass(final String name, final String outerName,
+ final String innerName, final int access) {
buf.setLength(0);
buf.append("cw.visitInnerClass(");
appendConstant(name);
@@ -292,13 +282,8 @@ public class ASMifier extends Printer {
}
@Override
- public ASMifier visitField(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final Object value)
- {
+ public ASMifier visitField(final int access, final String name,
+ final String desc, final String signature, final Object value) {
buf.setLength(0);
buf.append("{\n");
buf.append("fv = cw.visitField(");
@@ -320,13 +305,8 @@ public class ASMifier extends Printer {
}
@Override
- public ASMifier visitMethod(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final String[] exceptions)
- {
+ public ASMifier visitMethod(final int access, final String name,
+ final String desc, final String signature, final String[] exceptions) {
buf.setLength(0);
buf.append("{\n");
buf.append("mv = cw.visitMethod(");
@@ -380,11 +360,8 @@ public class ASMifier extends Printer {
}
@Override
- public void visitEnum(
- final String name,
- final String desc,
- final String value)
- {
+ public void visitEnum(final String name, final String desc,
+ final String value) {
buf.setLength(0);
buf.append("av").append(id).append(".visitEnum(");
appendConstant(buf, name);
@@ -397,10 +374,7 @@ public class ASMifier extends Printer {
}
@Override
- public ASMifier visitAnnotation(
- final String name,
- final String desc)
- {
+ public ASMifier visitAnnotation(final String name, final String desc) {
buf.setLength(0);
buf.append("{\n");
buf.append("AnnotationVisitor av").append(id + 1).append(" = av");
@@ -443,10 +417,8 @@ public class ASMifier extends Printer {
// ------------------------------------------------------------------------
@Override
- public ASMifier visitFieldAnnotation(
- final String desc,
- final boolean visible)
- {
+ public ASMifier visitFieldAnnotation(final String desc,
+ final boolean visible) {
return visitAnnotation(desc, visible);
}
@@ -469,9 +441,7 @@ public class ASMifier extends Printer {
@Override
public ASMifier visitAnnotationDefault() {
buf.setLength(0);
- buf.append("{\n")
- .append("av0 = ")
- .append(name)
+ buf.append("{\n").append("av0 = ").append(name)
.append(".visitAnnotationDefault();\n");
text.add(buf.toString());
ASMifier a = createASMifier("av", 0);
@@ -481,23 +451,17 @@ public class ASMifier extends Printer {
}
@Override
- public ASMifier visitMethodAnnotation(
- final String desc,
- final boolean visible)
- {
+ public ASMifier visitMethodAnnotation(final String desc,
+ final boolean visible) {
return visitAnnotation(desc, visible);
}
@Override
- public ASMifier visitParameterAnnotation(
- final int parameter,
- final String desc,
- final boolean visible)
- {
+ public ASMifier visitParameterAnnotation(final int parameter,
+ final String desc, final boolean visible) {
buf.setLength(0);
- buf.append("{\n")
- .append("av0 = ").append(name).append(".visitParameterAnnotation(")
- .append(parameter)
+ buf.append("{\n").append("av0 = ").append(name)
+ .append(".visitParameterAnnotation(").append(parameter)
.append(", ");
appendConstant(desc);
buf.append(", ").append(visible).append(");\n");
@@ -519,52 +483,47 @@ public class ASMifier extends Printer {
}
@Override
- public void visitFrame(
- final int type,
- final int nLocal,
- final Object[] local,
- final int nStack,
- final Object[] stack)
- {
+ public void visitFrame(final int type, final int nLocal,
+ final Object[] local, final int nStack, final Object[] stack) {
buf.setLength(0);
switch (type) {
- case Opcodes.F_NEW:
- case Opcodes.F_FULL:
- declareFrameTypes(nLocal, local);
- declareFrameTypes(nStack, stack);
- if (type == Opcodes.F_NEW) {
- buf.append(name).append(".visitFrame(Opcodes.F_NEW, ");
- } else {
- buf.append(name).append(".visitFrame(Opcodes.F_FULL, ");
- }
- buf.append(nLocal).append(", new Object[] {");
- appendFrameTypes(nLocal, local);
- buf.append("}, ").append(nStack).append(", new Object[] {");
- appendFrameTypes(nStack, stack);
- buf.append('}');
- break;
- case Opcodes.F_APPEND:
- declareFrameTypes(nLocal, local);
- buf.append(name).append(".visitFrame(Opcodes.F_APPEND,")
- .append(nLocal)
- .append(", new Object[] {");
- appendFrameTypes(nLocal, local);
- buf.append("}, 0, null");
- break;
- case Opcodes.F_CHOP:
- buf.append(name).append(".visitFrame(Opcodes.F_CHOP,")
- .append(nLocal)
- .append(", null, 0, null");
- break;
- case Opcodes.F_SAME:
- buf.append(name).append(".visitFrame(Opcodes.F_SAME, 0, null, 0, null");
- break;
- case Opcodes.F_SAME1:
- declareFrameTypes(1, stack);
- buf.append(name).append(".visitFrame(Opcodes.F_SAME1, 0, null, 1, new Object[] {");
- appendFrameTypes(1, stack);
- buf.append('}');
- break;
+ case Opcodes.F_NEW:
+ case Opcodes.F_FULL:
+ declareFrameTypes(nLocal, local);
+ declareFrameTypes(nStack, stack);
+ if (type == Opcodes.F_NEW) {
+ buf.append(name).append(".visitFrame(Opcodes.F_NEW, ");
+ } else {
+ buf.append(name).append(".visitFrame(Opcodes.F_FULL, ");
+ }
+ buf.append(nLocal).append(", new Object[] {");
+ appendFrameTypes(nLocal, local);
+ buf.append("}, ").append(nStack).append(", new Object[] {");
+ appendFrameTypes(nStack, stack);
+ buf.append('}');
+ break;
+ case Opcodes.F_APPEND:
+ declareFrameTypes(nLocal, local);
+ buf.append(name).append(".visitFrame(Opcodes.F_APPEND,")
+ .append(nLocal).append(", new Object[] {");
+ appendFrameTypes(nLocal, local);
+ buf.append("}, 0, null");
+ break;
+ case Opcodes.F_CHOP:
+ buf.append(name).append(".visitFrame(Opcodes.F_CHOP,")
+ .append(nLocal).append(", null, 0, null");
+ break;
+ case Opcodes.F_SAME:
+ buf.append(name).append(
+ ".visitFrame(Opcodes.F_SAME, 0, null, 0, null");
+ break;
+ case Opcodes.F_SAME1:
+ declareFrameTypes(1, stack);
+ buf.append(name).append(
+ ".visitFrame(Opcodes.F_SAME1, 0, null, 1, new Object[] {");
+ appendFrameTypes(1, stack);
+ buf.append('}');
+ break;
}
buf.append(");\n");
text.add(buf.toString());
@@ -573,7 +532,8 @@ public class ASMifier extends Printer {
@Override
public void visitInsn(final int opcode) {
buf.setLength(0);
- buf.append(name).append(".visitInsn(").append(OPCODES[opcode]).append(");\n");
+ buf.append(name).append(".visitInsn(").append(OPCODES[opcode])
+ .append(");\n");
text.add(buf.toString());
}
@@ -584,43 +544,35 @@ public class ASMifier extends Printer {
.append(".visitIntInsn(")
.append(OPCODES[opcode])
.append(", ")
- .append(opcode == Opcodes.NEWARRAY
- ? TYPES[operand]
- : Integer.toString(operand))
- .append(");\n");
+ .append(opcode == Opcodes.NEWARRAY ? TYPES[operand] : Integer
+ .toString(operand)).append(");\n");
text.add(buf.toString());
}
@Override
public void visitVarInsn(final int opcode, final int var) {
buf.setLength(0);
- buf.append(name)
- .append(".visitVarInsn(")
- .append(OPCODES[opcode])
- .append(", ")
- .append(var)
- .append(");\n");
+ buf.append(name).append(".visitVarInsn(").append(OPCODES[opcode])
+ .append(", ").append(var).append(");\n");
text.add(buf.toString());
}
@Override
public void visitTypeInsn(final int opcode, final String type) {
buf.setLength(0);
- buf.append(name).append(".visitTypeInsn(").append(OPCODES[opcode]).append(", ");
+ buf.append(name).append(".visitTypeInsn(").append(OPCODES[opcode])
+ .append(", ");
appendConstant(type);
buf.append(");\n");
text.add(buf.toString());
}
@Override
- public void visitFieldInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitFieldInsn(final int opcode, final String owner,
+ final String name, final String desc) {
buf.setLength(0);
- buf.append(this.name).append(".visitFieldInsn(").append(OPCODES[opcode]).append(", ");
+ buf.append(this.name).append(".visitFieldInsn(")
+ .append(OPCODES[opcode]).append(", ");
appendConstant(owner);
buf.append(", ");
appendConstant(name);
@@ -631,14 +583,11 @@ public class ASMifier extends Printer {
}
@Override
- public void visitMethodInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc) {
buf.setLength(0);
- buf.append(this.name).append(".visitMethodInsn(").append(OPCODES[opcode]).append(", ");
+ buf.append(this.name).append(".visitMethodInsn(")
+ .append(OPCODES[opcode]).append(", ");
appendConstant(owner);
buf.append(", ");
appendConstant(name);
@@ -649,12 +598,8 @@ public class ASMifier extends Printer {
}
@Override
- public void visitInvokeDynamicInsn(
- String name,
- String desc,
- Handle bsm,
- Object... bsmArgs)
- {
+ public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
+ Object... bsmArgs) {
buf.setLength(0);
buf.append(this.name).append(".visitInvokeDynamicInsn(");
appendConstant(name);
@@ -677,7 +622,8 @@ public class ASMifier extends Printer {
public void visitJumpInsn(final int opcode, final Label label) {
buf.setLength(0);
declareLabel(label);
- buf.append(name).append(".visitJumpInsn(").append(OPCODES[opcode]).append(", ");
+ buf.append(name).append(".visitJumpInsn(").append(OPCODES[opcode])
+ .append(", ");
appendLabel(label);
buf.append(");\n");
text.add(buf.toString());
@@ -705,34 +651,22 @@ public class ASMifier extends Printer {
@Override
public void visitIincInsn(final int var, final int increment) {
buf.setLength(0);
- buf.append(name)
- .append(".visitIincInsn(")
- .append(var)
- .append(", ")
- .append(increment)
- .append(");\n");
+ buf.append(name).append(".visitIincInsn(").append(var).append(", ")
+ .append(increment).append(");\n");
text.add(buf.toString());
}
@Override
- public void visitTableSwitchInsn(
- final int min,
- final int max,
- final Label dflt,
- final Label... labels)
- {
+ public void visitTableSwitchInsn(final int min, final int max,
+ final Label dflt, final Label... labels) {
buf.setLength(0);
for (int i = 0; i < labels.length; ++i) {
declareLabel(labels[i]);
}
declareLabel(dflt);
- buf.append(name)
- .append(".visitTableSwitchInsn(")
- .append(min)
- .append(", ")
- .append(max)
- .append(", ");
+ buf.append(name).append(".visitTableSwitchInsn(").append(min)
+ .append(", ").append(max).append(", ");
appendLabel(dflt);
buf.append(", new Label[] {");
for (int i = 0; i < labels.length; ++i) {
@@ -744,11 +678,8 @@ public class ASMifier extends Printer {
}
@Override
- public void visitLookupSwitchInsn(
- final Label dflt,
- final int[] keys,
- final Label[] labels)
- {
+ public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
+ final Label[] labels) {
buf.setLength(0);
for (int i = 0; i < labels.length; ++i) {
declareLabel(labels[i]);
@@ -780,12 +711,8 @@ public class ASMifier extends Printer {
}
@Override
- public void visitTryCatchBlock(
- final Label start,
- final Label end,
- final Label handler,
- final String type)
- {
+ public void visitTryCatchBlock(final Label start, final Label end,
+ final Label handler, final String type) {
buf.setLength(0);
declareLabel(start);
declareLabel(end);
@@ -803,14 +730,9 @@ public class ASMifier extends Printer {
}
@Override
- public void visitLocalVariable(
- final String name,
- final String desc,
- final String signature,
- final Label start,
- final Label end,
- final int index)
- {
+ public void visitLocalVariable(final String name, final String desc,
+ final String signature, final Label start, final Label end,
+ final int index) {
buf.setLength(0);
buf.append(this.name).append(".visitLocalVariable(");
appendConstant(name);
@@ -838,12 +760,8 @@ public class ASMifier extends Printer {
@Override
public void visitMaxs(final int maxStack, final int maxLocals) {
buf.setLength(0);
- buf.append(name)
- .append(".visitMaxs(")
- .append(maxStack)
- .append(", ")
- .append(maxLocals)
- .append(");\n");
+ buf.append(name).append(".visitMaxs(").append(maxStack).append(", ")
+ .append(maxLocals).append(");\n");
text.add(buf.toString());
}
@@ -858,14 +776,9 @@ public class ASMifier extends Printer {
// Common methods
// ------------------------------------------------------------------------
- public ASMifier visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public ASMifier visitAnnotation(final String desc, final boolean visible) {
buf.setLength(0);
- buf.append("{\n")
- .append("av0 = ")
- .append(name)
+ buf.append("{\n").append("av0 = ").append(name)
.append(".visitAnnotation(");
appendConstant(desc);
buf.append(", ").append(visible).append(");\n");
@@ -895,15 +808,16 @@ public class ASMifier extends Printer {
// Utility methods
// ------------------------------------------------------------------------
- protected ASMifier createASMifier(final String name, final int id) {
+ protected ASMifier createASMifier(final String name, final int id) {
return new ASMifier(Opcodes.ASM4, name, id);
}
/**
- * Appends a string representation of the given access modifiers to {@link
- * #buf buf}.
+ * Appends a string representation of the given access modifiers to
+ * {@link #buf buf}.
*
- * @param access some access modifiers.
+ * @param access
+ * some access modifiers.
*/
void appendAccess(final int access) {
boolean first = true;
@@ -945,8 +859,7 @@ public class ASMifier extends Printer {
first = false;
}
if ((access & Opcodes.ACC_VOLATILE) != 0
- && (access & ACCESS_FIELD) != 0)
- {
+ && (access & ACCESS_FIELD) != 0) {
if (!first) {
buf.append(" + ");
}
@@ -954,8 +867,7 @@ public class ASMifier extends Printer {
first = false;
}
if ((access & Opcodes.ACC_BRIDGE) != 0 && (access & ACCESS_CLASS) == 0
- && (access & ACCESS_FIELD) == 0)
- {
+ && (access & ACCESS_FIELD) == 0) {
if (!first) {
buf.append(" + ");
}
@@ -963,8 +875,7 @@ public class ASMifier extends Printer {
first = false;
}
if ((access & Opcodes.ACC_VARARGS) != 0 && (access & ACCESS_CLASS) == 0
- && (access & ACCESS_FIELD) == 0)
- {
+ && (access & ACCESS_FIELD) == 0) {
if (!first) {
buf.append(" + ");
}
@@ -972,8 +883,7 @@ public class ASMifier extends Printer {
first = false;
}
if ((access & Opcodes.ACC_TRANSIENT) != 0
- && (access & ACCESS_FIELD) != 0)
- {
+ && (access & ACCESS_FIELD) != 0) {
if (!first) {
buf.append(" + ");
}
@@ -981,8 +891,7 @@ public class ASMifier extends Printer {
first = false;
}
if ((access & Opcodes.ACC_NATIVE) != 0 && (access & ACCESS_CLASS) == 0
- && (access & ACCESS_FIELD) == 0)
- {
+ && (access & ACCESS_FIELD) == 0) {
if (!first) {
buf.append(" + ");
}
@@ -991,8 +900,7 @@ public class ASMifier extends Printer {
}
if ((access & Opcodes.ACC_ENUM) != 0
&& ((access & ACCESS_CLASS) != 0
- || (access & ACCESS_FIELD) != 0 || (access & ACCESS_INNER) != 0))
- {
+ || (access & ACCESS_FIELD) != 0 || (access & ACCESS_INNER) != 0)) {
if (!first) {
buf.append(" + ");
}
@@ -1000,8 +908,7 @@ public class ASMifier extends Printer {
first = false;
}
if ((access & Opcodes.ACC_ANNOTATION) != 0
- && ((access & ACCESS_CLASS) != 0 || (access & ACCESS_INNER) != 0))
- {
+ && ((access & ACCESS_CLASS) != 0 || (access & ACCESS_INNER) != 0)) {
if (!first) {
buf.append(" + ");
}
@@ -1052,8 +959,9 @@ public class ASMifier extends Printer {
* Appends a string representation of the given constant to the given
* buffer.
*
- * @param cst an {@link Integer}, {@link Float}, {@link Long},
- * {@link Double} or {@link String} object. May be <tt>null</tt>.
+ * @param cst
+ * an {@link Integer}, {@link Float}, {@link Long},
+ * {@link Double} or {@link String} object. May be <tt>null</tt>.
*/
protected void appendConstant(final Object cst) {
appendConstant(buf, cst);
@@ -1063,9 +971,11 @@ public class ASMifier extends Printer {
* Appends a string representation of the given constant to the given
* buffer.
*
- * @param buf a string buffer.
- * @param cst an {@link Integer}, {@link Float}, {@link Long},
- * {@link Double} or {@link String} object. May be <tt>null</tt>.
+ * @param buf
+ * a string buffer.
+ * @param cst
+ * an {@link Integer}, {@link Float}, {@link Long},
+ * {@link Double} or {@link String} object. May be <tt>null</tt>.
*/
static void appendConstant(final StringBuffer buf, final Object cst) {
if (cst == null) {
@@ -1079,14 +989,16 @@ public class ASMifier extends Printer {
} else if (cst instanceof Handle) {
buf.append("new Handle(");
Handle h = (Handle) cst;
- buf.append("Opcodes.").append(HANDLE_TAG[h.getTag()]).append(", \"");
+ buf.append("Opcodes.").append(HANDLE_TAG[h.getTag()])
+ .append(", \"");
buf.append(h.getOwner()).append("\", \"");
buf.append(h.getName()).append("\", \"");
buf.append(h.getDesc()).append("\")");
} else if (cst instanceof Byte) {
buf.append("new Byte((byte)").append(cst).append(')');
} else if (cst instanceof Boolean) {
- buf.append(((Boolean) cst).booleanValue() ? "Boolean.TRUE" : "Boolean.FALSE");
+ buf.append(((Boolean) cst).booleanValue() ? "Boolean.TRUE"
+ : "Boolean.FALSE");
} else if (cst instanceof Short) {
buf.append("new Short((short)").append(cst).append(')');
} else if (cst instanceof Character) {
@@ -1125,8 +1037,7 @@ public class ASMifier extends Printer {
char[] v = (char[]) cst;
buf.append("new char[] {");
for (int i = 0; i < v.length; i++) {
- buf.append(i == 0 ? "" : ",")
- .append("(char)")
+ buf.append(i == 0 ? "" : ",").append("(char)")
.append((int) v[i]);
}
buf.append('}');
@@ -1178,27 +1089,27 @@ public class ASMifier extends Printer {
appendConstant(o[i]);
} else if (o[i] instanceof Integer) {
switch (((Integer) o[i]).intValue()) {
- case 0:
- buf.append("Opcodes.TOP");
- break;
- case 1:
- buf.append("Opcodes.INTEGER");
- break;
- case 2:
- buf.append("Opcodes.FLOAT");
- break;
- case 3:
- buf.append("Opcodes.DOUBLE");
- break;
- case 4:
- buf.append("Opcodes.LONG");
- break;
- case 5:
- buf.append("Opcodes.NULL");
- break;
- case 6:
- buf.append("Opcodes.UNINITIALIZED_THIS");
- break;
+ case 0:
+ buf.append("Opcodes.TOP");
+ break;
+ case 1:
+ buf.append("Opcodes.INTEGER");
+ break;
+ case 2:
+ buf.append("Opcodes.FLOAT");
+ break;
+ case 3:
+ buf.append("Opcodes.DOUBLE");
+ break;
+ case 4:
+ buf.append("Opcodes.LONG");
+ break;
+ case 5:
+ buf.append("Opcodes.NULL");
+ break;
+ case 6:
+ buf.append("Opcodes.UNINITIALIZED_THIS");
+ break;
}
} else {
appendLabel((Label) o[i]);
@@ -1211,7 +1122,8 @@ public class ASMifier extends Printer {
* declaration is of the form "Label lXXX = new Label();". Does nothing if
* the given label has already been declared.
*
- * @param l a label.
+ * @param l
+ * a label.
*/
protected void declareLabel(final Label l) {
if (labelNames == null) {
@@ -1227,10 +1139,11 @@ public class ASMifier extends Printer {
/**
* Appends the name of the given label to {@link #buf buf}. The given label
- * <i>must</i> already have a name. One way to ensure this is to always
- * call {@link #declareLabel declared} before calling this method.
+ * <i>must</i> already have a name. One way to ensure this is to always call
+ * {@link #declareLabel} before calling this method.
*
- * @param l a label.
+ * @param l
+ * a label.
*/
protected void appendLabel(final Label l) {
buf.append(labelNames.get(l));
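
A rough illustration of how the ASMifier printer reformatted above is typically driven, assuming the vendored scala.tools.asm API shown in this diff (the demo class name and the java.lang.Runnable target are arbitrary choices): it reads a class from the class path and prints the ClassWriter/MethodVisitor calls that would regenerate it.

    import java.io.PrintWriter;

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.util.ASMifier;
    import scala.tools.asm.util.TraceClassVisitor;

    public class ASMifierDemo {
        public static void main(String[] args) throws Exception {
            // Read an arbitrary class and print the ASM code that recreates it;
            // ASMifier does the printing, TraceClassVisitor drives it.
            ClassReader cr = new ClassReader("java.lang.Runnable");
            cr.accept(new TraceClassVisitor(null, new ASMifier(),
                    new PrintWriter(System.out)), ClassReader.SKIP_DEBUG);
        }
    }
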
diff --git a/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java b/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java
index 8030c14f2e..f00a8f04a2 100644
--- a/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java
@@ -65,8 +65,7 @@ public class CheckAnnotationAdapter extends AnnotationVisitor {
|| value instanceof byte[] || value instanceof boolean[]
|| value instanceof char[] || value instanceof short[]
|| value instanceof int[] || value instanceof long[]
- || value instanceof float[] || value instanceof double[]))
- {
+ || value instanceof float[] || value instanceof double[])) {
throw new IllegalArgumentException("Invalid annotation value");
}
if (value instanceof Type) {
@@ -81,11 +80,8 @@ public class CheckAnnotationAdapter extends AnnotationVisitor {
}
@Override
- public void visitEnum(
- final String name,
- final String desc,
- final String value)
- {
+ public void visitEnum(final String name, final String desc,
+ final String value) {
checkEnd();
checkName(name);
CheckMethodAdapter.checkDesc(desc, false);
@@ -98,15 +94,12 @@ public class CheckAnnotationAdapter extends AnnotationVisitor {
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String name,
- final String desc)
- {
+ public AnnotationVisitor visitAnnotation(final String name,
+ final String desc) {
checkEnd();
checkName(name);
CheckMethodAdapter.checkDesc(desc, false);
- return new CheckAnnotationAdapter(av == null
- ? null
+ return new CheckAnnotationAdapter(av == null ? null
: av.visitAnnotation(name, desc));
}
@@ -114,8 +107,7 @@ public class CheckAnnotationAdapter extends AnnotationVisitor {
public AnnotationVisitor visitArray(final String name) {
checkEnd();
checkName(name);
- return new CheckAnnotationAdapter(av == null
- ? null
+ return new CheckAnnotationAdapter(av == null ? null
: av.visitArray(name), false);
}
@@ -130,13 +122,15 @@ public class CheckAnnotationAdapter extends AnnotationVisitor {
private void checkEnd() {
if (end) {
- throw new IllegalStateException("Cannot call a visit method after visitEnd has been called");
+ throw new IllegalStateException(
+ "Cannot call a visit method after visitEnd has been called");
}
}
private void checkName(final String name) {
if (named && name == null) {
- throw new IllegalArgumentException("Annotation value name must not be null");
+ throw new IllegalArgumentException(
+ "Annotation value name must not be null");
}
}
}
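
The reformatting above does not change CheckAnnotationAdapter's behaviour: visits after visitEnd and null value names are still rejected. A minimal sketch of the visitEnd rule, assuming the vendored scala.tools.asm API (the Demo class and the LMyAnno; descriptor are invented for the example):

    import scala.tools.asm.AnnotationVisitor;
    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.Opcodes;
    import scala.tools.asm.util.CheckClassAdapter;

    public class AnnotationCheckDemo {
        public static void main(String[] args) {
            ClassWriter cw = new ClassWriter(0);
            CheckClassAdapter cv = new CheckClassAdapter(cw);
            cv.visit(Opcodes.V1_5, Opcodes.ACC_PUBLIC, "Demo", null,
                    "java/lang/Object", null);
            // visitAnnotation hands back a CheckAnnotationAdapter around the writer.
            AnnotationVisitor av = cv.visitAnnotation("LMyAnno;", true);
            av.visit("value", "ok");   // accepted: non-null name, String value
            av.visitEnd();
            av.visit("late", "boom");  // IllegalStateException: visit after visitEnd
        }
    }
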
diff --git a/src/asm/scala/tools/asm/util/CheckClassAdapter.java b/src/asm/scala/tools/asm/util/CheckClassAdapter.java
index a455322531..0bfa143a95 100644
--- a/src/asm/scala/tools/asm/util/CheckClassAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckClassAdapter.java
@@ -59,10 +59,10 @@ import scala.tools.asm.tree.analysis.SimpleVerifier;
* <i>only</i> on its arguments, but does <i>not</i> check the <i>sequence</i>
* of method calls. For example, the invalid sequence
* <tt>visitField(ACC_PUBLIC, "i", "I", null)</tt> <tt>visitField(ACC_PUBLIC,
- * "i", "D", null)</tt>
- * will <i>not</i> be detected by this class adapter.
+ * "i", "D", null)</tt> will <i>not</i> be detected by this class adapter.
*
- * <p><code>CheckClassAdapter</code> can be also used to verify bytecode
+ * <p>
+ * <code>CheckClassAdapter</code> can also be used to verify bytecode
* transformations in order to make sure transformed bytecode is sane. For
* example:
*
@@ -80,19 +80,20 @@ import scala.tools.asm.tree.analysis.SimpleVerifier;
* </pre>
*
* Above code runs transformed bytecode through the
- * <code>CheckClassAdapter</code>. It won't be exactly the same verification
- * as JVM does, but it run data flow analysis for the code of each method and
+ * <code>CheckClassAdapter</code>. It won't be exactly the same verification as
+ * the JVM does, but it runs data flow analysis for the code of each method and
* checks that expectations are met for each method instruction.
*
- * <p>If method bytecode has errors, assertion text will show the erroneous
+ * <p>
+ * If method bytecode has errors, assertion text will show the erroneous
* instruction number and dump of the failed method with information about
* locals and stack slot for each instruction. For example (format is -
* insnNumber locals : stack):
*
* <pre>
- * org.objectweb.asm.tree.analysis.AnalyzerException: Error at instruction 71: Expected I, but found .
- * at org.objectweb.asm.tree.analysis.Analyzer.analyze(Analyzer.java:289)
- * at org.objectweb.asm.util.CheckClassAdapter.verify(CheckClassAdapter.java:135)
+ * scala.tools.asm.tree.analysis.AnalyzerException: Error at instruction 71: Expected I, but found .
+ * at scala.tools.asm.tree.analysis.Analyzer.analyze(Analyzer.java:289)
+ * at scala.tools.asm.util.CheckClassAdapter.verify(CheckClassAdapter.java:135)
* ...
* remove()V
* 00000 LinkedBlockingQueue$Itr . . . . . . . . :
@@ -114,8 +115,9 @@ import scala.tools.asm.tree.analysis.SimpleVerifier;
* initialized. You can also see that at the beginning of the method (code
* inserted by the transformation) variable 2 is initialized.
*
- * <p>Note that when used like that, <code>CheckClassAdapter.verify()</code>
- * can trigger additional class loading, because it is using
+ * <p>
+ * Note that when used like that, <code>CheckClassAdapter.verify()</code> can
+ * trigger additional class loading, because it is using
* <code>SimpleVerifier</code>.
*
* @author Eric Bruneton
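
The verification workflow described in the reflowed javadoc above can be exercised roughly as follows; this is a self-contained illustrative sketch (the generated Demo class and its answer method are invented for the example), not part of the patch:

    import java.io.PrintWriter;

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;
    import scala.tools.asm.util.CheckClassAdapter;

    public class VerifyDemo {
        public static void main(String[] args) {
            // Generate a tiny class with one method: static int answer() { return 42; }
            ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_MAXS);
            cw.visit(Opcodes.V1_6, Opcodes.ACC_PUBLIC, "Demo", null,
                    "java/lang/Object", null);
            MethodVisitor mv = cw.visitMethod(
                    Opcodes.ACC_PUBLIC + Opcodes.ACC_STATIC, "answer", "()I",
                    null, null);
            mv.visitCode();
            mv.visitIntInsn(Opcodes.BIPUSH, 42);
            mv.visitInsn(Opcodes.IRETURN);
            mv.visitMaxs(0, 0); // recomputed because of COMPUTE_MAXS
            mv.visitEnd();
            cw.visitEnd();

            // Data-flow check every method of the result; any problem is reported
            // on the PrintWriter together with a dump of the offending method.
            CheckClassAdapter.verify(new ClassReader(cw.toByteArray()), false,
                    new PrintWriter(System.err));
        }
    }
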
@@ -159,13 +161,15 @@ public class CheckClassAdapter extends ClassVisitor {
private boolean checkDataFlow;
/**
- * Checks a given class. <p> Usage: CheckClassAdapter &lt;binary
- * class name or class file name&gt;
+ * Checks a given class.
+ * <p>
+ * Usage: CheckClassAdapter &lt;binary class name or class file name&gt;
*
- * @param args the command line arguments.
+ * @param args
+ * the command line arguments.
*
- * @throws Exception if the class cannot be found, or if an IO exception
- * occurs.
+ * @throws Exception
+ * if the class cannot be found, or if an IO exception occurs.
*/
public static void main(final String[] args) throws Exception {
if (args.length != 1) {
@@ -187,27 +191,26 @@ public class CheckClassAdapter extends ClassVisitor {
/**
* Checks a given class.
*
- * @param cr a <code>ClassReader</code> that contains bytecode for the
- * analysis.
- * @param loader a <code>ClassLoader</code> which will be used to load
- * referenced classes. This is useful if you are verifiying multiple
- * interdependent classes.
- * @param dump true if bytecode should be printed out not only when errors
- * are found.
- * @param pw write where results going to be printed
+ * @param cr
+ * a <code>ClassReader</code> that contains bytecode for the
+ * analysis.
+ * @param loader
+ * a <code>ClassLoader</code> which will be used to load
+ * referenced classes. This is useful if you are verifying
+ * multiple interdependent classes.
+ * @param dump
+ * true if bytecode should be printed out not only when errors
+ * are found.
+ * @param pw
+ * writer where results are going to be printed
*/
- public static void verify(
- final ClassReader cr,
- final ClassLoader loader,
- final boolean dump,
- final PrintWriter pw)
- {
+ public static void verify(final ClassReader cr, final ClassLoader loader,
+ final boolean dump, final PrintWriter pw) {
ClassNode cn = new ClassNode();
cr.accept(new CheckClassAdapter(cn, false), ClassReader.SKIP_DEBUG);
- Type syperType = cn.superName == null
- ? null
- : Type.getObjectType(cn.superName);
+ Type syperType = cn.superName == null ? null : Type
+ .getObjectType(cn.superName);
List<MethodNode> methods = cn.methods;
List<Type> interfaces = new ArrayList<Type>();
@@ -217,9 +220,8 @@ public class CheckClassAdapter extends ClassVisitor {
for (int i = 0; i < methods.size(); ++i) {
MethodNode method = methods.get(i);
- SimpleVerifier verifier = new SimpleVerifier(Type.getObjectType(cn.name),
- syperType,
- interfaces,
+ SimpleVerifier verifier = new SimpleVerifier(
+ Type.getObjectType(cn.name), syperType, interfaces,
(cn.access & Opcodes.ACC_INTERFACE) != 0);
Analyzer<BasicValue> a = new Analyzer<BasicValue>(verifier);
if (loader != null) {
@@ -241,25 +243,22 @@ public class CheckClassAdapter extends ClassVisitor {
/**
* Checks a given class
*
- * @param cr a <code>ClassReader</code> that contains bytecode for the
- * analysis.
- * @param dump true if bytecode should be printed out not only when errors
- * are found.
- * @param pw write where results going to be printed
+ * @param cr
+ * a <code>ClassReader</code> that contains bytecode for the
+ * analysis.
+ * @param dump
+ * true if bytecode should be printed out not only when errors
+ * are found.
+ * @param pw
+ * writer where results are going to be printed
*/
- public static void verify(
- final ClassReader cr,
- final boolean dump,
- final PrintWriter pw)
- {
+ public static void verify(final ClassReader cr, final boolean dump,
+ final PrintWriter pw) {
verify(cr, null, dump, pw);
}
- static void printAnalyzerResult(
- MethodNode method,
- Analyzer<BasicValue> a,
- final PrintWriter pw)
- {
+ static void printAnalyzerResult(MethodNode method, Analyzer<BasicValue> a,
+ final PrintWriter pw) {
Frame<BasicValue>[] frames = a.getFrames();
Textifier t = new Textifier();
TraceMethodVisitor mv = new TraceMethodVisitor(t);
@@ -310,7 +309,8 @@ public class CheckClassAdapter extends ClassVisitor {
* this constructor</i>. Instead, they must use the
* {@link #CheckClassAdapter(int, ClassVisitor, boolean)} version.
*
- * @param cv the class visitor to which this adapter must delegate calls.
+ * @param cv
+ * the class visitor to which this adapter must delegate calls.
*/
public CheckClassAdapter(final ClassVisitor cv) {
this(cv, true);
@@ -321,33 +321,34 @@ public class CheckClassAdapter extends ClassVisitor {
* this constructor</i>. Instead, they must use the
* {@link #CheckClassAdapter(int, ClassVisitor, boolean)} version.
*
- * @param cv the class visitor to which this adapter must delegate calls.
- * @param checkDataFlow <tt>true</tt> to perform basic data flow checks, or
- * <tt>false</tt> to not perform any data flow check (see
- * {@link CheckMethodAdapter}). This option requires valid maxLocals
- * and maxStack values.
+ * @param cv
+ * the class visitor to which this adapter must delegate calls.
+ * @param checkDataFlow
+ * <tt>true</tt> to perform basic data flow checks, or
+ * <tt>false</tt> to not perform any data flow check (see
+ * {@link CheckMethodAdapter}). This option requires valid
+ * maxLocals and maxStack values.
*/
- public CheckClassAdapter(final ClassVisitor cv, final boolean checkDataFlow)
- {
+ public CheckClassAdapter(final ClassVisitor cv, final boolean checkDataFlow) {
this(Opcodes.ASM4, cv, checkDataFlow);
}
/**
* Constructs a new {@link CheckClassAdapter}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
- * @param cv the class visitor to which this adapter must delegate calls.
- * @param checkDataFlow <tt>true</tt> to perform basic data flow checks, or
- * <tt>false</tt> to not perform any data flow check (see
- * {@link CheckMethodAdapter}). This option requires valid maxLocals
- * and maxStack values.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param cv
+ * the class visitor to which this adapter must delegate calls.
+ * @param checkDataFlow
+ * <tt>true</tt> to perform basic data flow checks, or
+ * <tt>false</tt> to not perform any data flow check (see
+ * {@link CheckMethodAdapter}). This option requires valid
+ * maxLocals and maxStack values.
*/
- protected CheckClassAdapter(
- final int api,
- final ClassVisitor cv,
- final boolean checkDataFlow)
- {
+ protected CheckClassAdapter(final int api, final ClassVisitor cv,
+ final boolean checkDataFlow) {
super(api, cv);
this.labels = new HashMap<Label, Integer>();
this.checkDataFlow = checkDataFlow;
@@ -358,14 +359,9 @@ public class CheckClassAdapter extends ClassVisitor {
// ------------------------------------------------------------------------
@Override
- public void visit(
- final int version,
- final int access,
- final String name,
- final String signature,
- final String superName,
- final String[] interfaces)
- {
+ public void visit(final int version, final int access, final String name,
+ final String signature, final String superName,
+ final String[] interfaces) {
if (start) {
throw new IllegalStateException("visit must be called only once");
}
@@ -375,24 +371,25 @@ public class CheckClassAdapter extends ClassVisitor {
+ Opcodes.ACC_SUPER + Opcodes.ACC_INTERFACE
+ Opcodes.ACC_ABSTRACT + Opcodes.ACC_SYNTHETIC
+ Opcodes.ACC_ANNOTATION + Opcodes.ACC_ENUM
- + Opcodes.ACC_DEPRECATED
- + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+ + Opcodes.ACC_DEPRECATED + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
if (name == null || !name.endsWith("package-info")) {
CheckMethodAdapter.checkInternalName(name, "class name");
}
if ("java/lang/Object".equals(name)) {
if (superName != null) {
- throw new IllegalArgumentException("The super class name of the Object class must be 'null'");
+ throw new IllegalArgumentException(
+ "The super class name of the Object class must be 'null'");
}
} else {
CheckMethodAdapter.checkInternalName(superName, "super class name");
}
if (signature != null) {
- CheckMethodAdapter.checkClassSignature(signature);
+ checkClassSignature(signature);
}
if ((access & Opcodes.ACC_INTERFACE) != 0) {
if (!"java/lang/Object".equals(superName)) {
- throw new IllegalArgumentException("The super class name of interfaces must be 'java/lang/Object'");
+ throw new IllegalArgumentException(
+ "The super class name of interfaces must be 'java/lang/Object'");
}
}
if (interfaces != null) {
@@ -409,21 +406,20 @@ public class CheckClassAdapter extends ClassVisitor {
public void visitSource(final String file, final String debug) {
checkState();
if (source) {
- throw new IllegalStateException("visitSource can be called only once.");
+ throw new IllegalStateException(
+ "visitSource can be called only once.");
}
source = true;
super.visitSource(file, debug);
}
@Override
- public void visitOuterClass(
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitOuterClass(final String owner, final String name,
+ final String desc) {
checkState();
if (outer) {
- throw new IllegalStateException("visitOuterClass can be called only once.");
+ throw new IllegalStateException(
+ "visitOuterClass can be called only once.");
}
outer = true;
if (owner == null) {
@@ -436,12 +432,8 @@ public class CheckClassAdapter extends ClassVisitor {
}
@Override
- public void visitInnerClass(
- final String name,
- final String outerName,
- final String innerName,
- final int access)
- {
+ public void visitInnerClass(final String name, final String outerName,
+ final String innerName, final int access) {
checkState();
CheckMethodAdapter.checkInternalName(name, "class name");
if (outerName != null) {
@@ -459,52 +451,44 @@ public class CheckClassAdapter extends ClassVisitor {
}
@Override
- public FieldVisitor visitField(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final Object value)
- {
+ public FieldVisitor visitField(final int access, final String name,
+ final String desc, final String signature, final Object value) {
checkState();
checkAccess(access, Opcodes.ACC_PUBLIC + Opcodes.ACC_PRIVATE
+ Opcodes.ACC_PROTECTED + Opcodes.ACC_STATIC
+ Opcodes.ACC_FINAL + Opcodes.ACC_VOLATILE
+ Opcodes.ACC_TRANSIENT + Opcodes.ACC_SYNTHETIC
- + Opcodes.ACC_ENUM + Opcodes.ACC_DEPRECATED
- + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+ + Opcodes.ACC_ENUM + Opcodes.ACC_DEPRECATED + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
CheckMethodAdapter.checkUnqualifiedName(version, name, "field name");
CheckMethodAdapter.checkDesc(desc, false);
if (signature != null) {
- CheckMethodAdapter.checkFieldSignature(signature);
+ checkFieldSignature(signature);
}
if (value != null) {
CheckMethodAdapter.checkConstant(value);
}
- FieldVisitor av = super.visitField(access, name, desc, signature, value);
+ FieldVisitor av = super
+ .visitField(access, name, desc, signature, value);
return new CheckFieldAdapter(av);
}
@Override
- public MethodVisitor visitMethod(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final String[] exceptions)
- {
+ public MethodVisitor visitMethod(final int access, final String name,
+ final String desc, final String signature, final String[] exceptions) {
checkState();
checkAccess(access, Opcodes.ACC_PUBLIC + Opcodes.ACC_PRIVATE
+ Opcodes.ACC_PROTECTED + Opcodes.ACC_STATIC
+ Opcodes.ACC_FINAL + Opcodes.ACC_SYNCHRONIZED
+ Opcodes.ACC_BRIDGE + Opcodes.ACC_VARARGS + Opcodes.ACC_NATIVE
+ Opcodes.ACC_ABSTRACT + Opcodes.ACC_STRICT
- + Opcodes.ACC_SYNTHETIC + Opcodes.ACC_DEPRECATED
- + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
- CheckMethodAdapter.checkMethodIdentifier(version, name, "method name");
+ + Opcodes.ACC_SYNTHETIC + Opcodes.ACC_DEPRECATED + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+ if (!"<init>".equals(name) && !"<clinit>".equals(name)) {
+ CheckMethodAdapter.checkMethodIdentifier(version, name,
+ "method name");
+ }
CheckMethodAdapter.checkMethodDesc(desc);
if (signature != null) {
- CheckMethodAdapter.checkMethodSignature(signature);
+ checkMethodSignature(signature);
}
if (exceptions != null) {
for (int i = 0; i < exceptions.length; ++i) {
@@ -514,27 +498,19 @@ public class CheckClassAdapter extends ClassVisitor {
}
CheckMethodAdapter cma;
if (checkDataFlow) {
- cma = new CheckMethodAdapter(access,
- name,
- desc,
- super.visitMethod(access, name, desc, signature, exceptions),
- labels);
+ cma = new CheckMethodAdapter(access, name, desc, super.visitMethod(
+ access, name, desc, signature, exceptions), labels);
} else {
- cma = new CheckMethodAdapter(super.visitMethod(access,
- name,
- desc,
- signature,
- exceptions), labels);
+ cma = new CheckMethodAdapter(super.visitMethod(access, name, desc,
+ signature, exceptions), labels);
}
cma.version = version;
return cma;
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
checkState();
CheckMethodAdapter.checkDesc(desc, false);
return new CheckAnnotationAdapter(super.visitAnnotation(desc, visible));
@@ -544,7 +520,8 @@ public class CheckClassAdapter extends ClassVisitor {
public void visitAttribute(final Attribute attr) {
checkState();
if (attr == null) {
- throw new IllegalArgumentException("Invalid attribute (must not be null)");
+ throw new IllegalArgumentException(
+ "Invalid attribute (must not be null)");
}
super.visitAttribute(attr);
}
@@ -566,10 +543,12 @@ public class CheckClassAdapter extends ClassVisitor {
*/
private void checkState() {
if (!start) {
- throw new IllegalStateException("Cannot visit member before visit has been called.");
+ throw new IllegalStateException(
+ "Cannot visit member before visit has been called.");
}
if (end) {
- throw new IllegalStateException("Cannot visit member after visitEnd has been called.");
+ throw new IllegalStateException(
+ "Cannot visit member after visitEnd has been called.");
}
}
@@ -578,8 +557,10 @@ public class CheckClassAdapter extends ClassVisitor {
* method also checks that mutually incompatible flags are not set
* simultaneously.
*
- * @param access the access flags to be checked
- * @param possibleAccess the valid access flags.
+ * @param access
+ * the access flags to be checked
+ * @param possibleAccess
+ * the valid access flags.
*/
static void checkAccess(final int access, final int possibleAccess) {
if ((access & ~possibleAccess) != 0) {
@@ -590,14 +571,336 @@ public class CheckClassAdapter extends ClassVisitor {
int pri = (access & Opcodes.ACC_PRIVATE) == 0 ? 0 : 1;
int pro = (access & Opcodes.ACC_PROTECTED) == 0 ? 0 : 1;
if (pub + pri + pro > 1) {
- throw new IllegalArgumentException("public private and protected are mutually exclusive: "
- + access);
+ throw new IllegalArgumentException(
+ "public private and protected are mutually exclusive: "
+ + access);
}
int fin = (access & Opcodes.ACC_FINAL) == 0 ? 0 : 1;
int abs = (access & Opcodes.ACC_ABSTRACT) == 0 ? 0 : 1;
if (fin + abs > 1) {
- throw new IllegalArgumentException("final and abstract are mutually exclusive: "
- + access);
+ throw new IllegalArgumentException(
+ "final and abstract are mutually exclusive: " + access);
+ }
+ }
+
+ /**
+ * Checks a class signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ */
+ public static void checkClassSignature(final String signature) {
+ // ClassSignature:
+ // FormalTypeParameters? ClassTypeSignature ClassTypeSignature*
+
+ int pos = 0;
+ if (getChar(signature, 0) == '<') {
+ pos = checkFormalTypeParameters(signature, pos);
+ }
+ pos = checkClassTypeSignature(signature, pos);
+ while (getChar(signature, pos) == 'L') {
+ pos = checkClassTypeSignature(signature, pos);
+ }
+ if (pos != signature.length()) {
+ throw new IllegalArgumentException(signature + ": error at index "
+ + pos);
+ }
+ }
+
+ /**
+ * Checks a method signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ */
+ public static void checkMethodSignature(final String signature) {
+ // MethodTypeSignature:
+ // FormalTypeParameters? ( TypeSignature* ) ( TypeSignature | V ) (
+ // ^ClassTypeSignature | ^TypeVariableSignature )*
+
+ int pos = 0;
+ if (getChar(signature, 0) == '<') {
+ pos = checkFormalTypeParameters(signature, pos);
+ }
+ pos = checkChar('(', signature, pos);
+ while ("ZCBSIFJDL[T".indexOf(getChar(signature, pos)) != -1) {
+ pos = checkTypeSignature(signature, pos);
+ }
+ pos = checkChar(')', signature, pos);
+ if (getChar(signature, pos) == 'V') {
+ ++pos;
+ } else {
+ pos = checkTypeSignature(signature, pos);
+ }
+ while (getChar(signature, pos) == '^') {
+ ++pos;
+ if (getChar(signature, pos) == 'L') {
+ pos = checkClassTypeSignature(signature, pos);
+ } else {
+ pos = checkTypeVariableSignature(signature, pos);
+ }
+ }
+ if (pos != signature.length()) {
+ throw new IllegalArgumentException(signature + ": error at index "
+ + pos);
+ }
+ }
+
+ /**
+ * Checks a field signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ */
+ public static void checkFieldSignature(final String signature) {
+ int pos = checkFieldTypeSignature(signature, 0);
+ if (pos != signature.length()) {
+ throw new IllegalArgumentException(signature + ": error at index "
+ + pos);
+ }
+ }
+
+ /**
+ * Checks the formal type parameters of a class or method signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ * @param pos
+ * index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkFormalTypeParameters(final String signature, int pos) {
+ // FormalTypeParameters:
+ // < FormalTypeParameter+ >
+
+ pos = checkChar('<', signature, pos);
+ pos = checkFormalTypeParameter(signature, pos);
+ while (getChar(signature, pos) != '>') {
+ pos = checkFormalTypeParameter(signature, pos);
+ }
+ return pos + 1;
+ }
+
+ /**
+ * Checks a formal type parameter of a class or method signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ * @param pos
+ * index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkFormalTypeParameter(final String signature, int pos) {
+ // FormalTypeParameter:
+ // Identifier : FieldTypeSignature? (: FieldTypeSignature)*
+
+ pos = checkIdentifier(signature, pos);
+ pos = checkChar(':', signature, pos);
+ if ("L[T".indexOf(getChar(signature, pos)) != -1) {
+ pos = checkFieldTypeSignature(signature, pos);
+ }
+ while (getChar(signature, pos) == ':') {
+ pos = checkFieldTypeSignature(signature, pos + 1);
+ }
+ return pos;
+ }
+
+ /**
+ * Checks a field type signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ * @param pos
+ * index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkFieldTypeSignature(final String signature, int pos) {
+ // FieldTypeSignature:
+ // ClassTypeSignature | ArrayTypeSignature | TypeVariableSignature
+ //
+ // ArrayTypeSignature:
+ // [ TypeSignature
+
+ switch (getChar(signature, pos)) {
+ case 'L':
+ return checkClassTypeSignature(signature, pos);
+ case '[':
+ return checkTypeSignature(signature, pos + 1);
+ default:
+ return checkTypeVariableSignature(signature, pos);
}
}
+
+ /**
+ * Checks a class type signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ * @param pos
+ * index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkClassTypeSignature(final String signature, int pos) {
+ // ClassTypeSignature:
+ // L Identifier ( / Identifier )* TypeArguments? ( . Identifier
+ // TypeArguments? )* ;
+
+ pos = checkChar('L', signature, pos);
+ pos = checkIdentifier(signature, pos);
+ while (getChar(signature, pos) == '/') {
+ pos = checkIdentifier(signature, pos + 1);
+ }
+ if (getChar(signature, pos) == '<') {
+ pos = checkTypeArguments(signature, pos);
+ }
+ while (getChar(signature, pos) == '.') {
+ pos = checkIdentifier(signature, pos + 1);
+ if (getChar(signature, pos) == '<') {
+ pos = checkTypeArguments(signature, pos);
+ }
+ }
+ return checkChar(';', signature, pos);
+ }
+
+ /**
+ * Checks the type arguments in a class type signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ * @param pos
+ * index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkTypeArguments(final String signature, int pos) {
+ // TypeArguments:
+ // < TypeArgument+ >
+
+ pos = checkChar('<', signature, pos);
+ pos = checkTypeArgument(signature, pos);
+ while (getChar(signature, pos) != '>') {
+ pos = checkTypeArgument(signature, pos);
+ }
+ return pos + 1;
+ }
+
+ /**
+ * Checks a type argument in a class type signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ * @param pos
+ * index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkTypeArgument(final String signature, int pos) {
+ // TypeArgument:
+ // * | ( ( + | - )? FieldTypeSignature )
+
+ char c = getChar(signature, pos);
+ if (c == '*') {
+ return pos + 1;
+ } else if (c == '+' || c == '-') {
+ pos++;
+ }
+ return checkFieldTypeSignature(signature, pos);
+ }
+
+ /**
+ * Checks a type variable signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ * @param pos
+ * index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkTypeVariableSignature(final String signature,
+ int pos) {
+ // TypeVariableSignature:
+ // T Identifier ;
+
+ pos = checkChar('T', signature, pos);
+ pos = checkIdentifier(signature, pos);
+ return checkChar(';', signature, pos);
+ }
+
+ /**
+ * Checks a type signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ * @param pos
+ * index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkTypeSignature(final String signature, int pos) {
+ // TypeSignature:
+ // Z | C | B | S | I | F | J | D | FieldTypeSignature
+
+ switch (getChar(signature, pos)) {
+ case 'Z':
+ case 'C':
+ case 'B':
+ case 'S':
+ case 'I':
+ case 'F':
+ case 'J':
+ case 'D':
+ return pos + 1;
+ default:
+ return checkFieldTypeSignature(signature, pos);
+ }
+ }
+
+ /**
+ * Checks an identifier.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ * @param pos
+ * index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkIdentifier(final String signature, int pos) {
+ if (!Character.isJavaIdentifierStart(getChar(signature, pos))) {
+ throw new IllegalArgumentException(signature
+ + ": identifier expected at index " + pos);
+ }
+ ++pos;
+ while (Character.isJavaIdentifierPart(getChar(signature, pos))) {
+ ++pos;
+ }
+ return pos;
+ }
+
+ /**
+ * Checks a single character.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ * @param pos
+ * index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkChar(final char c, final String signature, int pos) {
+ if (getChar(signature, pos) == c) {
+ return pos + 1;
+ }
+ throw new IllegalArgumentException(signature + ": '" + c
+ + "' expected at index " + pos);
+ }
+
+ /**
+ * Returns the signature character at the given index.
+ *
+ * @param signature
+ * a signature.
+ * @param pos
+ * an index in signature.
+ * @return the character at the given index, or 0 if there is no such
+ * character.
+ */
+ private static char getChar(final String signature, int pos) {
+ return pos < signature.length() ? signature.charAt(pos) : (char) 0;
+ }
}
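
The signature checkers added above (checkClassSignature, checkMethodSignature, checkFieldSignature) are public and static, so they can be called directly. A small sketch with hand-written signature strings (the strings themselves are only illustrative):

    import scala.tools.asm.util.CheckClassAdapter;

    public class SignatureCheckDemo {
        public static void main(String[] args) {
            // class Box<T> extends Object
            CheckClassAdapter.checkClassSignature(
                    "<T:Ljava/lang/Object;>Ljava/lang/Object;");
            // <T> T identity(T t)
            CheckClassAdapter.checkMethodSignature(
                    "<T:Ljava/lang/Object;>(TT;)TT;");
            // a field of type List<String>
            CheckClassAdapter.checkFieldSignature(
                    "Ljava/util/List<Ljava/lang/String;>;");
            // A truncated signature is rejected with an IllegalArgumentException
            // whose message points at the offending index.
            try {
                CheckClassAdapter.checkFieldSignature("Ljava/util/List<");
            } catch (IllegalArgumentException expected) {
                System.out.println(expected.getMessage());
            }
        }
    }
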
diff --git a/src/asm/scala/tools/asm/util/CheckFieldAdapter.java b/src/asm/scala/tools/asm/util/CheckFieldAdapter.java
index bdcbe14b16..4657605936 100644
--- a/src/asm/scala/tools/asm/util/CheckFieldAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckFieldAdapter.java
@@ -46,7 +46,8 @@ public class CheckFieldAdapter extends FieldVisitor {
* this constructor</i>. Instead, they must use the
* {@link #CheckFieldAdapter(int, FieldVisitor)} version.
*
- * @param fv the field visitor to which this adapter must delegate calls.
+ * @param fv
+ * the field visitor to which this adapter must delegate calls.
*/
public CheckFieldAdapter(final FieldVisitor fv) {
this(Opcodes.ASM4, fv);
@@ -55,19 +56,19 @@ public class CheckFieldAdapter extends FieldVisitor {
/**
* Constructs a new {@link CheckFieldAdapter}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
- * @param fv the field visitor to which this adapter must delegate calls.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param fv
+ * the field visitor to which this adapter must delegate calls.
*/
protected CheckFieldAdapter(final int api, final FieldVisitor fv) {
super(api, fv);
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
checkEnd();
CheckMethodAdapter.checkDesc(desc, false);
return new CheckAnnotationAdapter(super.visitAnnotation(desc, visible));
@@ -77,7 +78,8 @@ public class CheckFieldAdapter extends FieldVisitor {
public void visitAttribute(final Attribute attr) {
checkEnd();
if (attr == null) {
- throw new IllegalArgumentException("Invalid attribute (must not be null)");
+ throw new IllegalArgumentException(
+ "Invalid attribute (must not be null)");
}
super.visitAttribute(attr);
}
@@ -91,7 +93,8 @@ public class CheckFieldAdapter extends FieldVisitor {
private void checkEnd() {
if (end) {
- throw new IllegalStateException("Cannot call a visit method after visitEnd has been called");
+ throw new IllegalStateException(
+ "Cannot call a visit method after visitEnd has been called");
}
}
}
diff --git a/src/asm/scala/tools/asm/util/CheckMethodAdapter.java b/src/asm/scala/tools/asm/util/CheckMethodAdapter.java
index 7549765421..9da01c9d6e 100644
--- a/src/asm/scala/tools/asm/util/CheckMethodAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckMethodAdapter.java
@@ -58,7 +58,7 @@ import scala.tools.asm.tree.analysis.BasicVerifier;
* arguments - such as the fact that the given opcode is correct for a given
* visit method. This adapter can also perform some basic data flow checks (more
* precisely those that can be performed without the full class hierarchy - see
- * {@link org.objectweb.asm.tree.analysis.BasicVerifier}). For instance in a
+ * {@link scala.tools.asm.tree.analysis.BasicVerifier}). For instance in a
* method whose signature is <tt>void m ()</tt>, the invalid instruction
* IRETURN, or the invalid sequence IADD L2I will be detected if the data flow
* checks are enabled. These checks are enabled by using the
@@ -75,6 +75,11 @@ public class CheckMethodAdapter extends MethodVisitor {
public int version;
/**
+ * The access flags of the method.
+ */
+ private int access;
+
+ /**
* <tt>true</tt> if the visitCode method has been called.
*/
private boolean startCode;
@@ -107,6 +112,21 @@ public class CheckMethodAdapter extends MethodVisitor {
private Set<Label> usedLabels;
/**
+ * Number of visited frames in expanded form.
+ */
+ private int expandedFrames;
+
+ /**
+ * Number of visited frames in compressed form.
+ */
+ private int compressedFrames;
+
+ /**
+ * Number of instructions before the last visited frame.
+ */
+ private int lastFrame = -1;
+
+ /**
* The exception handler ranges. Each pair of list element contains the
* start and end labels of an exception handler block.
*/
@@ -352,7 +372,8 @@ public class CheckMethodAdapter extends MethodVisitor {
* <i>Subclasses must not use this constructor</i>. Instead, they must use
* the {@link #CheckMethodAdapter(int, MethodVisitor, Map)} version.
*
- * @param mv the method visitor to which this adapter must delegate calls.
+ * @param mv
+ * the method visitor to which this adapter must delegate calls.
*/
public CheckMethodAdapter(final MethodVisitor mv) {
this(mv, new HashMap<Label, Integer>());
@@ -365,13 +386,13 @@ public class CheckMethodAdapter extends MethodVisitor {
* <i>Subclasses must not use this constructor</i>. Instead, they must use
* the {@link #CheckMethodAdapter(int, MethodVisitor, Map)} version.
*
- * @param mv the method visitor to which this adapter must delegate calls.
- * @param labels a map of already visited labels (in other methods).
+ * @param mv
+ * the method visitor to which this adapter must delegate calls.
+ * @param labels
+ * a map of already visited labels (in other methods).
*/
- public CheckMethodAdapter(
- final MethodVisitor mv,
- final Map<Label, Integer> labels)
- {
+ public CheckMethodAdapter(final MethodVisitor mv,
+ final Map<Label, Integer> labels) {
this(Opcodes.ASM4, mv, labels);
}
@@ -380,14 +401,13 @@ public class CheckMethodAdapter extends MethodVisitor {
* will not perform any data flow check (see
* {@link #CheckMethodAdapter(int,String,String,MethodVisitor,Map)}).
*
- * @param mv the method visitor to which this adapter must delegate calls.
- * @param labels a map of already visited labels (in other methods).
+ * @param mv
+ * the method visitor to which this adapter must delegate calls.
+ * @param labels
+ * a map of already visited labels (in other methods).
*/
- protected CheckMethodAdapter(
- final int api,
- final MethodVisitor mv,
- final Map<Label, Integer> labels)
- {
+ protected CheckMethodAdapter(final int api, final MethodVisitor mv,
+ final Map<Label, Integer> labels) {
super(api, mv);
this.labels = labels;
this.usedLabels = new HashSet<Label>();
@@ -400,30 +420,32 @@ public class CheckMethodAdapter extends MethodVisitor {
* signature is <tt>void m ()</tt>, the invalid instruction IRETURN, or the
* invalid sequence IADD L2I will be detected.
*
- * @param access the method's access flags.
- * @param name the method's name.
- * @param desc the method's descriptor (see {@link Type Type}).
- * @param cmv the method visitor to which this adapter must delegate calls.
- * @param labels a map of already visited labels (in other methods).
+ * @param access
+ * the method's access flags.
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor (see {@link Type Type}).
+ * @param cmv
+ * the method visitor to which this adapter must delegate calls.
+ * @param labels
+ * a map of already visited labels (in other methods).
*/
- public CheckMethodAdapter(
- final int access,
- final String name,
- final String desc,
- final MethodVisitor cmv,
- final Map<Label, Integer> labels)
- {
+ public CheckMethodAdapter(final int access, final String name,
+ final String desc, final MethodVisitor cmv,
+ final Map<Label, Integer> labels) {
this(new MethodNode(access, name, desc, null, null) {
@Override
public void visitEnd() {
- Analyzer<BasicValue> a = new Analyzer<BasicValue>(new BasicVerifier());
+ Analyzer<BasicValue> a = new Analyzer<BasicValue>(
+ new BasicVerifier());
try {
a.analyze("dummy", this);
} catch (Exception e) {
if (e instanceof IndexOutOfBoundsException
- && maxLocals == 0 && maxStack == 0)
- {
- throw new RuntimeException("Data flow checking option requires valid, non zero maxLocals and maxStack values.");
+ && maxLocals == 0 && maxStack == 0) {
+ throw new RuntimeException(
+ "Data flow checking option requires valid, non zero maxLocals and maxStack values.");
}
e.printStackTrace();
StringWriter sw = new StringWriter();
@@ -435,15 +457,13 @@ public class CheckMethodAdapter extends MethodVisitor {
}
accept(cmv);
}
- },
- labels);
+ }, labels);
+ this.access = access;
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
checkEndMethod();
checkDesc(desc, false);
return new CheckAnnotationAdapter(super.visitAnnotation(desc, visible));
@@ -456,68 +476,68 @@ public class CheckMethodAdapter extends MethodVisitor {
}
@Override
- public AnnotationVisitor visitParameterAnnotation(
- final int parameter,
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitParameterAnnotation(final int parameter,
+ final String desc, final boolean visible) {
checkEndMethod();
checkDesc(desc, false);
- return new CheckAnnotationAdapter(super.visitParameterAnnotation(parameter,
- desc,
- visible));
+ return new CheckAnnotationAdapter(super.visitParameterAnnotation(
+ parameter, desc, visible));
}
@Override
public void visitAttribute(final Attribute attr) {
checkEndMethod();
if (attr == null) {
- throw new IllegalArgumentException("Invalid attribute (must not be null)");
+ throw new IllegalArgumentException(
+ "Invalid attribute (must not be null)");
}
super.visitAttribute(attr);
}
@Override
public void visitCode() {
+ if ((access & Opcodes.ACC_ABSTRACT) != 0) {
+ throw new RuntimeException("Abstract methods cannot have code");
+ }
startCode = true;
super.visitCode();
}
@Override
- public void visitFrame(
- final int type,
- final int nLocal,
- final Object[] local,
- final int nStack,
- final Object[] stack)
- {
+ public void visitFrame(final int type, final int nLocal,
+ final Object[] local, final int nStack, final Object[] stack) {
+ if (insnCount == lastFrame) {
+ throw new IllegalStateException(
+ "At most one frame can be visited at a given code location.");
+ }
+ lastFrame = insnCount;
int mLocal;
int mStack;
switch (type) {
- case Opcodes.F_NEW:
- case Opcodes.F_FULL:
- mLocal = Integer.MAX_VALUE;
- mStack = Integer.MAX_VALUE;
- break;
+ case Opcodes.F_NEW:
+ case Opcodes.F_FULL:
+ mLocal = Integer.MAX_VALUE;
+ mStack = Integer.MAX_VALUE;
+ break;
- case Opcodes.F_SAME:
- mLocal = 0;
- mStack = 0;
- break;
+ case Opcodes.F_SAME:
+ mLocal = 0;
+ mStack = 0;
+ break;
- case Opcodes.F_SAME1:
- mLocal = 0;
- mStack = 1;
- break;
+ case Opcodes.F_SAME1:
+ mLocal = 0;
+ mStack = 1;
+ break;
- case Opcodes.F_APPEND:
- case Opcodes.F_CHOP:
- mLocal = 3;
- mStack = 0;
- break;
+ case Opcodes.F_APPEND:
+ case Opcodes.F_CHOP:
+ mLocal = 3;
+ mStack = 0;
+ break;
- default:
- throw new IllegalArgumentException("Invalid frame type " + type);
+ default:
+ throw new IllegalArgumentException("Invalid frame type " + type);
}
if (nLocal > mLocal) {
@@ -531,19 +551,29 @@ public class CheckMethodAdapter extends MethodVisitor {
if (type != Opcodes.F_CHOP) {
if (nLocal > 0 && (local == null || local.length < nLocal)) {
- throw new IllegalArgumentException("Array local[] is shorter than nLocal");
+ throw new IllegalArgumentException(
+ "Array local[] is shorter than nLocal");
}
for (int i = 0; i < nLocal; ++i) {
checkFrameValue(local[i]);
}
}
if (nStack > 0 && (stack == null || stack.length < nStack)) {
- throw new IllegalArgumentException("Array stack[] is shorter than nStack");
+ throw new IllegalArgumentException(
+ "Array stack[] is shorter than nStack");
}
for (int i = 0; i < nStack; ++i) {
checkFrameValue(stack[i]);
}
-
+ if (type == Opcodes.F_NEW) {
+ ++expandedFrames;
+ } else {
+ ++compressedFrames;
+ }
+ if (expandedFrames > 0 && compressedFrames > 0) {
+ throw new RuntimeException(
+ "Expanded and compressed frames must not be mixed.");
+ }
super.visitFrame(type, nLocal, local, nStack, stack);
}
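
Besides the reformatting, visitFrame above gains two new rules: at most one frame per code location, and no mixing of expanded (F_NEW) and compressed frames. A sketch of the first rule, assuming the vendored scala.tools.asm API (Demo and m are placeholder names):

    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;
    import scala.tools.asm.util.CheckClassAdapter;

    public class FrameCheckDemo {
        public static void main(String[] args) {
            CheckClassAdapter cv =
                    new CheckClassAdapter(new ClassWriter(0), false);
            cv.visit(Opcodes.V1_6, Opcodes.ACC_PUBLIC, "Demo", null,
                    "java/lang/Object", null);
            MethodVisitor mv = cv.visitMethod(Opcodes.ACC_PUBLIC, "m", "()V",
                    null, null);
            mv.visitCode();
            mv.visitInsn(Opcodes.NOP);
            // One compressed frame at this code location is fine...
            mv.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
            // ...but a second frame at the same location is rejected:
            mv.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
            // IllegalStateException: at most one frame per code location
        }
    }
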
@@ -562,18 +592,19 @@ public class CheckMethodAdapter extends MethodVisitor {
checkEndCode();
checkOpcode(opcode, 1);
switch (opcode) {
- case Opcodes.BIPUSH:
- checkSignedByte(operand, "Invalid operand");
- break;
- case Opcodes.SIPUSH:
- checkSignedShort(operand, "Invalid operand");
- break;
- // case Constants.NEWARRAY:
- default:
- if (operand < Opcodes.T_BOOLEAN || operand > Opcodes.T_LONG) {
- throw new IllegalArgumentException("Invalid operand (must be an array type code T_...): "
- + operand);
- }
+ case Opcodes.BIPUSH:
+ checkSignedByte(operand, "Invalid operand");
+ break;
+ case Opcodes.SIPUSH:
+ checkSignedShort(operand, "Invalid operand");
+ break;
+ // case Constants.NEWARRAY:
+ default:
+ if (operand < Opcodes.T_BOOLEAN || operand > Opcodes.T_LONG) {
+ throw new IllegalArgumentException(
+ "Invalid operand (must be an array type code T_...): "
+ + operand);
+ }
}
super.visitIntInsn(opcode, operand);
++insnCount;
@@ -596,20 +627,16 @@ public class CheckMethodAdapter extends MethodVisitor {
checkOpcode(opcode, 3);
checkInternalName(type, "type");
if (opcode == Opcodes.NEW && type.charAt(0) == '[') {
- throw new IllegalArgumentException("NEW cannot be used to create arrays: "
- + type);
+ throw new IllegalArgumentException(
+ "NEW cannot be used to create arrays: " + type);
}
super.visitTypeInsn(opcode, type);
++insnCount;
}
@Override
- public void visitFieldInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitFieldInsn(final int opcode, final String owner,
+ final String name, final String desc) {
checkStartCode();
checkEndCode();
checkOpcode(opcode, 4);
@@ -621,16 +648,14 @@ public class CheckMethodAdapter extends MethodVisitor {
}
@Override
- public void visitMethodInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc) {
checkStartCode();
checkEndCode();
checkOpcode(opcode, 5);
- checkMethodIdentifier(version, name, "name");
+ if (opcode != Opcodes.INVOKESPECIAL || !"<init>".equals(name)) {
+ checkMethodIdentifier(version, name, "name");
+ }
checkInternalName(owner, "owner");
checkMethodDesc(desc);
super.visitMethodInsn(opcode, owner, name, desc);
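
With the relaxed check above, "<init>" is accepted as the target of INVOKESPECIAL but still rejected for any other invoke opcode. A sketch, again with placeholder class and method names:

    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;
    import scala.tools.asm.util.CheckClassAdapter;

    public class InitCallDemo {
        public static void main(String[] args) {
            CheckClassAdapter cv =
                    new CheckClassAdapter(new ClassWriter(0), false);
            cv.visit(Opcodes.V1_5, Opcodes.ACC_PUBLIC, "Demo", null,
                    "java/lang/Object", null);
            MethodVisitor mv = cv.visitMethod(Opcodes.ACC_PUBLIC, "<init>",
                    "()V", null, null);
            mv.visitCode();
            mv.visitVarInsn(Opcodes.ALOAD, 0);
            // "<init>" is a legal target for INVOKESPECIAL...
            mv.visitMethodInsn(Opcodes.INVOKESPECIAL, "java/lang/Object",
                    "<init>", "()V");
            // ...but not for any other invoke opcode:
            mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/Object",
                    "<init>", "()V");   // IllegalArgumentException
        }
    }
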
@@ -638,19 +663,14 @@ public class CheckMethodAdapter extends MethodVisitor {
}
@Override
- public void visitInvokeDynamicInsn(
- String name,
- String desc,
- Handle bsm,
- Object... bsmArgs)
- {
+ public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
+ Object... bsmArgs) {
checkStartCode();
checkEndCode();
checkMethodIdentifier(version, name, "name");
checkMethodDesc(desc);
if (bsm.getTag() != Opcodes.H_INVOKESTATIC
- && bsm.getTag() != Opcodes.H_NEWINVOKESPECIAL)
- {
+ && bsm.getTag() != Opcodes.H_NEWINVOKESPECIAL) {
throw new IllegalArgumentException("invalid handle tag "
+ bsm.getTag());
}
@@ -705,12 +725,8 @@ public class CheckMethodAdapter extends MethodVisitor {
}
@Override
- public void visitTableSwitchInsn(
- final int min,
- final int max,
- final Label dflt,
- final Label... labels)
- {
+ public void visitTableSwitchInsn(final int min, final int max,
+ final Label dflt, final Label... labels) {
checkStartCode();
checkEndCode();
if (max < min) {
@@ -720,7 +736,8 @@ public class CheckMethodAdapter extends MethodVisitor {
checkLabel(dflt, false, "default label");
checkNonDebugLabel(dflt);
if (labels == null || labels.length != max - min + 1) {
- throw new IllegalArgumentException("There must be max - min + 1 labels");
+ throw new IllegalArgumentException(
+ "There must be max - min + 1 labels");
}
for (int i = 0; i < labels.length; ++i) {
checkLabel(labels[i], false, "label at index " + i);
@@ -734,17 +751,15 @@ public class CheckMethodAdapter extends MethodVisitor {
}
@Override
- public void visitLookupSwitchInsn(
- final Label dflt,
- final int[] keys,
- final Label[] labels)
- {
+ public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
+ final Label[] labels) {
checkEndCode();
checkStartCode();
checkLabel(dflt, false, "default label");
checkNonDebugLabel(dflt);
if (keys == null || labels == null || keys.length != labels.length) {
- throw new IllegalArgumentException("There must be the same number of keys and labels");
+ throw new IllegalArgumentException(
+ "There must be the same number of keys and labels");
}
for (int i = 0; i < labels.length; ++i) {
checkLabel(labels[i], false, "label at index " + i);
@@ -764,28 +779,26 @@ public class CheckMethodAdapter extends MethodVisitor {
checkEndCode();
checkDesc(desc, false);
if (desc.charAt(0) != '[') {
- throw new IllegalArgumentException("Invalid descriptor (must be an array type descriptor): "
- + desc);
+ throw new IllegalArgumentException(
+ "Invalid descriptor (must be an array type descriptor): "
+ + desc);
}
if (dims < 1) {
- throw new IllegalArgumentException("Invalid dimensions (must be greater than 0): "
- + dims);
+ throw new IllegalArgumentException(
+ "Invalid dimensions (must be greater than 0): " + dims);
}
if (dims > desc.lastIndexOf('[') + 1) {
- throw new IllegalArgumentException("Invalid dimensions (must not be greater than dims(desc)): "
- + dims);
+ throw new IllegalArgumentException(
+ "Invalid dimensions (must not be greater than dims(desc)): "
+ + dims);
}
super.visitMultiANewArrayInsn(desc, dims);
++insnCount;
}
@Override
- public void visitTryCatchBlock(
- final Label start,
- final Label end,
- final Label handler,
- final String type)
- {
+ public void visitTryCatchBlock(final Label start, final Label end,
+ final Label handler, final String type) {
checkStartCode();
checkEndCode();
checkLabel(start, false, "start label");
@@ -795,9 +808,9 @@ public class CheckMethodAdapter extends MethodVisitor {
checkNonDebugLabel(end);
checkNonDebugLabel(handler);
if (labels.get(start) != null || labels.get(end) != null
- || labels.get(handler) != null)
- {
- throw new IllegalStateException("Try catch blocks must be visited before their labels");
+ || labels.get(handler) != null) {
+ throw new IllegalStateException(
+ "Try catch blocks must be visited before their labels");
}
if (type != null) {
checkInternalName(type, "type");
@@ -808,14 +821,9 @@ public class CheckMethodAdapter extends MethodVisitor {
}
@Override
- public void visitLocalVariable(
- final String name,
- final String desc,
- final String signature,
- final Label start,
- final Label end,
- final int index)
- {
+ public void visitLocalVariable(final String name, final String desc,
+ final String signature, final Label start, final Label end,
+ final int index) {
checkStartCode();
checkEndCode();
checkUnqualifiedName(version, name, "name");
@@ -826,7 +834,8 @@ public class CheckMethodAdapter extends MethodVisitor {
int s = labels.get(start).intValue();
int e = labels.get(end).intValue();
if (e < s) {
- throw new IllegalArgumentException("Invalid start and end labels (end must be greater than start)");
+ throw new IllegalArgumentException(
+ "Invalid start and end labels (end must be greater than start)");
}
super.visitLocalVariable(name, desc, signature, start, end, index);
}
@@ -850,14 +859,16 @@ public class CheckMethodAdapter extends MethodVisitor {
throw new IllegalStateException("Undefined label used");
}
}
- for (int i = 0; i < handlers.size(); ) {
+ for (int i = 0; i < handlers.size();) {
Integer start = labels.get(handlers.get(i++));
Integer end = labels.get(handlers.get(i++));
if (start == null || end == null) {
- throw new IllegalStateException("Undefined try catch block labels");
+ throw new IllegalStateException(
+ "Undefined try catch block labels");
}
if (end.intValue() <= start.intValue()) {
- throw new IllegalStateException("Emty try catch block handler range");
+ throw new IllegalStateException(
+ "Emty try catch block handler range");
}
}
checkUnsignedShort(maxStack, "Invalid max stack");
@@ -879,7 +890,8 @@ public class CheckMethodAdapter extends MethodVisitor {
*/
void checkStartCode() {
if (!startCode) {
- throw new IllegalStateException("Cannot visit instructions before visitCode has been called.");
+ throw new IllegalStateException(
+ "Cannot visit instructions before visitCode has been called.");
}
}
@@ -888,7 +900,8 @@ public class CheckMethodAdapter extends MethodVisitor {
*/
void checkEndCode() {
if (endCode) {
- throw new IllegalStateException("Cannot visit instructions after visitMaxs has been called.");
+ throw new IllegalStateException(
+ "Cannot visit instructions after visitMaxs has been called.");
}
}
@@ -897,21 +910,22 @@ public class CheckMethodAdapter extends MethodVisitor {
*/
void checkEndMethod() {
if (endMethod) {
- throw new IllegalStateException("Cannot visit elements after visitEnd has been called.");
+ throw new IllegalStateException(
+ "Cannot visit elements after visitEnd has been called.");
}
}
/**
* Checks a stack frame value.
*
- * @param value the value to be checked.
+ * @param value
+ * the value to be checked.
*/
void checkFrameValue(final Object value) {
if (value == Opcodes.TOP || value == Opcodes.INTEGER
|| value == Opcodes.FLOAT || value == Opcodes.LONG
|| value == Opcodes.DOUBLE || value == Opcodes.NULL
- || value == Opcodes.UNINITIALIZED_THIS)
- {
+ || value == Opcodes.UNINITIALIZED_THIS) {
return;
}
if (value instanceof String) {
@@ -929,8 +943,10 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the type of the given opcode is equal to the given type.
*
- * @param opcode the opcode to be checked.
- * @param type the expected opcode type.
+ * @param opcode
+ * the opcode to be checked.
+ * @param type
+ * the expected opcode type.
*/
static void checkOpcode(final int opcode, final int type) {
if (opcode < 0 || opcode > 199 || TYPE[opcode] != type) {
@@ -941,8 +957,10 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the given value is a signed byte.
*
- * @param value the value to be checked.
- * @param msg an message to be used in case of error.
+ * @param value
+ * the value to be checked.
+ * @param msg
+ * a message to be used in case of error.
*/
static void checkSignedByte(final int value, final String msg) {
if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) {
@@ -954,8 +972,10 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the given value is a signed short.
*
- * @param value the value to be checked.
- * @param msg an message to be used in case of error.
+ * @param value
+ * the value to be checked.
+ * @param msg
+ * a message to be used in case of error.
*/
static void checkSignedShort(final int value, final String msg) {
if (value < Short.MIN_VALUE || value > Short.MAX_VALUE) {
@@ -967,8 +987,10 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the given value is an unsigned short.
*
- * @param value the value to be checked.
- * @param msg an message to be used in case of error.
+ * @param value
+ * the value to be checked.
+ * @param msg
+ * a message to be used in case of error.
*/
static void checkUnsignedShort(final int value, final String msg) {
if (value < 0 || value > 65535) {
@@ -981,13 +1003,13 @@ public class CheckMethodAdapter extends MethodVisitor {
* Checks that the given value is an {@link Integer}, a {@link Float}, a
* {@link Long}, a {@link Double} or a {@link String}.
*
- * @param cst the value to be checked.
+ * @param cst
+ * the value to be checked.
*/
static void checkConstant(final Object cst) {
if (!(cst instanceof Integer) && !(cst instanceof Float)
&& !(cst instanceof Long) && !(cst instanceof Double)
- && !(cst instanceof String))
- {
+ && !(cst instanceof String)) {
throw new IllegalArgumentException("Invalid constant: " + cst);
}
}
@@ -999,19 +1021,21 @@ public class CheckMethodAdapter extends MethodVisitor {
throw new IllegalArgumentException("Illegal LDC constant value");
}
if (s != Type.METHOD && (version & 0xFFFF) < Opcodes.V1_5) {
- throw new IllegalArgumentException("ldc of a constant class requires at least version 1.5");
+ throw new IllegalArgumentException(
+ "ldc of a constant class requires at least version 1.5");
}
if (s == Type.METHOD && (version & 0xFFFF) < Opcodes.V1_7) {
- throw new IllegalArgumentException("ldc of a method type requires at least version 1.7");
+ throw new IllegalArgumentException(
+ "ldc of a method type requires at least version 1.7");
}
} else if (cst instanceof Handle) {
if ((version & 0xFFFF) < Opcodes.V1_7) {
- throw new IllegalArgumentException("ldc of a handle requires at least version 1.7");
+ throw new IllegalArgumentException(
+ "ldc of a handle requires at least version 1.7");
}
int tag = ((Handle) cst).getTag();
if (tag < Opcodes.H_GETFIELD || tag > Opcodes.H_INVOKEINTERFACE) {
- throw new IllegalArgumentException("invalid handle tag "
- + tag);
+ throw new IllegalArgumentException("invalid handle tag " + tag);
}
} else {
checkConstant(cst);
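For context, the constants accepted by this ldc check fall into three version tiers: plain Integer/Float/Long/Double/String values are legal in any class file, Type constants for classes and arrays need version 1.5 or later, and method types and Handle values need 1.7 or later with a tag between H_GETFIELD and H_INVOKEINTERFACE. A small hedged sketch of constructing each kind (the class and member names are illustrative only):

    import scala.tools.asm.Handle;
    import scala.tools.asm.Opcodes;
    import scala.tools.asm.Type;

    public class LdcConstantExamples {
        public static void main(String[] args) {
            // Legal for any class version.
            Object plain = "hello";

            // Class constant (Type of sort OBJECT or ARRAY): needs V1_5 or later.
            Object classConst = Type.getType("Ljava/lang/String;");

            // Method type (Type.METHOD sort): needs V1_7 or later.
            Object methodType = Type.getMethodType("(I)I");

            // Method handle: needs V1_7 or later, tag in H_GETFIELD..H_INVOKEINTERFACE.
            Object handle = new Handle(Opcodes.H_INVOKESTATIC,
                    "java/lang/Integer", "valueOf", "(I)Ljava/lang/Integer;");

            System.out.println(plain + ", " + classConst + ", " + methodType + ", " + handle);
        }
    }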
@@ -1021,15 +1045,15 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the given string is a valid unqualified name.
*
- * @param version the class version.
- * @param name the string to be checked.
- * @param msg a message to be used in case of error.
+ * @param version
+ * the class version.
+ * @param name
+ * the string to be checked.
+ * @param msg
+ * a message to be used in case of error.
*/
- static void checkUnqualifiedName(
- int version,
- final String name,
- final String msg)
- {
+ static void checkUnqualifiedName(int version, final String name,
+ final String msg) {
if ((version & 0xFFFF) < Opcodes.V1_5) {
checkIdentifier(name, msg);
} else {
@@ -1045,8 +1069,10 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the given string is a valid Java identifier.
*
- * @param name the string to be checked.
- * @param msg a message to be used in case of error.
+ * @param name
+ * the string to be checked.
+ * @param msg
+ * a message to be used in case of error.
*/
static void checkIdentifier(final String name, final String msg) {
checkIdentifier(name, 0, -1, msg);
@@ -1055,21 +1081,20 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the given substring is a valid Java identifier.
*
- * @param name the string to be checked.
- * @param start index of the first character of the identifier (inclusive).
- * @param end index of the last character of the identifier (exclusive). -1
- * is equivalent to <tt>name.length()</tt> if name is not
- * <tt>null</tt>.
- * @param msg a message to be used in case of error.
+ * @param name
+ * the string to be checked.
+ * @param start
+ * index of the first character of the identifier (inclusive).
+ * @param end
+ * index of the last character of the identifier (exclusive). -1
+ * is equivalent to <tt>name.length()</tt> if name is not
+ * <tt>null</tt>.
+ * @param msg
+ * a message to be used in case of error.
*/
- static void checkIdentifier(
- final String name,
- final int start,
- final int end,
- final String msg)
- {
- if (name == null || (end == -1 ? name.length() <= start : end <= start))
- {
+ static void checkIdentifier(final String name, final int start,
+ final int end, final String msg) {
+ if (name == null || (end == -1 ? name.length() <= start : end <= start)) {
throw new IllegalArgumentException("Invalid " + msg
+ " (must not be null or empty)");
}
@@ -1087,25 +1112,21 @@ public class CheckMethodAdapter extends MethodVisitor {
}
/**
- * Checks that the given string is a valid Java identifier or is equal to
- * '&lt;init&gt;' or '&lt;clinit&gt;'.
+ * Checks that the given string is a valid Java identifier.
*
- * @param version the class version.
- * @param name the string to be checked.
- * @param msg a message to be used in case of error.
+ * @param version
+ * the class version.
+ * @param name
+ * the string to be checked.
+ * @param msg
+ * a message to be used in case of error.
*/
- static void checkMethodIdentifier(
- int version,
- final String name,
- final String msg)
- {
+ static void checkMethodIdentifier(int version, final String name,
+ final String msg) {
if (name == null || name.length() == 0) {
throw new IllegalArgumentException("Invalid " + msg
+ " (must not be null or empty)");
}
- if ("<init>".equals(name) || "<clinit>".equals(name)) {
- return;
- }
if ((version & 0xFFFF) >= Opcodes.V1_5) {
for (int i = 0; i < name.length(); ++i) {
if (".;[/<>".indexOf(name.charAt(i)) != -1) {
@@ -1116,17 +1137,19 @@ public class CheckMethodAdapter extends MethodVisitor {
return;
}
if (!Character.isJavaIdentifierStart(name.charAt(0))) {
- throw new IllegalArgumentException("Invalid "
- + msg
- + " (must be a '<init>', '<clinit>' or a valid Java identifier): "
- + name);
+ throw new IllegalArgumentException(
+ "Invalid "
+ + msg
+ + " (must be a '<init>', '<clinit>' or a valid Java identifier): "
+ + name);
}
for (int i = 1; i < name.length(); ++i) {
if (!Character.isJavaIdentifierPart(name.charAt(i))) {
- throw new IllegalArgumentException("Invalid "
- + msg
- + " (must be '<init>' or '<clinit>' or a valid Java identifier): "
- + name);
+ throw new IllegalArgumentException(
+ "Invalid "
+ + msg
+ + " (must be '<init>' or '<clinit>' or a valid Java identifier): "
+ + name);
}
}
}
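As a rough illustration of what these identifier checks accept: the test is Character.isJavaIdentifierStart for the first character and Character.isJavaIdentifierPart for the rest, and checkMethodIdentifier additionally rejects the characters ".;[/<>" for class files of version 1.5 and above (the early return for the special names <init> and <clinit> is removed here, so presumably the caller now filters those separately). A stand-alone approximation, since the real methods are package-private:

    public class IdentifierCheckSketch {
        // Mirrors the loop above; the real check also threads a `msg` string
        // through for error reporting and handles the start/end substring form.
        static boolean isJavaIdentifier(String name) {
            if (name == null || name.length() == 0
                    || !Character.isJavaIdentifierStart(name.charAt(0))) {
                return false;
            }
            for (int i = 1; i < name.length(); ++i) {
                if (!Character.isJavaIdentifierPart(name.charAt(i))) {
                    return false;
                }
            }
            return true;
        }

        public static void main(String[] args) {
            System.out.println(isJavaIdentifier("value"));   // true
            System.out.println(isJavaIdentifier("2nd"));     // false: digit cannot start a name
            System.out.println(isJavaIdentifier("foo.bar")); // false: '.' is not an identifier part
        }
    }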
@@ -1134,8 +1157,10 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the given string is a valid internal class name.
*
- * @param name the string to be checked.
- * @param msg a message to be used in case of error.
+ * @param name
+ * the string to be checked.
+ * @param msg
+ * a message to be used in case of error.
*/
static void checkInternalName(final String name, final String msg) {
if (name == null || name.length() == 0) {
@@ -1152,19 +1177,19 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the given substring is a valid internal class name.
*
- * @param name the string to be checked.
- * @param start index of the first character of the identifier (inclusive).
- * @param end index of the last character of the identifier (exclusive). -1
- * is equivalent to <tt>name.length()</tt> if name is not
- * <tt>null</tt>.
- * @param msg a message to be used in case of error.
+ * @param name
+ * the string to be checked.
+ * @param start
+ * index of the first character of the identifier (inclusive).
+ * @param end
+ * index of the last character of the identifier (exclusive). -1
+ * is equivalent to <tt>name.length()</tt> if name is not
+ * <tt>null</tt>.
+ * @param msg
+ * a message to be used in case of error.
*/
- static void checkInternalName(
- final String name,
- final int start,
- final int end,
- final String msg)
- {
+ static void checkInternalName(final String name, final int start,
+ final int end, final String msg) {
int max = end == -1 ? name.length() : end;
try {
int begin = start;
@@ -1178,18 +1203,21 @@ public class CheckMethodAdapter extends MethodVisitor {
begin = slash + 1;
} while (slash != max);
} catch (IllegalArgumentException _) {
- throw new IllegalArgumentException("Invalid "
- + msg
- + " (must be a fully qualified class name in internal form): "
- + name);
+ throw new IllegalArgumentException(
+ "Invalid "
+ + msg
+ + " (must be a fully qualified class name in internal form): "
+ + name);
}
}
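For reference, an internal class name is the fully qualified name with '/' instead of '.', such as "java/lang/String"; it carries no leading 'L' and no trailing ';', which is what distinguishes it from a descriptor. A short sketch using the public Type API to move between the two forms:

    import scala.tools.asm.Type;

    public class InternalNameExamples {
        public static void main(String[] args) {
            // Internal name of a class: "java/lang/String"
            String internal = Type.getInternalName(String.class);

            // The corresponding descriptor: "Ljava/lang/String;"
            String desc = Type.getObjectType(internal).getDescriptor();

            System.out.println(internal + " -> " + desc);
            // Both "java.lang.String" (dotted) and "Ljava/lang/String;" (a descriptor)
            // would be rejected by checkInternalName above.
        }
    }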
/**
* Checks that the given string is a valid type descriptor.
*
- * @param desc the string to be checked.
- * @param canBeVoid <tt>true</tt> if <tt>V</tt> can be considered valid.
+ * @param desc
+ * the string to be checked.
+ * @param canBeVoid
+ * <tt>true</tt> if <tt>V</tt> can be considered valid.
*/
static void checkDesc(final String desc, final boolean canBeVoid) {
int end = checkDesc(desc, 0, canBeVoid);
@@ -1201,75 +1229,77 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the given substring is a valid type descriptor.
*
- * @param desc the string to be checked.
- * @param start index of the first character of the identifier (inclusive).
- * @param canBeVoid <tt>true</tt> if <tt>V</tt> can be considered valid.
+ * @param desc
+ * the string to be checked.
+ * @param start
+ * index of the first character of the identifier (inclusive).
+ * @param canBeVoid
+ * <tt>true</tt> if <tt>V</tt> can be considered valid.
* @return the index of the last character of the type descriptor, plus one.
*/
- static int checkDesc(
- final String desc,
- final int start,
- final boolean canBeVoid)
- {
+ static int checkDesc(final String desc, final int start,
+ final boolean canBeVoid) {
if (desc == null || start >= desc.length()) {
- throw new IllegalArgumentException("Invalid type descriptor (must not be null or empty)");
+ throw new IllegalArgumentException(
+ "Invalid type descriptor (must not be null or empty)");
}
int index;
switch (desc.charAt(start)) {
- case 'V':
- if (canBeVoid) {
- return start + 1;
- } else {
- throw new IllegalArgumentException("Invalid descriptor: "
- + desc);
- }
- case 'Z':
- case 'C':
- case 'B':
- case 'S':
- case 'I':
- case 'F':
- case 'J':
- case 'D':
+ case 'V':
+ if (canBeVoid) {
return start + 1;
- case '[':
- index = start + 1;
- while (index < desc.length() && desc.charAt(index) == '[') {
- ++index;
- }
- if (index < desc.length()) {
- return checkDesc(desc, index, false);
- } else {
- throw new IllegalArgumentException("Invalid descriptor: "
- + desc);
- }
- case 'L':
- index = desc.indexOf(';', start);
- if (index == -1 || index - start < 2) {
- throw new IllegalArgumentException("Invalid descriptor: "
- + desc);
- }
- try {
- checkInternalName(desc, start + 1, index, null);
- } catch (IllegalArgumentException _) {
- throw new IllegalArgumentException("Invalid descriptor: "
- + desc);
- }
- return index + 1;
- default:
+ } else {
+ throw new IllegalArgumentException("Invalid descriptor: "
+ + desc);
+ }
+ case 'Z':
+ case 'C':
+ case 'B':
+ case 'S':
+ case 'I':
+ case 'F':
+ case 'J':
+ case 'D':
+ return start + 1;
+ case '[':
+ index = start + 1;
+ while (index < desc.length() && desc.charAt(index) == '[') {
+ ++index;
+ }
+ if (index < desc.length()) {
+ return checkDesc(desc, index, false);
+ } else {
throw new IllegalArgumentException("Invalid descriptor: "
+ desc);
+ }
+ case 'L':
+ index = desc.indexOf(';', start);
+ if (index == -1 || index - start < 2) {
+ throw new IllegalArgumentException("Invalid descriptor: "
+ + desc);
+ }
+ try {
+ checkInternalName(desc, start + 1, index, null);
+ } catch (IllegalArgumentException _) {
+ throw new IllegalArgumentException("Invalid descriptor: "
+ + desc);
+ }
+ return index + 1;
+ default:
+ throw new IllegalArgumentException("Invalid descriptor: " + desc);
}
}
/**
* Checks that the given string is a valid method descriptor.
*
- * @param desc the string to be checked.
+ * @param desc
+ * the string to be checked.
*/
static void checkMethodDesc(final String desc) {
if (desc == null || desc.length() == 0) {
- throw new IllegalArgumentException("Invalid method descriptor (must not be null or empty)");
+ throw new IllegalArgumentException(
+ "Invalid method descriptor (must not be null or empty)");
}
if (desc.charAt(0) != '(' || desc.length() < 3) {
throw new IllegalArgumentException("Invalid descriptor: " + desc);
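The switch above walks the type descriptor grammar (the primitive letters Z C B S I F J D, 'V' only where void is allowed, '[' prefixes for array dimensions, and 'L<internal name>;' for object types), and checkMethodDesc then wraps it for the '(<argument descriptors>)<return descriptor>' form. A few concrete values obtained through the public Type API, for orientation (the class literals are arbitrary):

    import scala.tools.asm.Type;

    public class DescriptorExamples {
        public static void main(String[] args) {
            // Field/type descriptors.
            System.out.println(Type.INT_TYPE.getDescriptor());     // "I"
            System.out.println(Type.getDescriptor(String.class));  // "Ljava/lang/String;"
            System.out.println(Type.getDescriptor(int[][].class)); // "[[I"

            // A method descriptor: takes an int and a String, returns long[].
            System.out.println(Type.getMethodDescriptor(
                    Type.getType(long[].class),                    // return type
                    Type.INT_TYPE, Type.getType(String.class)));   // "(ILjava/lang/String;)[J"

            // checkDesc would reject "Ljava/lang/String" (missing ';'), and
            // checkMethodDesc rejects anything that does not start with '('.
        }
    }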
@@ -1291,322 +1321,18 @@ public class CheckMethodAdapter extends MethodVisitor {
}
/**
- * Checks a class signature.
- *
- * @param signature a string containing the signature that must be checked.
- */
- static void checkClassSignature(final String signature) {
- // ClassSignature:
- // FormalTypeParameters? ClassTypeSignature ClassTypeSignature*
-
- int pos = 0;
- if (getChar(signature, 0) == '<') {
- pos = checkFormalTypeParameters(signature, pos);
- }
- pos = checkClassTypeSignature(signature, pos);
- while (getChar(signature, pos) == 'L') {
- pos = checkClassTypeSignature(signature, pos);
- }
- if (pos != signature.length()) {
- throw new IllegalArgumentException(signature + ": error at index "
- + pos);
- }
- }
-
- /**
- * Checks a method signature.
- *
- * @param signature a string containing the signature that must be checked.
- */
- static void checkMethodSignature(final String signature) {
- // MethodTypeSignature:
- // FormalTypeParameters? ( TypeSignature* ) ( TypeSignature | V ) (
- // ^ClassTypeSignature | ^TypeVariableSignature )*
-
- int pos = 0;
- if (getChar(signature, 0) == '<') {
- pos = checkFormalTypeParameters(signature, pos);
- }
- pos = checkChar('(', signature, pos);
- while ("ZCBSIFJDL[T".indexOf(getChar(signature, pos)) != -1) {
- pos = checkTypeSignature(signature, pos);
- }
- pos = checkChar(')', signature, pos);
- if (getChar(signature, pos) == 'V') {
- ++pos;
- } else {
- pos = checkTypeSignature(signature, pos);
- }
- while (getChar(signature, pos) == '^') {
- ++pos;
- if (getChar(signature, pos) == 'L') {
- pos = checkClassTypeSignature(signature, pos);
- } else {
- pos = checkTypeVariableSignature(signature, pos);
- }
- }
- if (pos != signature.length()) {
- throw new IllegalArgumentException(signature + ": error at index "
- + pos);
- }
- }
-
- /**
- * Checks a field signature.
- *
- * @param signature a string containing the signature that must be checked.
- */
- static void checkFieldSignature(final String signature) {
- int pos = checkFieldTypeSignature(signature, 0);
- if (pos != signature.length()) {
- throw new IllegalArgumentException(signature + ": error at index "
- + pos);
- }
- }
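The three entry points above check the generic signature grammar spelled out in the comments of the helpers that follow; the whole block is removed here, together with SignatureChecker.java further down in this diff, and equivalent checking remains available through the SignatureVisitor-based CheckSignatureAdapter. For orientation, a few well-formed signature strings of each kind (hand-written against the grammar, so treat them as illustrative only):

    public class SignatureExamples {
        // class Box<T> extends Object implements Comparable<Box<T>>
        // (assumes a class Box in the default package)
        static final String CLASS_SIG =
            "<T:Ljava/lang/Object;>Ljava/lang/Object;Ljava/lang/Comparable<LBox<TT;>;>;";

        // <E> List<E> copy(Collection<? extends E>) throws IOException
        // (the '^' clause encodes the throws part)
        static final String METHOD_SIG =
            "<E:Ljava/lang/Object;>(Ljava/util/Collection<+TE;>;)Ljava/util/List<TE;>;^Ljava/io/IOException;";

        // a field of type Map<String, List<Integer>>
        static final String FIELD_SIG =
            "Ljava/util/Map<Ljava/lang/String;Ljava/util/List<Ljava/lang/Integer;>;>;";
    }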
-
- /**
- * Checks the formal type parameters of a class or method signature.
- *
- * @param signature a string containing the signature that must be checked.
- * @param pos index of first character to be checked.
- * @return the index of the first character after the checked part.
- */
- private static int checkFormalTypeParameters(final String signature, int pos)
- {
- // FormalTypeParameters:
- // < FormalTypeParameter+ >
-
- pos = checkChar('<', signature, pos);
- pos = checkFormalTypeParameter(signature, pos);
- while (getChar(signature, pos) != '>') {
- pos = checkFormalTypeParameter(signature, pos);
- }
- return pos + 1;
- }
-
- /**
- * Checks a formal type parameter of a class or method signature.
- *
- * @param signature a string containing the signature that must be checked.
- * @param pos index of first character to be checked.
- * @return the index of the first character after the checked part.
- */
- private static int checkFormalTypeParameter(final String signature, int pos)
- {
- // FormalTypeParameter:
- // Identifier : FieldTypeSignature? (: FieldTypeSignature)*
-
- pos = checkIdentifier(signature, pos);
- pos = checkChar(':', signature, pos);
- if ("L[T".indexOf(getChar(signature, pos)) != -1) {
- pos = checkFieldTypeSignature(signature, pos);
- }
- while (getChar(signature, pos) == ':') {
- pos = checkFieldTypeSignature(signature, pos + 1);
- }
- return pos;
- }
-
- /**
- * Checks a field type signature.
- *
- * @param signature a string containing the signature that must be checked.
- * @param pos index of first character to be checked.
- * @return the index of the first character after the checked part.
- */
- private static int checkFieldTypeSignature(final String signature, int pos)
- {
- // FieldTypeSignature:
- // ClassTypeSignature | ArrayTypeSignature | TypeVariableSignature
- //
- // ArrayTypeSignature:
- // [ TypeSignature
-
- switch (getChar(signature, pos)) {
- case 'L':
- return checkClassTypeSignature(signature, pos);
- case '[':
- return checkTypeSignature(signature, pos + 1);
- default:
- return checkTypeVariableSignature(signature, pos);
- }
- }
-
- /**
- * Checks a class type signature.
- *
- * @param signature a string containing the signature that must be checked.
- * @param pos index of first character to be checked.
- * @return the index of the first character after the checked part.
- */
- private static int checkClassTypeSignature(final String signature, int pos)
- {
- // ClassTypeSignature:
- // L Identifier ( / Identifier )* TypeArguments? ( . Identifier
- // TypeArguments? )* ;
-
- pos = checkChar('L', signature, pos);
- pos = checkIdentifier(signature, pos);
- while (getChar(signature, pos) == '/') {
- pos = checkIdentifier(signature, pos + 1);
- }
- if (getChar(signature, pos) == '<') {
- pos = checkTypeArguments(signature, pos);
- }
- while (getChar(signature, pos) == '.') {
- pos = checkIdentifier(signature, pos + 1);
- if (getChar(signature, pos) == '<') {
- pos = checkTypeArguments(signature, pos);
- }
- }
- return checkChar(';', signature, pos);
- }
-
- /**
- * Checks the type arguments in a class type signature.
- *
- * @param signature a string containing the signature that must be checked.
- * @param pos index of first character to be checked.
- * @return the index of the first character after the checked part.
- */
- private static int checkTypeArguments(final String signature, int pos) {
- // TypeArguments:
- // < TypeArgument+ >
-
- pos = checkChar('<', signature, pos);
- pos = checkTypeArgument(signature, pos);
- while (getChar(signature, pos) != '>') {
- pos = checkTypeArgument(signature, pos);
- }
- return pos + 1;
- }
-
- /**
- * Checks a type argument in a class type signature.
- *
- * @param signature a string containing the signature that must be checked.
- * @param pos index of first character to be checked.
- * @return the index of the first character after the checked part.
- */
- private static int checkTypeArgument(final String signature, int pos) {
- // TypeArgument:
- // * | ( ( + | - )? FieldTypeSignature )
-
- char c = getChar(signature, pos);
- if (c == '*') {
- return pos + 1;
- } else if (c == '+' || c == '-') {
- pos++;
- }
- return checkFieldTypeSignature(signature, pos);
- }
-
- /**
- * Checks a type variable signature.
- *
- * @param signature a string containing the signature that must be checked.
- * @param pos index of first character to be checked.
- * @return the index of the first character after the checked part.
- */
- private static int checkTypeVariableSignature(
- final String signature,
- int pos)
- {
- // TypeVariableSignature:
- // T Identifier ;
-
- pos = checkChar('T', signature, pos);
- pos = checkIdentifier(signature, pos);
- return checkChar(';', signature, pos);
- }
-
- /**
- * Checks a type signature.
- *
- * @param signature a string containing the signature that must be checked.
- * @param pos index of first character to be checked.
- * @return the index of the first character after the checked part.
- */
- private static int checkTypeSignature(final String signature, int pos) {
- // TypeSignature:
- // Z | C | B | S | I | F | J | D | FieldTypeSignature
-
- switch (getChar(signature, pos)) {
- case 'Z':
- case 'C':
- case 'B':
- case 'S':
- case 'I':
- case 'F':
- case 'J':
- case 'D':
- return pos + 1;
- default:
- return checkFieldTypeSignature(signature, pos);
- }
- }
-
- /**
- * Checks an identifier.
- *
- * @param signature a string containing the signature that must be checked.
- * @param pos index of first character to be checked.
- * @return the index of the first character after the checked part.
- */
- private static int checkIdentifier(final String signature, int pos) {
- if (!Character.isJavaIdentifierStart(getChar(signature, pos))) {
- throw new IllegalArgumentException(signature
- + ": identifier expected at index " + pos);
- }
- ++pos;
- while (Character.isJavaIdentifierPart(getChar(signature, pos))) {
- ++pos;
- }
- return pos;
- }
-
- /**
- * Checks a single character.
- *
- * @param signature a string containing the signature that must be checked.
- * @param pos index of first character to be checked.
- * @return the index of the first character after the checked part.
- */
- private static int checkChar(final char c, final String signature, int pos)
- {
- if (getChar(signature, pos) == c) {
- return pos + 1;
- }
- throw new IllegalArgumentException(signature + ": '" + c
- + "' expected at index " + pos);
- }
-
- /**
- * Returns the signature character at the given index.
- *
- * @param signature a signature.
- * @param pos an index in signature.
- * @return the character at the given index, or 0 if there is no such
- * character.
- */
- private static char getChar(final String signature, int pos) {
- return pos < signature.length() ? signature.charAt(pos) : (char) 0;
- }
-
- /**
* Checks that the given label is not null. This method can also check that
* the label has been visited.
*
- * @param label the label to be checked.
- * @param checkVisited <tt>true</tt> to check that the label has been
- * visited.
- * @param msg a message to be used in case of error.
+ * @param label
+ * the label to be checked.
+ * @param checkVisited
+ * <tt>true</tt> to check that the label has been visited.
+ * @param msg
+ * a message to be used in case of error.
*/
- void checkLabel(
- final Label label,
- final boolean checkVisited,
- final String msg)
- {
+ void checkLabel(final Label label, final boolean checkVisited,
+ final String msg) {
if (label == null) {
throw new IllegalArgumentException("Invalid " + msg
+ " (must not be null)");
@@ -1620,7 +1346,8 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the given label is not a label used only for debug purposes.
*
- * @param label the label to be checked.
+ * @param label
+ * the label to be checked.
*/
private static void checkNonDebugLabel(final Label label) {
Field f = getLabelStatusField();
@@ -1631,7 +1358,8 @@ public class CheckMethodAdapter extends MethodVisitor {
throw new Error("Internal error");
}
if ((status & 0x01) != 0) {
- throw new IllegalArgumentException("Labels used for debug info cannot be reused for control flow");
+ throw new IllegalArgumentException(
+ "Labels used for debug info cannot be reused for control flow");
}
}
@@ -1653,7 +1381,8 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Returns the field of the Label class whose name is given.
*
- * @param name a field name.
+ * @param name
+ * a field name.
* @return the field of the Label class whose name is given, or null.
*/
private static Field getLabelField(final String name) {
diff --git a/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java b/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java
index 3a6c3e780f..e69302b8a6 100644
--- a/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java
@@ -41,19 +41,22 @@ public class CheckSignatureAdapter extends SignatureVisitor {
/**
* Type to be used to check class signatures. See
- * {@link #CheckSignatureAdapter(int, SignatureVisitor) CheckSignatureAdapter}.
+ * {@link #CheckSignatureAdapter(int, SignatureVisitor)
+ * CheckSignatureAdapter}.
*/
public static final int CLASS_SIGNATURE = 0;
/**
* Type to be used to check method signatures. See
- * {@link #CheckSignatureAdapter(int, SignatureVisitor) CheckSignatureAdapter}.
+ * {@link #CheckSignatureAdapter(int, SignatureVisitor)
+ * CheckSignatureAdapter}.
*/
public static final int METHOD_SIGNATURE = 1;
/**
* Type to be used to check type signatures. See
- * {@link #CheckSignatureAdapter(int, SignatureVisitor) CheckSignatureAdapter}.
+ * {@link #CheckSignatureAdapter(int, SignatureVisitor)
+ * CheckSignatureAdapter}.
*/
public static final int TYPE_SIGNATURE = 2;
@@ -101,11 +104,13 @@ public class CheckSignatureAdapter extends SignatureVisitor {
* not use this constructor</i>. Instead, they must use the
* {@link #CheckSignatureAdapter(int, int, SignatureVisitor)} version.
*
- * @param type the type of signature to be checked. See
- * {@link #CLASS_SIGNATURE}, {@link #METHOD_SIGNATURE} and
- * {@link #TYPE_SIGNATURE}.
- * @param sv the visitor to which this adapter must delegate calls. May be
- * <tt>null</tt>.
+ * @param type
+ * the type of signature to be checked. See
+ * {@link #CLASS_SIGNATURE}, {@link #METHOD_SIGNATURE} and
+ * {@link #TYPE_SIGNATURE}.
+ * @param sv
+ * the visitor to which this adapter must delegate calls. May be
+ * <tt>null</tt>.
*/
public CheckSignatureAdapter(final int type, final SignatureVisitor sv) {
this(Opcodes.ASM4, type, sv);
@@ -114,19 +119,19 @@ public class CheckSignatureAdapter extends SignatureVisitor {
/**
* Creates a new {@link CheckSignatureAdapter} object.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
- * @param type the type of signature to be checked. See
- * {@link #CLASS_SIGNATURE}, {@link #METHOD_SIGNATURE} and
- * {@link #TYPE_SIGNATURE}.
- * @param sv the visitor to which this adapter must delegate calls. May be
- * <tt>null</tt>.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param type
+ * the type of signature to be checked. See
+ * {@link #CLASS_SIGNATURE}, {@link #METHOD_SIGNATURE} and
+ * {@link #TYPE_SIGNATURE}.
+ * @param sv
+ * the visitor to which this adapter must delegate calls. May be
+ * <tt>null</tt>.
*/
- protected CheckSignatureAdapter(
- final int api,
- final int type,
- final SignatureVisitor sv)
- {
+ protected CheckSignatureAdapter(final int api, final int type,
+ final SignatureVisitor sv) {
super(api);
this.type = type;
this.state = EMPTY;
@@ -138,8 +143,7 @@ public class CheckSignatureAdapter extends SignatureVisitor {
@Override
public void visitFormalTypeParameter(final String name) {
if (type == TYPE_SIGNATURE
- || (state != EMPTY && state != FORMAL && state != BOUND))
- {
+ || (state != EMPTY && state != FORMAL && state != BOUND)) {
throw new IllegalStateException();
}
CheckMethodAdapter.checkIdentifier(name, "formal type parameter");
@@ -172,8 +176,7 @@ public class CheckSignatureAdapter extends SignatureVisitor {
@Override
public SignatureVisitor visitSuperclass() {
- if (type != CLASS_SIGNATURE || (state & (EMPTY | FORMAL | BOUND)) == 0)
- {
+ if (type != CLASS_SIGNATURE || (state & (EMPTY | FORMAL | BOUND)) == 0) {
throw new IllegalArgumentException();
}
state = SUPER;
@@ -195,8 +198,7 @@ public class CheckSignatureAdapter extends SignatureVisitor {
@Override
public SignatureVisitor visitParameterType() {
if (type != METHOD_SIGNATURE
- || (state & (EMPTY | FORMAL | BOUND | PARAM)) == 0)
- {
+ || (state & (EMPTY | FORMAL | BOUND | PARAM)) == 0) {
throw new IllegalArgumentException();
}
state = PARAM;
@@ -207,8 +209,7 @@ public class CheckSignatureAdapter extends SignatureVisitor {
@Override
public SignatureVisitor visitReturnType() {
if (type != METHOD_SIGNATURE
- || (state & (EMPTY | FORMAL | BOUND | PARAM)) == 0)
- {
+ || (state & (EMPTY | FORMAL | BOUND | PARAM)) == 0) {
throw new IllegalArgumentException();
}
state = RETURN;
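A short sketch of how this adapter is normally driven: a SignatureReader parses the signature string and replays it on the visitor, and the adapter throws if an event arrives in an illegal state. The signature literal below is only an example value:

    import scala.tools.asm.signature.SignatureReader;
    import scala.tools.asm.util.CheckSignatureAdapter;

    public class CheckSignatureDemo {
        public static void main(String[] args) {
            String classSig = "<T:Ljava/lang/Object;>Ljava/lang/Object;";

            // Validate a class signature; the second argument is null because we
            // only want the checks, not delegation to a further SignatureVisitor.
            new SignatureReader(classSig).accept(
                    new CheckSignatureAdapter(CheckSignatureAdapter.CLASS_SIGNATURE, null));

            // A field/type signature would go through acceptType(...) with
            // TYPE_SIGNATURE instead.
            System.out.println("signature accepted");
        }
    }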
diff --git a/src/asm/scala/tools/asm/util/Printer.java b/src/asm/scala/tools/asm/util/Printer.java
index c39fd548ce..86e0f9e122 100644
--- a/src/asm/scala/tools/asm/util/Printer.java
+++ b/src/asm/scala/tools/asm/util/Printer.java
@@ -52,14 +52,14 @@ public abstract class Printer {
/**
* The names of the <code>operand</code> parameter values of the
- * {@link org.objectweb.asm.MethodVisitor#visitIntInsn} method when
+ * {@link scala.tools.asm.MethodVisitor#visitIntInsn} method when
* <code>opcode</code> is <code>NEWARRAY</code>.
*/
public static final String[] TYPES;
/**
* The names of the <code>tag</code> field values for
- * {@link org.objectweb.asm.Handle}.
+ * {@link scala.tools.asm.Handle}.
*/
public static final String[] HANDLE_TAG;
@@ -103,8 +103,8 @@ public abstract class Printer {
}
s = "H_GETFIELD,H_GETSTATIC,H_PUTFIELD,H_PUTSTATIC,"
- + "H_INVOKEVIRTUAL,H_INVOKESTATIC,H_INVOKESPECIAL,"
- + "H_NEWINVOKESPECIAL,H_INVOKEINTERFACE,";
+ + "H_INVOKEVIRTUAL,H_INVOKESTATIC,H_INVOKESPECIAL,"
+ + "H_NEWINVOKESPECIAL,H_INVOKEINTERFACE,";
HANDLE_TAG = new String[10];
j = 0;
i = 1;
@@ -149,81 +149,58 @@ public abstract class Printer {
}
/**
- * Class header.
- * See {@link org.objectweb.asm.ClassVisitor#visit}.
+ * Class header. See {@link scala.tools.asm.ClassVisitor#visit}.
*/
- public abstract void visit(
- final int version,
- final int access,
- final String name,
- final String signature,
- final String superName,
- final String[] interfaces);
+ public abstract void visit(final int version, final int access,
+ final String name, final String signature, final String superName,
+ final String[] interfaces);
/**
- * Class source.
- * See {@link org.objectweb.asm.ClassVisitor#visitSource}.
+ * Class source. See {@link scala.tools.asm.ClassVisitor#visitSource}.
*/
public abstract void visitSource(final String file, final String debug);
/**
- * Class outer class.
- * See {@link org.objectweb.asm.ClassVisitor#visitOuterClass}.
+ * Class outer class. See
+ * {@link scala.tools.asm.ClassVisitor#visitOuterClass}.
*/
- public abstract void visitOuterClass(
- final String owner,
- final String name,
- final String desc);
+ public abstract void visitOuterClass(final String owner, final String name,
+ final String desc);
/**
- * Class annotation.
- * See {@link org.objectweb.asm.ClassVisitor#visitAnnotation}.
+ * Class annotation. See
+ * {@link scala.tools.asm.ClassVisitor#visitAnnotation}.
*/
- public abstract Printer visitClassAnnotation(
- final String desc,
- final boolean visible);
+ public abstract Printer visitClassAnnotation(final String desc,
+ final boolean visible);
/**
- * Class attribute.
- * See {@link org.objectweb.asm.ClassVisitor#visitAttribute}.
+ * Class attribute. See
+ * {@link scala.tools.asm.ClassVisitor#visitAttribute}.
*/
public abstract void visitClassAttribute(final Attribute attr);
/**
- * Class inner name.
- * See {@link org.objectweb.asm.ClassVisitor#visitInnerClass}.
+ * Class inner name. See
+ * {@link scala.tools.asm.ClassVisitor#visitInnerClass}.
*/
- public abstract void visitInnerClass(
- final String name,
- final String outerName,
- final String innerName,
- final int access);
+ public abstract void visitInnerClass(final String name,
+ final String outerName, final String innerName, final int access);
/**
- * Class field.
- * See {@link org.objectweb.asm.ClassVisitor#visitField}.
+ * Class field. See {@link scala.tools.asm.ClassVisitor#visitField}.
*/
- public abstract Printer visitField(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final Object value);
+ public abstract Printer visitField(final int access, final String name,
+ final String desc, final String signature, final Object value);
/**
- * Class method.
- * See {@link org.objectweb.asm.ClassVisitor#visitMethod}.
+ * Class method. See {@link scala.tools.asm.ClassVisitor#visitMethod}.
*/
- public abstract Printer visitMethod(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final String[] exceptions);
+ public abstract Printer visitMethod(final int access, final String name,
+ final String desc, final String signature, final String[] exceptions);
/**
- * Class end.
- * See {@link org.objectweb.asm.ClassVisitor#visitEnd}.
+ * Class end. See {@link scala.tools.asm.ClassVisitor#visitEnd}.
*/
public abstract void visitClassEnd();
@@ -232,37 +209,31 @@ public abstract class Printer {
// ------------------------------------------------------------------------
/**
- * Annotation value.
- * See {@link org.objectweb.asm.AnnotationVisitor#visit}.
+ * Annotation value. See {@link scala.tools.asm.AnnotationVisitor#visit}.
*/
public abstract void visit(final String name, final Object value);
/**
- * Annotation enum value.
- * See {@link org.objectweb.asm.AnnotationVisitor#visitEnum}.
+ * Annotation enum value. See
+ * {@link scala.tools.asm.AnnotationVisitor#visitEnum}.
*/
- public abstract void visitEnum(
- final String name,
- final String desc,
- final String value);
+ public abstract void visitEnum(final String name, final String desc,
+ final String value);
/**
- * Nested annotation value.
- * See {@link org.objectweb.asm.AnnotationVisitor#visitAnnotation}.
+ * Nested annotation value. See
+ * {@link scala.tools.asm.AnnotationVisitor#visitAnnotation}.
*/
- public abstract Printer visitAnnotation(
- final String name,
- final String desc);
+ public abstract Printer visitAnnotation(final String name, final String desc);
/**
- * Annotation array value.
- * See {@link org.objectweb.asm.AnnotationVisitor#visitArray}.
+ * Annotation array value. See
+ * {@link scala.tools.asm.AnnotationVisitor#visitArray}.
*/
public abstract Printer visitArray(final String name);
/**
- * Annotation end.
- * See {@link org.objectweb.asm.AnnotationVisitor#visitEnd}.
+ * Annotation end. See {@link scala.tools.asm.AnnotationVisitor#visitEnd}.
*/
public abstract void visitAnnotationEnd();
@@ -271,22 +242,20 @@ public abstract class Printer {
// ------------------------------------------------------------------------
/**
- * Field annotation.
- * See {@link org.objectweb.asm.FieldVisitor#visitAnnotation}.
+ * Field annotation. See
+ * {@link scala.tools.asm.FieldVisitor#visitAnnotation}.
*/
- public abstract Printer visitFieldAnnotation(
- final String desc,
- final boolean visible);
+ public abstract Printer visitFieldAnnotation(final String desc,
+ final boolean visible);
/**
- * Field attribute.
- * See {@link org.objectweb.asm.FieldVisitor#visitAttribute}.
+ * Field attribute. See
+ * {@link scala.tools.asm.FieldVisitor#visitAttribute}.
*/
public abstract void visitFieldAttribute(final Attribute attr);
/**
- * Field end.
- * See {@link org.objectweb.asm.FieldVisitor#visitEnd}.
+ * Field end. See {@link scala.tools.asm.FieldVisitor#visitEnd}.
*/
public abstract void visitFieldEnd();
@@ -295,193 +264,161 @@ public abstract class Printer {
// ------------------------------------------------------------------------
/**
- * Method default annotation.
- * See {@link org.objectweb.asm.MethodVisitor#visitAnnotationDefault}.
+ * Method default annotation. See
+ * {@link scala.tools.asm.MethodVisitor#visitAnnotationDefault}.
*/
public abstract Printer visitAnnotationDefault();
/**
- * Method annotation.
- * See {@link org.objectweb.asm.MethodVisitor#visitAnnotation}.
+ * Method annotation. See
+ * {@link scala.tools.asm.MethodVisitor#visitAnnotation}.
*/
- public abstract Printer visitMethodAnnotation(
- final String desc,
- final boolean visible);
+ public abstract Printer visitMethodAnnotation(final String desc,
+ final boolean visible);
/**
- * Method parameter annotation.
- * See {@link org.objectweb.asm.MethodVisitor#visitParameterAnnotation}.
+ * Method parameter annotation. See
+ * {@link scala.tools.asm.MethodVisitor#visitParameterAnnotation}.
*/
- public abstract Printer visitParameterAnnotation(
- final int parameter,
- final String desc,
- final boolean visible);
+ public abstract Printer visitParameterAnnotation(final int parameter,
+ final String desc, final boolean visible);
/**
- * Method attribute.
- * See {@link org.objectweb.asm.MethodVisitor#visitAttribute}.
+ * Method attribute. See
+ * {@link scala.tools.asm.MethodVisitor#visitAttribute}.
*/
public abstract void visitMethodAttribute(final Attribute attr);
/**
- * Method start.
- * See {@link org.objectweb.asm.MethodVisitor#visitCode}.
+ * Method start. See {@link scala.tools.asm.MethodVisitor#visitCode}.
*/
public abstract void visitCode();
/**
- * Method stack frame.
- * See {@link org.objectweb.asm.MethodVisitor#visitFrame}.
+ * Method stack frame. See
+ * {@link scala.tools.asm.MethodVisitor#visitFrame}.
*/
- public abstract void visitFrame(
- final int type,
- final int nLocal,
- final Object[] local,
- final int nStack,
- final Object[] stack);
+ public abstract void visitFrame(final int type, final int nLocal,
+ final Object[] local, final int nStack, final Object[] stack);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitInsn}.
*/
public abstract void visitInsn(final int opcode);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitIntInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitIntInsn}.
*/
public abstract void visitIntInsn(final int opcode, final int operand);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitVarInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitVarInsn}.
*/
public abstract void visitVarInsn(final int opcode, final int var);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitTypeInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitTypeInsn}.
*/
public abstract void visitTypeInsn(final int opcode, final String type);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitFieldInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitFieldInsn}.
*/
- public abstract void visitFieldInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc);
+ public abstract void visitFieldInsn(final int opcode, final String owner,
+ final String name, final String desc);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitMethodInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitMethodInsn}.
*/
- public abstract void visitMethodInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc);
+ public abstract void visitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitInvokeDynamicInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitInvokeDynamicInsn}.
*/
- public abstract void visitInvokeDynamicInsn(
- String name,
- String desc,
- Handle bsm,
- Object... bsmArgs);
+ public abstract void visitInvokeDynamicInsn(String name, String desc,
+ Handle bsm, Object... bsmArgs);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitJumpInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitJumpInsn}.
*/
public abstract void visitJumpInsn(final int opcode, final Label label);
/**
- * Method label.
- * See {@link org.objectweb.asm.MethodVisitor#visitLabel}.
+ * Method label. See {@link scala.tools.asm.MethodVisitor#visitLabel}.
*/
public abstract void visitLabel(final Label label);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitLdcInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitLdcInsn}.
*/
public abstract void visitLdcInsn(final Object cst);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitIincInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitIincInsn}.
*/
public abstract void visitIincInsn(final int var, final int increment);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitTableSwitchInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitTableSwitchInsn}.
*/
- public abstract void visitTableSwitchInsn(
- final int min,
- final int max,
- final Label dflt,
- final Label... labels);
+ public abstract void visitTableSwitchInsn(final int min, final int max,
+ final Label dflt, final Label... labels);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitLookupSwitchInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitLookupSwitchInsn}.
*/
- public abstract void visitLookupSwitchInsn(
- final Label dflt,
- final int[] keys,
- final Label[] labels);
+ public abstract void visitLookupSwitchInsn(final Label dflt,
+ final int[] keys, final Label[] labels);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitMultiANewArrayInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitMultiANewArrayInsn}.
*/
- public abstract void visitMultiANewArrayInsn(
- final String desc,
- final int dims);
+ public abstract void visitMultiANewArrayInsn(final String desc,
+ final int dims);
/**
- * Method exception handler.
- * See {@link org.objectweb.asm.MethodVisitor#visitTryCatchBlock}.
+ * Method exception handler. See
+ * {@link scala.tools.asm.MethodVisitor#visitTryCatchBlock}.
*/
- public abstract void visitTryCatchBlock(
- final Label start,
- final Label end,
- final Label handler,
- final String type);
+ public abstract void visitTryCatchBlock(final Label start, final Label end,
+ final Label handler, final String type);
/**
- * Method debug info.
- * See {@link org.objectweb.asm.MethodVisitor#visitLocalVariable}.
+ * Method debug info. See
+ * {@link scala.tools.asm.MethodVisitor#visitLocalVariable}.
*/
- public abstract void visitLocalVariable(
- final String name,
- final String desc,
- final String signature,
- final Label start,
- final Label end,
- final int index);
+ public abstract void visitLocalVariable(final String name,
+ final String desc, final String signature, final Label start,
+ final Label end, final int index);
/**
- * Method debug info.
- * See {@link org.objectweb.asm.MethodVisitor#visitLineNumber}.
+ * Method debug info. See
+ * {@link scala.tools.asm.MethodVisitor#visitLineNumber}.
*/
public abstract void visitLineNumber(final int line, final Label start);
/**
- * Method max stack and max locals.
- * See {@link org.objectweb.asm.MethodVisitor#visitMaxs}.
+ * Method max stack and max locals. See
+ * {@link scala.tools.asm.MethodVisitor#visitMaxs}.
*/
public abstract void visitMaxs(final int maxStack, final int maxLocals);
/**
- * Method end.
- * See {@link org.objectweb.asm.MethodVisitor#visitEnd}.
+ * Method end. See {@link scala.tools.asm.MethodVisitor#visitEnd}.
*/
public abstract void visitMethodEnd();
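The callbacks above mirror the ClassVisitor/MethodVisitor/AnnotationVisitor events one to one; Textifier (and ASMifier) in this same package are the stock implementations. A rough sketch of driving one directly through a TraceMethodVisitor, assuming the usual ASM 4 constructors:

    import java.io.PrintWriter;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;
    import scala.tools.asm.util.Textifier;
    import scala.tools.asm.util.TraceMethodVisitor;

    public class PrinterSketch {
        public static void main(String[] args) {
            // TraceMethodVisitor forwards each MethodVisitor event to a Printer.
            Textifier printer = new Textifier();
            MethodVisitor mv = new TraceMethodVisitor(printer);

            mv.visitCode();
            mv.visitVarInsn(Opcodes.ILOAD, 1);
            mv.visitVarInsn(Opcodes.ILOAD, 2);
            mv.visitInsn(Opcodes.IADD);
            mv.visitInsn(Opcodes.IRETURN);
            mv.visitMaxs(2, 3);
            mv.visitEnd();

            PrintWriter pw = new PrintWriter(System.out);
            printer.print(pw);   // the print(PrintWriter) helper documented just below
            pw.flush();
        }
    }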
@@ -497,7 +434,8 @@ public abstract class Printer {
/**
* Prints the text constructed by this visitor.
*
- * @param pw the print writer to be used.
+ * @param pw
+ * the print writer to be used.
*/
public void print(final PrintWriter pw) {
printList(pw, text);
@@ -506,8 +444,10 @@ public abstract class Printer {
/**
* Appends a quoted string to a given buffer.
*
- * @param buf the buffer where the string must be added.
- * @param s the string to be added.
+ * @param buf
+ * the buffer where the string must be added.
+ * @param s
+ * the string to be added.
*/
public static void appendString(final StringBuffer buf, final String s) {
buf.append('\"');
@@ -541,9 +481,11 @@ public abstract class Printer {
/**
* Prints the given string tree.
*
- * @param pw the writer to be used to print the tree.
- * @param l a string tree, i.e., a string list that can contain other string
- * lists, and so on recursively.
+ * @param pw
+ * the writer to be used to print the tree.
+ * @param l
+ * a string tree, i.e., a string list that can contain other
+ * string lists, and so on recursively.
*/
static void printList(final PrintWriter pw, final List<?> l) {
for (int i = 0; i < l.size(); ++i) {
diff --git a/src/asm/scala/tools/asm/util/SignatureChecker.java b/src/asm/scala/tools/asm/util/SignatureChecker.java
deleted file mode 100644
index 71f0d80027..0000000000
--- a/src/asm/scala/tools/asm/util/SignatureChecker.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- */
-
-package scala.tools.asm.util;
-
-import scala.tools.asm.util.CheckMethodAdapter;
-import scala.tools.asm.MethodVisitor;
-
-/**
- * A subclass of ASM's CheckMethodAdapter for the sole purpose of accessing some protected methods there.
- *
- */
-public class SignatureChecker extends CheckMethodAdapter {
-
- public SignatureChecker(final MethodVisitor mv) {
- super(mv);
- }
-
- /**
- * Checks a class signature.
- *
- * @param signature a string containing the signature that must be checked.
- */
- public static void checkClassSignature(final String signature) {
- CheckMethodAdapter.checkClassSignature(signature);
- }
-
- /**
- * Checks a method signature.
- *
- * @param signature a string containing the signature that must be checked.
- */
- public static void checkMethodSignature(final String signature) {
- CheckMethodAdapter.checkMethodSignature(signature);
- }
-
- /**
- * Checks a field signature.
- *
- * @param signature a string containing the signature that must be checked.
- */
- public static void checkFieldSignature(final String signature) {
- CheckMethodAdapter.checkFieldSignature(signature);
- }
-
-}
diff --git a/src/asm/scala/tools/asm/util/Textifiable.java b/src/asm/scala/tools/asm/util/Textifiable.java
index b80d0139db..85e051e2f8 100644
--- a/src/asm/scala/tools/asm/util/Textifiable.java
+++ b/src/asm/scala/tools/asm/util/Textifiable.java
@@ -34,7 +34,7 @@ import java.util.Map;
import scala.tools.asm.Label;
/**
- * An {@link org.objectweb.asm.Attribute Attribute} that can print a readable
+ * An {@link scala.tools.asm.Attribute Attribute} that can print a readable
* representation of itself.
*
* Implementations should construct readable output from an attribute data
@@ -47,8 +47,10 @@ public interface Textifiable {
/**
* Build a human readable representation of this attribute.
*
- * @param buf a buffer used for printing Java code.
- * @param labelNames map of label instances to their names.
+ * @param buf
+ * a buffer used for printing Java code.
+ * @param labelNames
+ * map of label instances to their names.
*/
void textify(StringBuffer buf, Map<Label, String> labelNames);
}
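A minimal sketch of a custom attribute that opts into readable trace output by implementing this interface; the attribute name and payload are made up for the example:

    import java.util.Map;
    import scala.tools.asm.Attribute;
    import scala.tools.asm.Label;
    import scala.tools.asm.util.Textifiable;

    // A do-nothing attribute that only knows how to describe itself.
    public class CommentAttribute extends Attribute implements Textifiable {
        private final String comment;

        public CommentAttribute(String comment) {
            super("Comment"); // attribute type name, invented for this sketch
            this.comment = comment;
        }

        public void textify(StringBuffer buf, Map<Label, String> labelNames) {
            buf.append("// comment attribute: ").append(comment).append('\n');
        }
    }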
diff --git a/src/asm/scala/tools/asm/util/Textifier.java b/src/asm/scala/tools/asm/util/Textifier.java
index 8d40ebd026..a5c4f6779e 100644
--- a/src/asm/scala/tools/asm/util/Textifier.java
+++ b/src/asm/scala/tools/asm/util/Textifier.java
@@ -149,22 +149,24 @@ public class Textifier extends Printer {
/**
* Constructs a new {@link Textifier}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
*/
protected Textifier(final int api) {
super(api);
}
/**
- * Prints a disassembled view of the given class to the standard output. <p>
- * Usage: Textifier [-debug] &lt;binary class name or class
- * file name &gt;
+ * Prints a disassembled view of the given class to the standard output.
+ * <p>
+ * Usage: Textifier [-debug] &lt;binary class name or class file name &gt;
*
- * @param args the command line arguments.
+ * @param args
+ * the command line arguments.
*
- * @throws Exception if the class cannot be found, or if an IO exception
- * occurs.
+ * @throws Exception
+ * if the class cannot be found, or if an IO exception occurs.
*/
public static void main(final String[] args) throws Exception {
int i = 0;
@@ -182,21 +184,20 @@ public class Textifier extends Printer {
}
}
if (!ok) {
- System.err.println("Prints a disassembled view of the given class.");
+ System.err
+ .println("Prints a disassembled view of the given class.");
System.err.println("Usage: Textifier [-debug] "
+ "<fully qualified class name or class file name>");
return;
}
ClassReader cr;
if (args[i].endsWith(".class") || args[i].indexOf('\\') > -1
- || args[i].indexOf('/') > -1)
- {
+ || args[i].indexOf('/') > -1) {
cr = new ClassReader(new FileInputStream(args[i]));
} else {
cr = new ClassReader(args[i]);
}
- cr.accept(new TraceClassVisitor(new PrintWriter(System.out)),
- flags);
+ cr.accept(new TraceClassVisitor(new PrintWriter(System.out)), flags);
}
// ------------------------------------------------------------------------
@@ -204,38 +205,27 @@ public class Textifier extends Printer {
// ------------------------------------------------------------------------
@Override
- public void visit(
- final int version,
- final int access,
- final String name,
- final String signature,
- final String superName,
- final String[] interfaces)
- {
+ public void visit(final int version, final int access, final String name,
+ final String signature, final String superName,
+ final String[] interfaces) {
int major = version & 0xFFFF;
int minor = version >>> 16;
buf.setLength(0);
- buf.append("// class version ")
- .append(major)
- .append('.')
- .append(minor)
- .append(" (")
- .append(version)
- .append(")\n");
+ buf.append("// class version ").append(major).append('.').append(minor)
+ .append(" (").append(version).append(")\n");
if ((access & Opcodes.ACC_DEPRECATED) != 0) {
buf.append("// DEPRECATED\n");
}
- buf.append("// access flags 0x").append(Integer.toHexString(access).toUpperCase()).append('\n');
+ buf.append("// access flags 0x")
+ .append(Integer.toHexString(access).toUpperCase()).append('\n');
appendDescriptor(CLASS_SIGNATURE, signature);
if (signature != null) {
TraceSignatureVisitor sv = new TraceSignatureVisitor(access);
SignatureReader r = new SignatureReader(signature);
r.accept(sv);
- buf.append("// declaration: ")
- .append(name)
- .append(sv.getDeclaration())
- .append('\n');
+ buf.append("// declaration: ").append(name)
+ .append(sv.getDeclaration()).append('\n');
}
appendAccess(access & ~Opcodes.ACC_SUPER);
@@ -269,15 +259,11 @@ public class Textifier extends Printer {
public void visitSource(final String file, final String debug) {
buf.setLength(0);
if (file != null) {
- buf.append(tab)
- .append("// compiled from: ")
- .append(file)
+ buf.append(tab).append("// compiled from: ").append(file)
.append('\n');
}
if (debug != null) {
- buf.append(tab)
- .append("// debug info: ")
- .append(debug)
+ buf.append(tab).append("// debug info: ").append(debug)
.append('\n');
}
if (buf.length() > 0) {
@@ -286,11 +272,8 @@ public class Textifier extends Printer {
}
@Override
- public void visitOuterClass(
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitOuterClass(final String owner, final String name,
+ final String desc) {
buf.setLength(0);
buf.append(tab).append("OUTERCLASS ");
appendDescriptor(INTERNAL_NAME, owner);
@@ -304,10 +287,8 @@ public class Textifier extends Printer {
}
@Override
- public Textifier visitClassAnnotation(
- final String desc,
- final boolean visible)
- {
+ public Textifier visitClassAnnotation(final String desc,
+ final boolean visible) {
text.add("\n");
return visitAnnotation(desc, visible);
}
@@ -319,15 +300,13 @@ public class Textifier extends Printer {
}
@Override
- public void visitInnerClass(
- final String name,
- final String outerName,
- final String innerName,
- final int access)
- {
+ public void visitInnerClass(final String name, final String outerName,
+ final String innerName, final int access) {
buf.setLength(0);
buf.append(tab).append("// access flags 0x");
- buf.append(Integer.toHexString(access & ~Opcodes.ACC_SUPER).toUpperCase()).append('\n');
+ buf.append(
+ Integer.toHexString(access & ~Opcodes.ACC_SUPER).toUpperCase())
+ .append('\n');
buf.append(tab);
appendAccess(access);
buf.append("INNERCLASS ");
@@ -341,19 +320,15 @@ public class Textifier extends Printer {
}
@Override
- public Textifier visitField(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final Object value)
- {
+ public Textifier visitField(final int access, final String name,
+ final String desc, final String signature, final Object value) {
buf.setLength(0);
buf.append('\n');
if ((access & Opcodes.ACC_DEPRECATED) != 0) {
buf.append(tab).append("// DEPRECATED\n");
}
- buf.append(tab).append("// access flags 0x").append(Integer.toHexString(access).toUpperCase()).append('\n');
+ buf.append(tab).append("// access flags 0x")
+ .append(Integer.toHexString(access).toUpperCase()).append('\n');
if (signature != null) {
buf.append(tab);
appendDescriptor(FIELD_SIGNATURE, signature);
@@ -361,10 +336,8 @@ public class Textifier extends Printer {
TraceSignatureVisitor sv = new TraceSignatureVisitor(0);
SignatureReader r = new SignatureReader(signature);
r.acceptType(sv);
- buf.append(tab)
- .append("// declaration: ")
- .append(sv.getDeclaration())
- .append('\n');
+ buf.append(tab).append("// declaration: ")
+ .append(sv.getDeclaration()).append('\n');
}
buf.append(tab);
@@ -390,19 +363,15 @@ public class Textifier extends Printer {
}
@Override
- public Textifier visitMethod(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final String[] exceptions)
- {
+ public Textifier visitMethod(final int access, final String name,
+ final String desc, final String signature, final String[] exceptions) {
buf.setLength(0);
buf.append('\n');
if ((access & Opcodes.ACC_DEPRECATED) != 0) {
buf.append(tab).append("// DEPRECATED\n");
}
- buf.append(tab).append("// access flags 0x").append(Integer.toHexString(access).toUpperCase()).append('\n');
+ buf.append(tab).append("// access flags 0x")
+ .append(Integer.toHexString(access).toUpperCase()).append('\n');
if (signature != null) {
buf.append(tab);
@@ -415,12 +384,8 @@ public class Textifier extends Printer {
String genericReturn = v.getReturnType();
String genericExceptions = v.getExceptions();
- buf.append(tab)
- .append("// declaration: ")
- .append(genericReturn)
- .append(' ')
- .append(name)
- .append(genericDecl);
+ buf.append(tab).append("// declaration: ").append(genericReturn)
+ .append(' ').append(name).append(genericDecl);
if (genericExceptions != null) {
buf.append(" throws ").append(genericExceptions);
}
@@ -593,11 +558,8 @@ public class Textifier extends Printer {
}
@Override
- public void visitEnum(
- final String name,
- final String desc,
- final String value)
- {
+ public void visitEnum(final String name, final String desc,
+ final String value) {
buf.setLength(0);
appendComa(valueNumber++);
if (name != null) {
@@ -609,10 +571,7 @@ public class Textifier extends Printer {
}
@Override
- public Textifier visitAnnotation(
- final String name,
- final String desc)
- {
+ public Textifier visitAnnotation(final String name, final String desc) {
buf.setLength(0);
appendComa(valueNumber++);
if (name != null) {
@@ -629,9 +588,7 @@ public class Textifier extends Printer {
}
@Override
- public Textifier visitArray(
- final String name)
- {
+ public Textifier visitArray(final String name) {
buf.setLength(0);
appendComa(valueNumber++);
if (name != null) {
@@ -654,10 +611,8 @@ public class Textifier extends Printer {
// ------------------------------------------------------------------------
@Override
- public Textifier visitFieldAnnotation(
- final String desc,
- final boolean visible)
- {
+ public Textifier visitFieldAnnotation(final String desc,
+ final boolean visible) {
return visitAnnotation(desc, visible);
}
@@ -684,19 +639,14 @@ public class Textifier extends Printer {
}
@Override
- public Textifier visitMethodAnnotation(
- final String desc,
- final boolean visible)
- {
+ public Textifier visitMethodAnnotation(final String desc,
+ final boolean visible) {
return visitAnnotation(desc, visible);
}
@Override
- public Textifier visitParameterAnnotation(
- final int parameter,
- final String desc,
- final boolean visible)
- {
+ public Textifier visitParameterAnnotation(final int parameter,
+ final String desc, final boolean visible) {
buf.setLength(0);
buf.append(tab2).append('@');
appendDescriptor(FIELD_DESCRIPTOR, desc);
@@ -730,40 +680,35 @@ public class Textifier extends Printer {
}
@Override
- public void visitFrame(
- final int type,
- final int nLocal,
- final Object[] local,
- final int nStack,
- final Object[] stack)
- {
+ public void visitFrame(final int type, final int nLocal,
+ final Object[] local, final int nStack, final Object[] stack) {
buf.setLength(0);
buf.append(ltab);
buf.append("FRAME ");
switch (type) {
- case Opcodes.F_NEW:
- case Opcodes.F_FULL:
- buf.append("FULL [");
- appendFrameTypes(nLocal, local);
- buf.append("] [");
- appendFrameTypes(nStack, stack);
- buf.append(']');
- break;
- case Opcodes.F_APPEND:
- buf.append("APPEND [");
- appendFrameTypes(nLocal, local);
- buf.append(']');
- break;
- case Opcodes.F_CHOP:
- buf.append("CHOP ").append(nLocal);
- break;
- case Opcodes.F_SAME:
- buf.append("SAME");
- break;
- case Opcodes.F_SAME1:
- buf.append("SAME1 ");
- appendFrameTypes(1, stack);
- break;
+ case Opcodes.F_NEW:
+ case Opcodes.F_FULL:
+ buf.append("FULL [");
+ appendFrameTypes(nLocal, local);
+ buf.append("] [");
+ appendFrameTypes(nStack, stack);
+ buf.append(']');
+ break;
+ case Opcodes.F_APPEND:
+ buf.append("APPEND [");
+ appendFrameTypes(nLocal, local);
+ buf.append(']');
+ break;
+ case Opcodes.F_CHOP:
+ buf.append("CHOP ").append(nLocal);
+ break;
+ case Opcodes.F_SAME:
+ buf.append("SAME");
+ break;
+ case Opcodes.F_SAME1:
+ buf.append("SAME1 ");
+ appendFrameTypes(1, stack);
+ break;
}
buf.append('\n');
text.add(buf.toString());
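The switch above turns each stack map frame kind into one text line, for example "FRAME SAME" for F_SAME and "FRAME CHOP 2" for a two-slot chop. A tiny sketch that feeds frames straight into a Textifier (output indentation aside):

    import java.io.PrintWriter;
    import scala.tools.asm.Opcodes;
    import scala.tools.asm.util.Textifier;

    public class FramePrintSketch {
        public static void main(String[] args) {
            Textifier t = new Textifier();

            // Same locals as the previous frame, empty stack.
            t.visitFrame(Opcodes.F_SAME, 0, null, 0, null);

            // Previous locals minus the last two, empty stack.
            t.visitFrame(Opcodes.F_CHOP, 2, null, 0, null);

            PrintWriter pw = new PrintWriter(System.out);
            t.print(pw);
            pw.flush();
        }
    }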
@@ -782,20 +727,15 @@ public class Textifier extends Printer {
buf.append(tab2)
.append(OPCODES[opcode])
.append(' ')
- .append(opcode == Opcodes.NEWARRAY
- ? TYPES[operand]
- : Integer.toString(operand))
- .append('\n');
+ .append(opcode == Opcodes.NEWARRAY ? TYPES[operand] : Integer
+ .toString(operand)).append('\n');
text.add(buf.toString());
}
@Override
public void visitVarInsn(final int opcode, final int var) {
buf.setLength(0);
- buf.append(tab2)
- .append(OPCODES[opcode])
- .append(' ')
- .append(var)
+ buf.append(tab2).append(OPCODES[opcode]).append(' ').append(var)
.append('\n');
text.add(buf.toString());
}
@@ -810,12 +750,8 @@ public class Textifier extends Printer {
}
@Override
- public void visitFieldInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitFieldInsn(final int opcode, final String owner,
+ final String name, final String desc) {
buf.setLength(0);
buf.append(tab2).append(OPCODES[opcode]).append(' ');
appendDescriptor(INTERNAL_NAME, owner);
@@ -826,12 +762,8 @@ public class Textifier extends Printer {
}
@Override
- public void visitMethodInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc) {
buf.setLength(0);
buf.append(tab2).append(OPCODES[opcode]).append(' ');
appendDescriptor(INTERNAL_NAME, owner);
@@ -842,12 +774,8 @@ public class Textifier extends Printer {
}
@Override
- public void visitInvokeDynamicInsn(
- String name,
- String desc,
- Handle bsm,
- Object... bsmArgs)
- {
+ public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
+ Object... bsmArgs) {
buf.setLength(0);
buf.append(tab2).append("INVOKEDYNAMIC").append(' ');
buf.append(name);
@@ -855,11 +783,11 @@ public class Textifier extends Printer {
buf.append(" [");
appendHandle(bsm);
buf.append(tab3).append("// arguments:");
- if(bsmArgs.length == 0) {
+ if (bsmArgs.length == 0) {
buf.append(" none");
} else {
buf.append('\n').append(tab3);
- for(int i = 0; i < bsmArgs.length; i++) {
+ for (int i = 0; i < bsmArgs.length; i++) {
Object cst = bsmArgs[i];
if (cst instanceof String) {
Printer.appendString(buf, (String) cst);
@@ -915,22 +843,14 @@ public class Textifier extends Printer {
@Override
public void visitIincInsn(final int var, final int increment) {
buf.setLength(0);
- buf.append(tab2)
- .append("IINC ")
- .append(var)
- .append(' ')
- .append(increment)
- .append('\n');
+ buf.append(tab2).append("IINC ").append(var).append(' ')
+ .append(increment).append('\n');
text.add(buf.toString());
}
@Override
- public void visitTableSwitchInsn(
- final int min,
- final int max,
- final Label dflt,
- final Label... labels)
- {
+ public void visitTableSwitchInsn(final int min, final int max,
+ final Label dflt, final Label... labels) {
buf.setLength(0);
buf.append(tab2).append("TABLESWITCH\n");
for (int i = 0; i < labels.length; ++i) {
@@ -945,11 +865,8 @@ public class Textifier extends Printer {
}
@Override
- public void visitLookupSwitchInsn(
- final Label dflt,
- final int[] keys,
- final Label[] labels)
- {
+ public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
+ final Label[] labels) {
buf.setLength(0);
buf.append(tab2).append("LOOKUPSWITCH\n");
for (int i = 0; i < labels.length; ++i) {
@@ -973,12 +890,8 @@ public class Textifier extends Printer {
}
@Override
- public void visitTryCatchBlock(
- final Label start,
- final Label end,
- final Label handler,
- final String type)
- {
+ public void visitTryCatchBlock(final Label start, final Label end,
+ final Label handler, final String type) {
buf.setLength(0);
buf.append(tab2).append("TRYCATCHBLOCK ");
appendLabel(start);
@@ -993,14 +906,9 @@ public class Textifier extends Printer {
}
@Override
- public void visitLocalVariable(
- final String name,
- final String desc,
- final String signature,
- final Label start,
- final Label end,
- final int index)
- {
+ public void visitLocalVariable(final String name, final String desc,
+ final String signature, final Label start, final Label end,
+ final int index) {
buf.setLength(0);
buf.append(tab2).append("LOCALVARIABLE ").append(name).append(' ');
appendDescriptor(FIELD_DESCRIPTOR, desc);
@@ -1017,10 +925,8 @@ public class Textifier extends Printer {
TraceSignatureVisitor sv = new TraceSignatureVisitor(0);
SignatureReader r = new SignatureReader(signature);
r.acceptType(sv);
- buf.append(tab2)
- .append("// declaration: ")
- .append(sv.getDeclaration())
- .append('\n');
+ buf.append(tab2).append("// declaration: ")
+ .append(sv.getDeclaration()).append('\n');
}
text.add(buf.toString());
}
@@ -1056,14 +962,13 @@ public class Textifier extends Printer {
/**
* Prints a disassembled view of the given annotation.
*
- * @param desc the class descriptor of the annotation class.
- * @param visible <tt>true</tt> if the annotation is visible at runtime.
+ * @param desc
+ * the class descriptor of the annotation class.
+ * @param visible
+ * <tt>true</tt> if the annotation is visible at runtime.
* @return a visitor to visit the annotation values.
*/
- public Textifier visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public Textifier visitAnnotation(final String desc, final boolean visible) {
buf.setLength(0);
buf.append(tab).append('@');
appendDescriptor(FIELD_DESCRIPTOR, desc);
@@ -1078,7 +983,8 @@ public class Textifier extends Printer {
/**
* Prints a disassembled view of the given attribute.
*
- * @param attr an attribute.
+ * @param attr
+ * an attribute.
*/
public void visitAttribute(final Attribute attr) {
buf.setLength(0);
@@ -1111,15 +1017,16 @@ public class Textifier extends Printer {
* Appends an internal name, a type descriptor or a type signature to
* {@link #buf buf}.
*
- * @param type indicates if desc is an internal name, a field descriptor, a
- * method descriptor, a class signature, ...
- * @param desc an internal name, type descriptor, or type signature. May be
- * <tt>null</tt>.
+ * @param type
+ * indicates if desc is an internal name, a field descriptor, a
+ * method descriptor, a class signature, ...
+ * @param desc
+ * an internal name, type descriptor, or type signature. May be
+ * <tt>null</tt>.
*/
protected void appendDescriptor(final int type, final String desc) {
if (type == CLASS_SIGNATURE || type == FIELD_SIGNATURE
- || type == METHOD_SIGNATURE)
- {
+ || type == METHOD_SIGNATURE) {
if (desc != null) {
buf.append("// signature ").append(desc).append('\n');
}
@@ -1132,7 +1039,8 @@ public class Textifier extends Printer {
* Appends the name of the given label to {@link #buf buf}. Creates a new
* label name if the given label does not yet have one.
*
- * @param l a label.
+ * @param l
+ * a label.
*/
protected void appendLabel(final Label l) {
if (labelNames == null) {
@@ -1149,40 +1057,42 @@ public class Textifier extends Printer {
/**
* Appends the information about the given handle to {@link #buf buf}.
*
- * @param h a handle, non null.
+ * @param h
+ * a handle, non null.
*/
protected void appendHandle(final Handle h) {
buf.append('\n').append(tab3);
int tag = h.getTag();
- buf.append("// handle kind 0x").append(Integer.toHexString(tag)).append(" : ");
+ buf.append("// handle kind 0x").append(Integer.toHexString(tag))
+ .append(" : ");
switch (tag) {
- case Opcodes.H_GETFIELD:
- buf.append("GETFIELD");
- break;
- case Opcodes.H_GETSTATIC:
- buf.append("GETSTATIC");
- break;
- case Opcodes.H_PUTFIELD:
- buf.append("PUTFIELD");
- break;
- case Opcodes.H_PUTSTATIC:
- buf.append("PUTSTATIC");
- break;
- case Opcodes.H_INVOKEINTERFACE:
- buf.append("INVOKEINTERFACE");
- break;
- case Opcodes.H_INVOKESPECIAL:
- buf.append("INVOKESPECIAL");
- break;
- case Opcodes.H_INVOKESTATIC:
- buf.append("INVOKESTATIC");
- break;
- case Opcodes.H_INVOKEVIRTUAL:
- buf.append("INVOKEVIRTUAL");
- break;
- case Opcodes.H_NEWINVOKESPECIAL:
- buf.append("NEWINVOKESPECIAL");
- break;
+ case Opcodes.H_GETFIELD:
+ buf.append("GETFIELD");
+ break;
+ case Opcodes.H_GETSTATIC:
+ buf.append("GETSTATIC");
+ break;
+ case Opcodes.H_PUTFIELD:
+ buf.append("PUTFIELD");
+ break;
+ case Opcodes.H_PUTSTATIC:
+ buf.append("PUTSTATIC");
+ break;
+ case Opcodes.H_INVOKEINTERFACE:
+ buf.append("INVOKEINTERFACE");
+ break;
+ case Opcodes.H_INVOKESPECIAL:
+ buf.append("INVOKESPECIAL");
+ break;
+ case Opcodes.H_INVOKESTATIC:
+ buf.append("INVOKESTATIC");
+ break;
+ case Opcodes.H_INVOKEVIRTUAL:
+ buf.append("INVOKEVIRTUAL");
+ break;
+ case Opcodes.H_NEWINVOKESPECIAL:
+ buf.append("NEWINVOKESPECIAL");
+ break;
}
buf.append('\n');
buf.append(tab3);
@@ -1195,10 +1105,11 @@ public class Textifier extends Printer {
}
/**
- * Appends a string representation of the given access modifiers to {@link
- * #buf buf}.
+ * Appends a string representation of the given access modifiers to
+ * {@link #buf buf}.
*
- * @param access some access modifiers.
+ * @param access
+ * some access modifiers.
*/
private void appendAccess(final int access) {
if ((access & Opcodes.ACC_PUBLIC) != 0) {
@@ -1231,6 +1142,9 @@ public class Textifier extends Printer {
if ((access & Opcodes.ACC_STRICT) != 0) {
buf.append("strictfp ");
}
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+ buf.append("synthetic ");
+ }
if ((access & Opcodes.ACC_ENUM) != 0) {
buf.append("enum ");
}
@@ -1256,27 +1170,27 @@ public class Textifier extends Printer {
}
} else if (o[i] instanceof Integer) {
switch (((Integer) o[i]).intValue()) {
- case 0:
- appendDescriptor(FIELD_DESCRIPTOR, "T");
- break;
- case 1:
- appendDescriptor(FIELD_DESCRIPTOR, "I");
- break;
- case 2:
- appendDescriptor(FIELD_DESCRIPTOR, "F");
- break;
- case 3:
- appendDescriptor(FIELD_DESCRIPTOR, "D");
- break;
- case 4:
- appendDescriptor(FIELD_DESCRIPTOR, "J");
- break;
- case 5:
- appendDescriptor(FIELD_DESCRIPTOR, "N");
- break;
- case 6:
- appendDescriptor(FIELD_DESCRIPTOR, "U");
- break;
+ case 0:
+ appendDescriptor(FIELD_DESCRIPTOR, "T");
+ break;
+ case 1:
+ appendDescriptor(FIELD_DESCRIPTOR, "I");
+ break;
+ case 2:
+ appendDescriptor(FIELD_DESCRIPTOR, "F");
+ break;
+ case 3:
+ appendDescriptor(FIELD_DESCRIPTOR, "D");
+ break;
+ case 4:
+ appendDescriptor(FIELD_DESCRIPTOR, "J");
+ break;
+ case 5:
+ appendDescriptor(FIELD_DESCRIPTOR, "N");
+ break;
+ case 6:
+ appendDescriptor(FIELD_DESCRIPTOR, "U");
+ break;
}
} else {
appendLabel((Label) o[i]);
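The Textifier hunks above are mostly mechanical: re-indented switch blocks, rewrapped parameter lists, and reflowed @param javadoc. The one behavioral change is the new ACC_SYNTHETIC branch in appendAccess, so synthetic members now show up in textual dumps. A minimal Scala sketch of that flag-to-keyword decoding against the bundled scala.tools.asm.Opcodes constants (decodeAccess is a hypothetical helper covering a representative subset of the flags, not part of the patch):

    import scala.tools.asm.Opcodes._

    // Bit-test each access flag and emit its keyword, keeping the relative
    // order used by Textifier.appendAccess; "synthetic" is the new entry.
    def decodeAccess(access: Int): String =
      Seq(
        ACC_PUBLIC    -> "public",
        ACC_PRIVATE   -> "private",
        ACC_PROTECTED -> "protected",
        ACC_FINAL     -> "final",
        ACC_STATIC    -> "static",
        ACC_SYNTHETIC -> "synthetic",
        ACC_ENUM      -> "enum"
      ).collect { case (bit, kw) if (access & bit) != 0 => kw }.mkString(" ")

    // decodeAccess(ACC_PUBLIC | ACC_STATIC | ACC_SYNTHETIC) == "public static synthetic"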
diff --git a/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java b/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java
index f112609031..33e7cf0b26 100644
--- a/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java
@@ -58,33 +58,26 @@ public final class TraceAnnotationVisitor extends AnnotationVisitor {
}
@Override
- public void visitEnum(
- final String name,
- final String desc,
- final String value)
- {
+ public void visitEnum(final String name, final String desc,
+ final String value) {
p.visitEnum(name, desc, value);
super.visitEnum(name, desc, value);
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String name,
- final String desc)
- {
+ public AnnotationVisitor visitAnnotation(final String name,
+ final String desc) {
Printer p = this.p.visitAnnotation(name, desc);
- AnnotationVisitor av = this.av == null
- ? null
- : this.av.visitAnnotation(name, desc);
+ AnnotationVisitor av = this.av == null ? null : this.av
+ .visitAnnotation(name, desc);
return new TraceAnnotationVisitor(av, p);
}
@Override
public AnnotationVisitor visitArray(final String name) {
Printer p = this.p.visitArray(name);
- AnnotationVisitor av = this.av == null
- ? null
- : this.av.visitArray(name);
+ AnnotationVisitor av = this.av == null ? null : this.av
+ .visitArray(name);
return new TraceAnnotationVisitor(av, p);
}
diff --git a/src/asm/scala/tools/asm/util/TraceClassVisitor.java b/src/asm/scala/tools/asm/util/TraceClassVisitor.java
index bb830b71ce..ff7a017482 100644
--- a/src/asm/scala/tools/asm/util/TraceClassVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceClassVisitor.java
@@ -42,30 +42,41 @@ import scala.tools.asm.Opcodes;
* A {@link ClassVisitor} that prints the classes it visits with a
* {@link Printer}. This class visitor can be used in the middle of a class
* visitor chain to trace the class that is visited at a given point in this
- * chain. This may be useful for debugging purposes. <p> The trace printed when
- * visiting the <tt>Hello</tt> class is the following: <p> <blockquote>
- *
- * <pre> // class version 49.0 (49) // access flags 0x21 public class Hello {
- *
+ * chain. This may be useful for debugging purposes.
+ * <p>
+ * The trace printed when visiting the <tt>Hello</tt> class is the following:
+ * <p>
+ * <blockquote>
+ *
+ * <pre>
+ * // class version 49.0 (49) // access flags 0x21 public class Hello {
+ *
* // compiled from: Hello.java
- *
+ *
* // access flags 0x1 public &lt;init&gt; ()V ALOAD 0 INVOKESPECIAL
* java/lang/Object &lt;init&gt; ()V RETURN MAXSTACK = 1 MAXLOCALS = 1
- *
+ *
* // access flags 0x9 public static main ([Ljava/lang/String;)V GETSTATIC
* java/lang/System out Ljava/io/PrintStream; LDC &quot;hello&quot;
* INVOKEVIRTUAL java/io/PrintStream println (Ljava/lang/String;)V RETURN
- * MAXSTACK = 2 MAXLOCALS = 1 } </pre>
- *
- * </blockquote> where <tt>Hello</tt> is defined by: <p> <blockquote>
- *
- * <pre> public class Hello {
- *
- * public static void main(String[] args) {
- * System.out.println(&quot;hello&quot;); } } </pre>
- *
+ * MAXSTACK = 2 MAXLOCALS = 1 }
+ * </pre>
+ *
+ * </blockquote> where <tt>Hello</tt> is defined by:
+ * <p>
+ * <blockquote>
+ *
+ * <pre>
+ * public class Hello {
+ *
+ * public static void main(String[] args) {
+ * System.out.println(&quot;hello&quot;);
+ * }
+ * }
+ * </pre>
+ *
* </blockquote>
- *
+ *
* @author Eric Bruneton
* @author Eugene Kuleshov
*/
@@ -83,8 +94,9 @@ public final class TraceClassVisitor extends ClassVisitor {
/**
* Constructs a new {@link TraceClassVisitor}.
- *
- * @param pw the print writer to be used to print the class.
+ *
+ * @param pw
+ * the print writer to be used to print the class.
*/
public TraceClassVisitor(final PrintWriter pw) {
this(null, pw);
@@ -92,10 +104,12 @@ public final class TraceClassVisitor extends ClassVisitor {
/**
* Constructs a new {@link TraceClassVisitor}.
- *
- * @param cv the {@link ClassVisitor} to which this visitor delegates calls.
- * May be <tt>null</tt>.
- * @param pw the print writer to be used to print the class.
+ *
+ * @param cv
+ * the {@link ClassVisitor} to which this visitor delegates
+ * calls. May be <tt>null</tt>.
+ * @param pw
+ * the print writer to be used to print the class.
*/
public TraceClassVisitor(final ClassVisitor cv, final PrintWriter pw) {
this(cv, new Textifier(), pw);
@@ -103,33 +117,28 @@ public final class TraceClassVisitor extends ClassVisitor {
/**
* Constructs a new {@link TraceClassVisitor}.
- *
- * @param cv the {@link ClassVisitor} to which this visitor delegates calls.
- * May be <tt>null</tt>.
- * @param p the object that actually converts visit events into text.
- * @param pw the print writer to be used to print the class. May be null if
- * you simply want to use the result via
- * {@link Printer#getText()}, instead of printing it.
+ *
+ * @param cv
+ * the {@link ClassVisitor} to which this visitor delegates
+ * calls. May be <tt>null</tt>.
+ * @param p
+ * the object that actually converts visit events into text.
+ * @param pw
+ * the print writer to be used to print the class. May be null if
+ * you simply want to use the result via
+ * {@link Printer#getText()}, instead of printing it.
*/
- public TraceClassVisitor(
- final ClassVisitor cv,
- final Printer p,
- final PrintWriter pw)
- {
+ public TraceClassVisitor(final ClassVisitor cv, final Printer p,
+ final PrintWriter pw) {
super(Opcodes.ASM4, cv);
this.pw = pw;
this.p = p;
}
@Override
- public void visit(
- final int version,
- final int access,
- final String name,
- final String signature,
- final String superName,
- final String[] interfaces)
- {
+ public void visit(final int version, final int access, final String name,
+ final String signature, final String superName,
+ final String[] interfaces) {
p.visit(version, access, name, signature, superName, interfaces);
super.visit(version, access, name, signature, superName, interfaces);
}
@@ -141,20 +150,15 @@ public final class TraceClassVisitor extends ClassVisitor {
}
@Override
- public void visitOuterClass(
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitOuterClass(final String owner, final String name,
+ final String desc) {
p.visitOuterClass(owner, name, desc);
super.visitOuterClass(owner, name, desc);
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
Printer p = this.p.visitClassAnnotation(desc, visible);
AnnotationVisitor av = cv == null ? null : cv.visitAnnotation(desc,
visible);
@@ -168,55 +172,28 @@ public final class TraceClassVisitor extends ClassVisitor {
}
@Override
- public void visitInnerClass(
- final String name,
- final String outerName,
- final String innerName,
- final int access)
- {
+ public void visitInnerClass(final String name, final String outerName,
+ final String innerName, final int access) {
p.visitInnerClass(name, outerName, innerName, access);
super.visitInnerClass(name, outerName, innerName, access);
}
@Override
- public FieldVisitor visitField(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final Object value)
- {
- Printer p = this.p.visitField(access,
- name,
- desc,
- signature,
- value);
- FieldVisitor fv = cv == null ? null : cv.visitField(access,
- name,
- desc,
- signature,
- value);
+ public FieldVisitor visitField(final int access, final String name,
+ final String desc, final String signature, final Object value) {
+ Printer p = this.p.visitField(access, name, desc, signature, value);
+ FieldVisitor fv = cv == null ? null : cv.visitField(access, name, desc,
+ signature, value);
return new TraceFieldVisitor(fv, p);
}
@Override
- public MethodVisitor visitMethod(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final String[] exceptions)
- {
- Printer p = this.p.visitMethod(access,
- name,
- desc,
- signature,
- exceptions);
- MethodVisitor mv = cv == null ? null : cv.visitMethod(access,
- name,
- desc,
- signature,
+ public MethodVisitor visitMethod(final int access, final String name,
+ final String desc, final String signature, final String[] exceptions) {
+ Printer p = this.p.visitMethod(access, name, desc, signature,
exceptions);
+ MethodVisitor mv = cv == null ? null : cv.visitMethod(access, name,
+ desc, signature, exceptions);
return new TraceMethodVisitor(mv, p);
}
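The rewrapped javadoc above still documents the same usage: drop a TraceClassVisitor anywhere in a visitor chain to dump whatever flows through it. A hedged Scala usage sketch against the bundled ASM fork (the class name passed to ClassReader is only an example):

    import java.io.PrintWriter
    import scala.tools.asm.ClassReader
    import scala.tools.asm.util.{Textifier, TraceClassVisitor}

    // Passing null as the delegate ClassVisitor means we only trace;
    // chain a real visitor there to trace in the middle of a pipeline.
    val reader = new ClassReader("java.lang.Runnable")
    val tracer = new TraceClassVisitor(null, new Textifier(), new PrintWriter(System.out))
    reader.accept(tracer, 0)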
diff --git a/src/asm/scala/tools/asm/util/TraceFieldVisitor.java b/src/asm/scala/tools/asm/util/TraceFieldVisitor.java
index f537e83be1..9547a70008 100644
--- a/src/asm/scala/tools/asm/util/TraceFieldVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceFieldVisitor.java
@@ -37,7 +37,7 @@ import scala.tools.asm.Opcodes;
/**
* A {@link FieldVisitor} that prints the fields it visits with a
* {@link Printer}.
- *
+ *
* @author Eric Bruneton
*/
public final class TraceFieldVisitor extends FieldVisitor {
@@ -52,12 +52,10 @@ public final class TraceFieldVisitor extends FieldVisitor {
super(Opcodes.ASM4, fv);
this.p = p;
}
-
+
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
Printer p = this.p.visitFieldAnnotation(desc, visible);
AnnotationVisitor av = fv == null ? null : fv.visitAnnotation(desc,
visible);
diff --git a/src/asm/scala/tools/asm/util/TraceMethodVisitor.java b/src/asm/scala/tools/asm/util/TraceMethodVisitor.java
index 9aabf2079e..9034567c8f 100644
--- a/src/asm/scala/tools/asm/util/TraceMethodVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceMethodVisitor.java
@@ -56,10 +56,8 @@ public final class TraceMethodVisitor extends MethodVisitor {
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
Printer p = this.p.visitMethodAnnotation(desc, visible);
AnnotationVisitor av = mv == null ? null : mv.visitAnnotation(desc,
visible);
@@ -80,17 +78,11 @@ public final class TraceMethodVisitor extends MethodVisitor {
}
@Override
- public AnnotationVisitor visitParameterAnnotation(
- final int parameter,
- final String desc,
- final boolean visible)
- {
- Printer p = this.p.visitParameterAnnotation(parameter,
- desc,
- visible);
- AnnotationVisitor av = mv == null
- ? null
- : mv.visitParameterAnnotation(parameter, desc, visible);
+ public AnnotationVisitor visitParameterAnnotation(final int parameter,
+ final String desc, final boolean visible) {
+ Printer p = this.p.visitParameterAnnotation(parameter, desc, visible);
+ AnnotationVisitor av = mv == null ? null : mv.visitParameterAnnotation(
+ parameter, desc, visible);
return new TraceAnnotationVisitor(av, p);
}
@@ -101,13 +93,8 @@ public final class TraceMethodVisitor extends MethodVisitor {
}
@Override
- public void visitFrame(
- final int type,
- final int nLocal,
- final Object[] local,
- final int nStack,
- final Object[] stack)
- {
+ public void visitFrame(final int type, final int nLocal,
+ final Object[] local, final int nStack, final Object[] stack) {
p.visitFrame(type, nLocal, local, nStack, stack);
super.visitFrame(type, nLocal, local, nStack, stack);
}
@@ -137,34 +124,22 @@ public final class TraceMethodVisitor extends MethodVisitor {
}
@Override
- public void visitFieldInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitFieldInsn(final int opcode, final String owner,
+ final String name, final String desc) {
p.visitFieldInsn(opcode, owner, name, desc);
super.visitFieldInsn(opcode, owner, name, desc);
}
@Override
- public void visitMethodInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc) {
p.visitMethodInsn(opcode, owner, name, desc);
super.visitMethodInsn(opcode, owner, name, desc);
}
@Override
- public void visitInvokeDynamicInsn(
- String name,
- String desc,
- Handle bsm,
- Object... bsmArgs)
- {
+ public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
+ Object... bsmArgs) {
p.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
super.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
}
@@ -194,22 +169,15 @@ public final class TraceMethodVisitor extends MethodVisitor {
}
@Override
- public void visitTableSwitchInsn(
- final int min,
- final int max,
- final Label dflt,
- final Label... labels)
- {
+ public void visitTableSwitchInsn(final int min, final int max,
+ final Label dflt, final Label... labels) {
p.visitTableSwitchInsn(min, max, dflt, labels);
super.visitTableSwitchInsn(min, max, dflt, labels);
}
@Override
- public void visitLookupSwitchInsn(
- final Label dflt,
- final int[] keys,
- final Label[] labels)
- {
+ public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
+ final Label[] labels) {
p.visitLookupSwitchInsn(dflt, keys, labels);
super.visitLookupSwitchInsn(dflt, keys, labels);
}
@@ -221,25 +189,16 @@ public final class TraceMethodVisitor extends MethodVisitor {
}
@Override
- public void visitTryCatchBlock(
- final Label start,
- final Label end,
- final Label handler,
- final String type)
- {
+ public void visitTryCatchBlock(final Label start, final Label end,
+ final Label handler, final String type) {
p.visitTryCatchBlock(start, end, handler, type);
super.visitTryCatchBlock(start, end, handler, type);
}
@Override
- public void visitLocalVariable(
- final String name,
- final String desc,
- final String signature,
- final Label start,
- final Label end,
- final int index)
- {
+ public void visitLocalVariable(final String name, final String desc,
+ final String signature, final Label start, final Label end,
+ final int index) {
p.visitLocalVariable(name, desc, signature, start, end, index);
super.visitLocalVariable(name, desc, signature, start, end, index);
}
diff --git a/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java b/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java
index a37b759811..1e23c7ef1a 100644
--- a/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java
@@ -117,8 +117,7 @@ public final class TraceSignatureVisitor extends SignatureVisitor {
@Override
public SignatureVisitor visitInterface() {
- separator = seenInterface ? ", " : isInterface
- ? " extends "
+ separator = seenInterface ? ", " : isInterface ? " extends "
: " implements ";
seenInterface = true;
startType();
@@ -165,34 +164,34 @@ public final class TraceSignatureVisitor extends SignatureVisitor {
@Override
public void visitBaseType(final char descriptor) {
switch (descriptor) {
- case 'V':
- declaration.append("void");
- break;
- case 'B':
- declaration.append("byte");
- break;
- case 'J':
- declaration.append("long");
- break;
- case 'Z':
- declaration.append("boolean");
- break;
- case 'I':
- declaration.append("int");
- break;
- case 'S':
- declaration.append("short");
- break;
- case 'C':
- declaration.append("char");
- break;
- case 'F':
- declaration.append("float");
- break;
- // case 'D':
- default:
- declaration.append("double");
- break;
+ case 'V':
+ declaration.append("void");
+ break;
+ case 'B':
+ declaration.append("byte");
+ break;
+ case 'J':
+ declaration.append("long");
+ break;
+ case 'Z':
+ declaration.append("boolean");
+ break;
+ case 'I':
+ declaration.append("int");
+ break;
+ case 'S':
+ declaration.append("short");
+ break;
+ case 'C':
+ declaration.append("char");
+ break;
+ case 'F':
+ declaration.append("float");
+ break;
+ // case 'D':
+ default:
+ declaration.append("double");
+ break;
}
endType();
}
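For reference, the re-indented visitBaseType switch is a one-to-one mapping from JVM base-type descriptor characters to Java type names, with 'D' folded into the default branch. The same table as a small Scala sketch:

    // JVM base-type descriptors and the names TraceSignatureVisitor prints for them
    val baseTypeNames: Map[Char, String] = Map(
      'V' -> "void", 'B' -> "byte",  'J' -> "long", 'Z' -> "boolean",
      'I' -> "int",  'S' -> "short", 'C' -> "char", 'F' -> "float",
      'D' -> "double")

    // e.g. the method descriptor (IJ)Z has parameter types int and long and returns boolean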
diff --git a/src/build/genprod.scala b/src/build/genprod.scala
index b9511c1ad2..aec840c262 100644
--- a/src/build/genprod.scala
+++ b/src/build/genprod.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -63,7 +63,7 @@ object genprod extends App {
def header = """
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/compiler/scala/reflect/macros/runtime/Context.scala b/src/compiler/scala/reflect/macros/runtime/Context.scala
index 8e8b0fcea1..76c684f6d7 100644
--- a/src/compiler/scala/reflect/macros/runtime/Context.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Context.scala
@@ -14,6 +14,7 @@ abstract class Context extends scala.reflect.macros.Context
with Parsers
with Evals
with ExprUtils
+ with Synthetics
with Traces {
val universe: Global
diff --git a/src/compiler/scala/reflect/macros/runtime/Enclosures.scala b/src/compiler/scala/reflect/macros/runtime/Enclosures.scala
index be5f2dbe83..8fe0b09700 100644
--- a/src/compiler/scala/reflect/macros/runtime/Enclosures.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Enclosures.scala
@@ -1,23 +1,35 @@
package scala.reflect.macros
package runtime
+import scala.reflect.{ClassTag, classTag}
+
trait Enclosures {
self: Context =>
import universe._
- import mirror._
- private def site = callsiteTyper.context
- private def enclTrees = site.enclosingContextChain map (_.tree)
- private def enclPoses = enclosingMacros map (_.macroApplication.pos) filterNot (_ eq NoPosition)
+ type MacroRole = analyzer.MacroRole
+ def APPLY_ROLE = analyzer.APPLY_ROLE
+ def macroRole: MacroRole
+
+ private lazy val site = callsiteTyper.context
+ private lazy val enclTrees = site.enclosingContextChain map (_.tree)
+ private lazy val enclPoses = enclosingMacros map (_.macroApplication.pos) filterNot (_ eq NoPosition)
+
+ private def lenientEnclosure[T <: Tree : ClassTag]: Tree = enclTrees collectFirst { case x: T => x } getOrElse EmptyTree
+ private def strictEnclosure[T <: Tree : ClassTag]: T = enclTrees collectFirst { case x: T => x } getOrElse (throw new EnclosureException(classTag[T].runtimeClass, enclTrees))
// vals are eager to simplify debugging
// after all we wouldn't save that much time by making them lazy
val macroApplication: Tree = expandee
- val enclosingClass: Tree = enclTrees collectFirst { case x: ImplDef => x } getOrElse EmptyTree
+ def enclosingPackage: PackageDef = strictEnclosure[PackageDef]
+ val enclosingClass: Tree = lenientEnclosure[ImplDef]
+ def enclosingImpl: ImplDef = strictEnclosure[ImplDef]
+ def enclosingTemplate: Template = strictEnclosure[Template]
val enclosingImplicits: List[(Type, Tree)] = site.openImplicits
val enclosingMacros: List[Context] = this :: universe.analyzer.openMacros // include self
- val enclosingMethod: Tree = site.enclMethod.tree
+ val enclosingMethod: Tree = lenientEnclosure[DefDef]
+ def enclosingDef: DefDef = strictEnclosure[DefDef]
val enclosingPosition: Position = if (enclPoses.isEmpty) NoPosition else enclPoses.head.pos
val enclosingUnit: CompilationUnit = universe.currentRun.currentUnit
val enclosingRun: Run = universe.currentRun
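The new lenientEnclosure/strictEnclosure helpers above lean on a ClassTag so that the "case x: T" type test inside collectFirst is checked against the actual runtime class rather than erased away. A standalone sketch of the same pattern, with nothing from the compiler cake (firstOfType is a made-up name):

    import scala.reflect.{ClassTag, classTag}

    // Return the first element of runtime type T, or fail loudly with the
    // searched class name -- the same shape as strictEnclosure above.
    def firstOfType[T: ClassTag](xs: List[Any]): T =
      xs.collectFirst { case x: T => x }.getOrElse(
        throw new NoSuchElementException(
          s"no ${classTag[T].runtimeClass.getName} among ${xs.size} elements"))

    // firstOfType[String](List(1, 2.0, "three")) == "three"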
diff --git a/src/compiler/scala/reflect/macros/runtime/ExprUtils.scala b/src/compiler/scala/reflect/macros/runtime/ExprUtils.scala
index 672699f00e..a719beed97 100644
--- a/src/compiler/scala/reflect/macros/runtime/ExprUtils.scala
+++ b/src/compiler/scala/reflect/macros/runtime/ExprUtils.scala
@@ -5,7 +5,6 @@ trait ExprUtils {
self: Context =>
import universe._
- import mirror._
def literalNull = Expr[Null](Literal(Constant(null)))(TypeTag.Null)
diff --git a/src/compiler/scala/reflect/macros/runtime/Names.scala b/src/compiler/scala/reflect/macros/runtime/Names.scala
index ee9f3a56d3..635e8bcd45 100644
--- a/src/compiler/scala/reflect/macros/runtime/Names.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Names.scala
@@ -7,11 +7,20 @@ trait Names {
lazy val freshNameCreator = callsiteTyper.context.unit.fresh
def fresh(): String =
- freshNameCreator.newName()
+ freshName()
def fresh(name: String): String =
- freshNameCreator.newName(name)
+ freshName(name)
def fresh[NameType <: Name](name: NameType): NameType =
+ freshName[NameType](name)
+
+ def freshName(): String =
+ freshNameCreator.newName()
+
+ def freshName(name: String): String =
+ freshNameCreator.newName(name)
+
+ def freshName[NameType <: Name](name: NameType): NameType =
name.mapName(freshNameCreator.newName(_)).asInstanceOf[NameType]
}
\ No newline at end of file
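Above, fresh is kept as a thin forwarder so existing macros keep working while the implementation moves behind the new freshName overloads. A minimal, hedged sketch of a macro implementation minting a collision-free name through the context (namesImpl and the "tmp$" prefix are illustrative):

    import scala.reflect.macros.Context

    def namesImpl(c: Context)(): c.Expr[String] = {
      import c.universe._
      // c.fresh now just forwards to freshName; each call yields a name
      // that cannot clash with identifiers written by the user.
      val tmp = newTermName(c.fresh("tmp$"))
      c.literal(tmp.toString)
    }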
diff --git a/src/compiler/scala/reflect/macros/runtime/Synthetics.scala b/src/compiler/scala/reflect/macros/runtime/Synthetics.scala
new file mode 100644
index 0000000000..73f3ab8d20
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/Synthetics.scala
@@ -0,0 +1,83 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ */
+
+package scala.reflect.macros
+package runtime
+
+import java.util.UUID._
+import scala.reflect.internal.Flags._
+import scala.reflect.internal.util.BatchSourceFile
+import scala.reflect.io.VirtualFile
+
+trait Synthetics {
+ self: Context =>
+
+ import global._
+ import mirror.wrapMissing
+
+ // getClassIfDefined and getModuleIfDefined cannot be used here
+ // because they don't work for stuff declared in the empty package
+ // (as specified in SLS, code inside non-empty packages cannot see
+ // declarations from the empty package, so compiler internals
+ // default to ignoring contents of the empty package)
+ // to the contrast, staticModule and staticClass are designed
+ // to be a part of the reflection API and, therefore, they
+ // correctly resolve all names
+ private def topLevelSymbol(name: Name): Symbol = wrapMissing {
+ if (name.isTermName) mirror.staticModule(name.toString)
+ else mirror.staticClass(name.toString)
+ }
+
+ def topLevelDef(name: Name): Tree =
+ enclosingRun.units.toList.map(_.body).flatMap {
+ // it's okay to check `stat.symbol` here, because currently macros expand strictly after namer
+ // which means that by the earliest time one can call this method all top-level definitions will have already been entered
+ case PackageDef(_, stats) => stats filter (stat => stat.symbol != NoSymbol && stat.symbol == topLevelSymbol(name))
+ case _ => Nil // should never happen, but better be safe than sorry
+ }.headOption getOrElse EmptyTree
+
+ def topLevelRef(name: Name): Tree = {
+ if (topLevelDef(name).nonEmpty) gen.mkUnattributedRef(name)
+ else EmptyTree
+ }
+
+ // TODO: provide a way to specify a pretty name for debugging purposes
+ private def randomFileName() = (
+ "macroSynthetic-" + randomUUID().toString.replace("-", "") + ".scala"
+ )
+
+ def introduceTopLevel[T: PackageSpec](packagePrototype: T, definition: universe.ImplDef): RefTree =
+ introduceTopLevel(packagePrototype, List(definition)).head
+
+ def introduceTopLevel[T: PackageSpec](packagePrototype: T, definitions: universe.ImplDef*): List[RefTree] =
+ introduceTopLevel(packagePrototype, definitions.toList)
+
+ private def introduceTopLevel[T: PackageSpec](packagePrototype: T, definitions: List[universe.ImplDef]): List[RefTree] = {
+ val code @ PackageDef(pid, _) = implicitly[PackageSpec[T]].mkPackageDef(packagePrototype, definitions)
+ val syntheticFileName = randomFileName()
+ // compatibility with SBT
+ // on the one hand, we need to specify some jfile here, otherwise sbt crashes with an NPE (SI-6870)
+ // on the other hand, we can't specify the obvious enclosingUnit, because then sbt somehow fails to run tests using type macros
+ // okay, now let's specify a guaranteedly non-existent file in an existing directory (so that we don't run into permission problems)
+ val relatedJfile = enclosingUnit.source.file.file
+ val fakeJfile = if (relatedJfile != null) new java.io.File(relatedJfile.getParent, syntheticFileName) else null
+ val virtualFile = new VirtualFile(syntheticFileName) { override def file = fakeJfile }
+ val sourceFile = new BatchSourceFile(virtualFile, code.toString)
+ val unit = new CompilationUnit(sourceFile)
+ unit.body = code
+ universe.currentRun.compileLate(unit)
+ definitions map (definition => Select(pid, definition.name))
+ }
+
+ protected def mkPackageDef(name: String, stats: List[Tree]) = gen.mkPackageDef(name, stats)
+
+ protected def mkPackageDef(name: TermName, stats: List[Tree]) = gen.mkPackageDef(name.toString, stats)
+
+ protected def mkPackageDef(tree: RefTree, stats: List[Tree]) = PackageDef(tree, stats)
+
+ protected def mkPackageDef(sym: Symbol, stats: List[Tree]) = {
+ assert(sym hasFlag PACKAGE, s"expected a package or package class symbol, found: $sym")
+ gen.mkPackageDef(sym.fullName.toString, stats)
+ }
+}
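Orientation for the new file: introduceTopLevel wraps the synthesized ImplDefs in a PackageDef, parks them in a freshly named virtual source file (with a fake sibling java.io.File to keep SBT happy, per SI-6870), hands the unit to compileLate, and returns Select references to the new definitions. A heavily hedged sketch of how a macro might call it, assuming the public Context API of this build exposes introduceTopLevel and accepts a plain package name, as the mkPackageDef(name: String, ...) overload suggests; the package name and the passed-in definition are placeholders:

    import scala.reflect.macros.Context

    // `defn` stands for a fully constructed ClassDef or ModuleDef; building
    // one by hand is omitted to keep the sketch short.
    def expandImpl(c: Context)(defn: c.universe.ImplDef): c.universe.RefTree =
      // compiles `package synthesized { <defn> }` late and returns
      // a Select(synthesized, defn.name) pointing at the new definition
      c.introduceTopLevel("synthesized", defn)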
diff --git a/src/compiler/scala/reflect/macros/runtime/Typers.scala b/src/compiler/scala/reflect/macros/runtime/Typers.scala
index f9add91b9a..7e268247dd 100644
--- a/src/compiler/scala/reflect/macros/runtime/Typers.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Typers.scala
@@ -1,6 +1,8 @@
package scala.reflect.macros
package runtime
+import scala.reflect.internal.Mode
+
trait Typers {
self: Context =>
@@ -22,7 +24,7 @@ trait Typers {
// typechecking uses silent anyways (e.g. in typedSelect), so you'll only waste your time
// I'd advise fixing the root cause: finding why the context is not set to report errors
// (also see reflect.runtime.ToolBoxes.typeCheckExpr for a workaround that might work for you)
- wrapper(callsiteTyper.silent(_.typed(tree, universe.analyzer.EXPRmode, pt)) match {
+ wrapper(callsiteTyper.silent(_.typed(tree, Mode.EXPRmode, pt)) match {
case universe.analyzer.SilentResultValue(result) =>
macroLogVerbose(result)
result
@@ -62,4 +64,4 @@ trait Typers {
def resetAllAttrs(tree: Tree): Tree = universe.resetAllAttrs(tree)
def resetLocalAttrs(tree: Tree): Tree = universe.resetLocalAttrs(tree)
-}
\ No newline at end of file
+}
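The Typers hunk only swaps the raw analyzer.EXPRmode constant for the new typed Mode.EXPRmode; the behavior of the silent typecheck is unchanged. For context, a hedged sketch of a helper reaching this code path through the public c.typeCheck entry point (checkedOrMarker is illustrative):

    import scala.reflect.macros.Context

    // With silent = true, failures come back as EmptyTree instead of being
    // reported, mirroring the callsiteTyper.silent(...) call in the patch.
    def checkedOrMarker(c: Context)(tree: c.Tree): c.Tree = {
      import c.universe._
      val typed = c.typeCheck(tree, silent = true)
      if (typed.isEmpty) Literal(Constant("<did not typecheck>")) else typed
    }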
diff --git a/src/compiler/scala/reflect/reify/Errors.scala b/src/compiler/scala/reflect/reify/Errors.scala
index 2e57bc59a8..860dfd72b2 100644
--- a/src/compiler/scala/reflect/reify/Errors.scala
+++ b/src/compiler/scala/reflect/reify/Errors.scala
@@ -7,7 +7,6 @@ trait Errors {
self: Reifier =>
import global._
- import definitions._
def defaultErrorPosition = {
val stack = currents collect { case t: Tree if t.pos != NoPosition => t.pos }
@@ -22,11 +21,6 @@ trait Errors {
throw new ReificationException(defaultErrorPosition, msg)
}
- def CannotReifySymbol(sym: Symbol) = {
- val msg = "implementation restriction: cannot reify symbol %s (%s)".format(sym, sym.accurateKindString)
- throw new ReificationException(defaultErrorPosition, msg)
- }
-
def CannotReifyCompoundTypeTreeWithNonEmptyBody(ctt: CompoundTypeTree) = {
val msg = "implementation restriction: cannot reify refinement type trees with non-empty bodies"
throw new ReificationException(ctt.pos, msg)
diff --git a/src/compiler/scala/reflect/reify/Phases.scala b/src/compiler/scala/reflect/reify/Phases.scala
index 1710cae2a5..d43532090c 100644
--- a/src/compiler/scala/reflect/reify/Phases.scala
+++ b/src/compiler/scala/reflect/reify/Phases.scala
@@ -10,7 +10,6 @@ trait Phases extends Reshape
self: Reifier =>
import global._
- import definitions._
private var alreadyRun = false
@@ -26,7 +25,7 @@ trait Phases extends Reshape
if (reifyDebug) println("[reshape phase]")
tree = reshape.transform(tree)
if (reifyDebug) println("[interlude]")
- if (reifyDebug) println("reifee = " + (if (opt.showTrees) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString))
+ if (reifyDebug) println("reifee = " + (if (settings.Xshowtrees.value || settings.XshowtreesCompact.value || settings.XshowtreesStringified.value) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString))
if (reifyDebug) println("[calculate phase]")
calculate.traverse(tree)
@@ -41,4 +40,4 @@ trait Phases extends Reshape
result
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala
index 47669f57b0..9cf069fe98 100644
--- a/src/compiler/scala/reflect/reify/Reifier.scala
+++ b/src/compiler/scala/reflect/reify/Reifier.scala
@@ -6,9 +6,9 @@ import scala.reflect.macros.UnexpectedReificationException
import scala.reflect.reify.utils.Utils
/** Given a tree or a type, generate a tree that when executed at runtime produces the original tree or type.
- * See more info in the comments to ``reify'' in scala.reflect.api.Universe.
+ * See more info in the comments to `reify` in scala.reflect.api.Universe.
*
- * @author Martin Odersky
+ * @author Martin Odersky
* @version 2.10
*/
abstract class Reifier extends States
@@ -32,7 +32,7 @@ abstract class Reifier extends States
override def hasReifier = true
/**
- * For ``reifee'' and other reification parameters, generate a tree of the form
+ * For `reifee` and other reification parameters, generate a tree of the form
*
* {
* val $u: universe.type = <[ universe ]>
@@ -57,7 +57,7 @@ abstract class Reifier extends States
val result = reifee match {
case tree: Tree =>
- reifyTrace("reifying = ")(if (opt.showTrees) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString)
+ reifyTrace("reifying = ")(if (settings.Xshowtrees.value || settings.XshowtreesCompact.value || settings.XshowtreesStringified.value) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString)
reifyTrace("reifee is located at: ")(tree.pos)
reifyTrace("universe = ")(universe)
reifyTrace("mirror = ")(mirror)
diff --git a/src/compiler/scala/reflect/reify/States.scala b/src/compiler/scala/reflect/reify/States.scala
index 58455c9f3c..29bfa19845 100644
--- a/src/compiler/scala/reflect/reify/States.scala
+++ b/src/compiler/scala/reflect/reify/States.scala
@@ -4,7 +4,6 @@ trait States {
self: Reifier =>
import global._
- import definitions._
/** Encapsulates reifier state
*
diff --git a/src/compiler/scala/reflect/reify/Taggers.scala b/src/compiler/scala/reflect/reify/Taggers.scala
index cbaee41890..9659134e5b 100644
--- a/src/compiler/scala/reflect/reify/Taggers.scala
+++ b/src/compiler/scala/reflect/reify/Taggers.scala
@@ -8,7 +8,6 @@ abstract class Taggers {
import c.universe._
import definitions._
- import treeBuild._
val coreTags = Map(
ByteTpe -> nme.Byte,
@@ -59,7 +58,7 @@ abstract class Taggers {
val result =
tpe match {
case coreTpe if coreTags contains coreTpe =>
- val ref = if (tagModule.owner.isPackageClass) Ident(tagModule) else Select(prefix, tagModule.name)
+ val ref = if (tagModule.isTopLevel) Ident(tagModule) else Select(prefix, tagModule.name)
Select(ref, coreTags(coreTpe))
case _ =>
translatingReificationErrors(materializer)
diff --git a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
index dec491aabe..5a454e1e07 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
@@ -5,7 +5,6 @@ trait GenAnnotationInfos {
self: Reifier =>
import global._
- import definitions._
// usually annotations are reified as their originals from Modifiers
// however, when reifying free and tough types, we're forced to reify annotation infos as is
@@ -52,4 +51,4 @@ trait GenAnnotationInfos {
val reifiedAssocs = ann.assocs map (assoc => scalaFactoryCall(nme.Tuple2, reify(assoc._1), reifyClassfileAnnotArg(assoc._2)))
mirrorFactoryCall(nme.Annotation, reify(ann.atp), mkList(reifiedArgs), mkListMap(reifiedAssocs))
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenNames.scala b/src/compiler/scala/reflect/reify/codegen/GenNames.scala
index 4abf88f475..4266c6f8d6 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenNames.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenNames.scala
@@ -5,10 +5,9 @@ trait GenNames {
self: Reifier =>
import global._
- import definitions._
def reifyName(name: Name) = {
- val factory = if (name.isTypeName) nme.nmeNewTypeName else nme.nmeNewTermName
+ val factory = if (name.isTypeName) nme.TypeName else nme.TermName
mirrorCall(factory, Literal(Constant(name.toString)))
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenPositions.scala b/src/compiler/scala/reflect/reify/codegen/GenPositions.scala
index 8c5db04454..1d151c5135 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenPositions.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenPositions.scala
@@ -5,7 +5,6 @@ trait GenPositions {
self: Reifier =>
import global._
- import definitions._
// we do not reify positions because this inflates resulting trees, but doesn't buy us anything
// where would one use positions? right, in error messages
@@ -14,4 +13,4 @@ trait GenPositions {
// however both macros and toolboxes have their own means to report errors in synthetic trees
def reifyPosition(pos: Position): Tree =
reifyMirrorObject(NoPosition)
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
index 47c966ea24..67bc93d407 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
@@ -7,7 +7,6 @@ trait GenSymbols {
self: Reifier =>
import global._
- import definitions._
/** Symbol table of the reifee.
*
@@ -101,7 +100,7 @@ trait GenSymbols {
def reifyFreeTerm(binding: Tree): Tree =
reifyIntoSymtab(binding.symbol) { sym =>
if (reifyDebug) println("Free term" + (if (sym.isCapturedVariable) " (captured)" else "") + ": " + sym + "(" + sym.accurateKindString + ")")
- val name = newTermName(nme.REIFY_FREE_PREFIX + sym.name + (if (sym.isType) nme.REIFY_FREE_THIS_SUFFIX else ""))
+ val name = newTermName("" + nme.REIFY_FREE_PREFIX + sym.name + (if (sym.isType) nme.REIFY_FREE_THIS_SUFFIX else ""))
// We need to note whether the free value being reified is stable or not to guide subsequent reflective compilation.
// Here's why reflection compilation needs our help.
//
@@ -142,14 +141,14 @@ trait GenSymbols {
reifyIntoSymtab(binding.symbol) { sym =>
if (reifyDebug) println("Free type: %s (%s)".format(sym, sym.accurateKindString))
state.reificationIsConcrete = false
- val name = newTermName(nme.REIFY_FREE_PREFIX + sym.name)
+ val name: TermName = nme.REIFY_FREE_PREFIX append sym.name
Reification(name, binding, mirrorBuildCall(nme.newFreeType, reify(sym.name.toString), mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
}
def reifySymDef(sym: Symbol): Tree =
reifyIntoSymtab(sym) { sym =>
if (reifyDebug) println("Sym def: %s (%s)".format(sym, sym.accurateKindString))
- val name = newTermName(nme.REIFY_SYMDEF_PREFIX + sym.name)
+ val name: TermName = nme.REIFY_SYMDEF_PREFIX append sym.name
def reifiedOwner = if (sym.owner.isLocatable) reify(sym.owner) else reifySymDef(sym.owner)
Reification(name, Ident(sym), mirrorBuildCall(nme.newNestedSymbol, reifiedOwner, reify(sym.name), reify(sym.pos), mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(sym.isClass)))
}
@@ -173,7 +172,7 @@ trait GenSymbols {
val reification = reificode(sym)
import reification.{name, binding}
val tree = reification.tree updateAttachment ReifyBindingAttachment(binding)
- state.symtab += (sym, name, tree)
+ state.symtab += (sym, name.toTermName, tree)
}
fromSymtab
}
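The name-building changes above (the leading "" and Name#append) stop relying on an implicit Name-to-String conversion: prefixing with an empty String forces plain toString-based concatenation, while append, a compiler-internal Name method, keeps the result a Name. The String half can be shown standalone against the public runtime universe:

    import scala.reflect.runtime.universe._

    val prefix  = newTermName("free$")
    val symName = newTermName("x")

    // "" + ... concatenates via toString on both Names, so no implicit
    // Name => String conversion is required anywhere.
    val reifiedName = newTermName("" + prefix + symName)   // a TermName printing as free$x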
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
index 9894e359b4..78bdf7e132 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
@@ -15,7 +15,7 @@ trait GenTrees {
/**
* Reify a tree.
- * For internal use only, use ``reified'' instead.
+ * For internal use only, use `reified` instead.
*/
def reifyTree(tree: Tree): Tree = {
assert(tree != null, "tree is null")
@@ -29,12 +29,12 @@ trait GenTrees {
// the idea behind the new reincarnation of reifier is a simple maxim:
//
- // never call ``reifyType'' to reify a tree
+ // never call `reifyType` to reify a tree
//
// this works because the stuff we are reifying was once represented with trees only
// and lexical scope information can be fully captured by reifying symbols
//
- // to enable this idyll, we work hard in the ``Reshape'' phase
+ // to enable this idyll, we work hard in the `Reshape` phase
// which replaces all types with equivalent trees and works around non-idempotencies of the typechecker
//
// why bother? because this brings method to the madness
@@ -45,7 +45,9 @@ trait GenTrees {
case global.EmptyTree =>
reifyMirrorObject(EmptyTree)
case global.emptyValDef =>
- mirrorBuildSelect(nme.emptyValDef)
+ mirrorSelect(nme.emptyValDef)
+ case global.pendingSuperCall =>
+ mirrorSelect(nme.pendingSuperCall)
case FreeDef(_, _, _, _, _) =>
reifyNestedFreeDef(tree)
case FreeRef(_, _) =>
@@ -63,8 +65,8 @@ trait GenTrees {
}
// usually we don't reify symbols/types, because they can be re-inferred during subsequent reflective compilation
- // however, reification of AnnotatedTypes is special. see ``reifyType'' to find out why.
- if (reifyTreeSymbols && tree.hasSymbol) {
+ // however, reification of AnnotatedTypes is special. see `reifyType` to find out why.
+ if (reifyTreeSymbols && tree.hasSymbolField) {
if (reifyDebug) println("reifying symbol %s for tree %s".format(tree.symbol, tree))
rtree = mirrorBuildCall(nme.setSymbol, rtree, reify(tree.symbol))
}
@@ -84,13 +86,13 @@ trait GenTrees {
case TreeSplice(splicee) =>
if (reifyDebug) println("splicing " + tree)
- // see ``Metalevels'' for more info about metalevel breaches
+ // see `Metalevels` for more info about metalevel breaches
// and about how we deal with splices that contain them
- val isMetalevelBreach = splicee exists (sub => sub.hasSymbol && sub.symbol != NoSymbol && sub.symbol.metalevel > 0)
- val isRuntimeEval = splicee exists (sub => sub.hasSymbol && sub.symbol == ExprSplice)
+ val isMetalevelBreach = splicee exists (sub => sub.hasSymbolField && sub.symbol != NoSymbol && sub.symbol.metalevel > 0)
+ val isRuntimeEval = splicee exists (sub => sub.hasSymbolField && sub.symbol == ExprSplice)
if (isMetalevelBreach || isRuntimeEval) {
// we used to convert dynamic splices into runtime evals transparently, but we no longer do that
- // why? see comments in ``Metalevels''
+ // why? see comments in `Metalevels`
// if (reifyDebug) println("splicing has failed: cannot splice when facing a metalevel breach")
// EmptyTree
CannotReifyRuntimeSplice(tree)
@@ -100,7 +102,7 @@ trait GenTrees {
// we intentionally don't care about the prefix (the first underscore in the `RefiedTree` pattern match)
case ReifiedTree(_, _, inlinedSymtab, rtree, _, _, _) =>
if (reifyDebug) println("inlining the splicee")
- // all free vars local to the enclosing reifee should've already been inlined by ``Metalevels''
+ // all free vars local to the enclosing reifee should've already been inlined by `Metalevels`
for (sym <- inlinedSymtab.syms if sym.isLocalToReifee)
abort("local free var, should have already been inlined by Metalevels: " + inlinedSymtab.symDef(sym))
state.symtab ++= inlinedSymtab
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
index bb7e1f9b56..6c94726231 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
@@ -9,7 +9,7 @@ trait GenTypes {
/**
* Reify a type.
- * For internal use only, use ``reified'' instead.
+ * For internal use only, use `reified` instead.
*/
def reifyType(tpe: Type): Tree = {
assert(tpe != null, "tpe is null")
@@ -73,7 +73,6 @@ trait GenTypes {
if (reifyDebug) println("splicing " + tpe)
val tagFlavor = if (concrete) tpnme.TypeTag.toString else tpnme.WeakTypeTag.toString
- val key = (tagFlavor, tpe.typeSymbol)
// if this fails, it might produce the dreaded "erroneous or inaccessible type" error
// to find out the whereabouts of the error run scalac with -Ydebug
if (reifyDebug) println("launching implicit search for %s.%s[%s]".format(universe, tagFlavor, tpe))
@@ -156,7 +155,7 @@ trait GenTypes {
*/
private def reifySemiConcreteTypeMember(tpe: Type): Tree = tpe match {
case tpe @ TypeRef(pre @ SingleType(prepre, presym), sym, args) if sym.isAbstractType && !sym.isExistential =>
- return mirrorFactoryCall(nme.TypeRef, reify(pre), mirrorBuildCall(nme.selectType, reify(sym.owner), reify(sym.name.toString)), reify(args))
+ mirrorFactoryCall(nme.TypeRef, reify(pre), mirrorBuildCall(nme.selectType, reify(sym.owner), reify(sym.name.toString)), reify(args))
}
/** Reify an annotated type, i.e. the one that makes us deal with AnnotationInfos */
diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
index 49877b4286..e0570d61f2 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
@@ -5,7 +5,6 @@ trait GenUtils {
self: Reifier =>
import global._
- import definitions._
def reifyList(xs: List[Any]): Tree =
mkList(xs map reify)
@@ -31,41 +30,32 @@ trait GenUtils {
def call(fname: String, args: Tree*): Tree =
Apply(termPath(fname), args.toList)
- def mirrorSelect(name: String): Tree =
- termPath(nme.UNIVERSE_PREFIX + name)
+ def mirrorSelect(name: String): Tree = termPath(nme.UNIVERSE_PREFIX + name)
+ def mirrorSelect(name: TermName): Tree = mirrorSelect(name.toString)
- def mirrorBuildSelect(name: String): Tree =
- termPath(nme.UNIVERSE_BUILD_PREFIX + name)
-
- def mirrorMirrorSelect(name: String): Tree =
- termPath(nme.MIRROR_PREFIX + name)
+ def mirrorMirrorSelect(name: TermName): Tree =
+ termPath("" + nme.MIRROR_PREFIX + name)
def mirrorCall(name: TermName, args: Tree*): Tree =
- call("" + (nme.UNIVERSE_PREFIX append name), args: _*)
-
- def mirrorCall(name: String, args: Tree*): Tree =
- call(nme.UNIVERSE_PREFIX + name, args: _*)
+ call("" + nme.UNIVERSE_PREFIX + name, args: _*)
def mirrorBuildCall(name: TermName, args: Tree*): Tree =
- call("" + (nme.UNIVERSE_BUILD_PREFIX append name), args: _*)
-
- def mirrorBuildCall(name: String, args: Tree*): Tree =
- call(nme.UNIVERSE_BUILD_PREFIX + name, args: _*)
+ call("" + nme.UNIVERSE_BUILD_PREFIX + name, args: _*)
def mirrorMirrorCall(name: TermName, args: Tree*): Tree =
- call("" + (nme.MIRROR_PREFIX append name), args: _*)
-
- def mirrorMirrorCall(name: String, args: Tree*): Tree =
- call(nme.MIRROR_PREFIX + name, args: _*)
+ call("" + nme.MIRROR_PREFIX + name, args: _*)
def mirrorFactoryCall(value: Product, args: Tree*): Tree =
mirrorFactoryCall(value.productPrefix, args: _*)
- def mirrorFactoryCall(prefix: String, args: Tree*): Tree =
- mirrorCall(prefix, args: _*)
+ def mirrorFactoryCall(prefix: TermName, args: Tree*): Tree =
+ mirrorCall("" + prefix, args: _*)
+
+ def scalaFactoryCall(name: TermName, args: Tree*): Tree =
+ call(s"scala.$name.apply", args: _*)
def scalaFactoryCall(name: String, args: Tree*): Tree =
- call("scala." + name + ".apply", args: _*)
+ scalaFactoryCall(name: TermName, args: _*)
def mkList(args: List[Tree]): Tree =
scalaFactoryCall("collection.immutable.List", args: _*)
@@ -91,22 +81,6 @@ trait GenUtils {
/** An (unreified) path that refers to term definition with given fully qualified name */
def termPath(fullname: String): Tree = path(fullname, newTermName)
- /** An (unreified) path that refers to type definition with given fully qualified name */
- def typePath(fullname: String): Tree = path(fullname, newTypeName)
-
- def isTough(tpe: Type) = {
- def isTough(tpe: Type) = tpe match {
- case _: RefinedType => true
- case _: ExistentialType => true
- case _: ClassInfoType => true
- case _: MethodType => true
- case _: PolyType => true
- case _ => false
- }
-
- tpe != null && (tpe exists isTough)
- }
-
object TypedOrAnnotated {
def unapply(tree: Tree): Option[Tree] = tree match {
case ty @ Typed(_, _) =>
@@ -118,15 +92,6 @@ trait GenUtils {
}
}
- def isAnnotated(tpe: Type) = {
- def isAnnotated(tpe: Type) = tpe match {
- case _: AnnotatedType => true
- case _ => false
- }
-
- tpe != null && (tpe exists isAnnotated)
- }
-
def isSemiConcreteTypeMember(tpe: Type) = tpe match {
case TypeRef(SingleType(_, _), sym, _) if sym.isAbstractType && !sym.isExistential => true
case _ => false
@@ -145,4 +110,4 @@ trait GenUtils {
if (origin == "") origin = "of unknown origin"
origin
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala
index 55f8684df2..78f85c2634 100644
--- a/src/compiler/scala/reflect/reify/package.scala
+++ b/src/compiler/scala/reflect/reify/package.scala
@@ -1,11 +1,10 @@
package scala.reflect
-import scala.language.implicitConversions
-import scala.reflect.macros.{Context, ReificationException, UnexpectedReificationException}
+import scala.reflect.macros.ReificationException
import scala.tools.nsc.Global
package object reify {
- private def mkReifier(global1: Global)(typer: global1.analyzer.Typer, universe: global1.Tree, mirror: global1.Tree, reifee: Any, concrete: Boolean = false): Reifier { val global: global1.type } = {
+ private def mkReifier(global1: Global)(typer: global1.analyzer.Typer, universe: global1.Tree, mirror: global1.Tree, reifee: Any, concrete: Boolean): Reifier { val global: global1.type } = {
val typer1: typer.type = typer
val universe1: universe.type = universe
val mirror1: mirror.type = mirror
@@ -24,7 +23,8 @@ package object reify {
private[reify] def mkDefaultMirrorRef(global: Global)(universe: global.Tree, typer0: global.analyzer.Typer): global.Tree = {
import global._
- import definitions._
+ import definitions.JavaUniverseClass
+
val enclosingErasure = {
val rClassTree = reifyEnclosingRuntimeClass(global)(typer0)
// HACK around SI-6259
@@ -56,7 +56,7 @@ package object reify {
if (concrete) throw new ReificationException(enclosingMacroPosition, "tpe %s is an unresolved spliceable type".format(tpe))
}
- tpe.normalize match {
+ tpe.dealiasWiden match {
case TypeRef(_, ArrayClass, componentTpe :: Nil) =>
val componentErasure = reifyRuntimeClass(global)(typer0, componentTpe, concrete)
gen.mkMethodCall(arrayClassMethod, List(componentErasure))
@@ -71,7 +71,6 @@ package object reify {
// a class/object body, this will return an EmptyTree.
def reifyEnclosingRuntimeClass(global: Global)(typer0: global.analyzer.Typer): global.Tree = {
import global._
- import definitions._
def isThisInScope = typer0.context.enclosingContextChain exists (_.tree.isInstanceOf[ImplDef])
if (isThisInScope) {
val enclosingClasses = typer0.context.enclosingContextChain map (_.tree) collect { case classDef: ClassDef => classDef }
diff --git a/src/compiler/scala/reflect/reify/phases/Calculate.scala b/src/compiler/scala/reflect/reify/phases/Calculate.scala
index 4d1e22abe7..abd179b24b 100644
--- a/src/compiler/scala/reflect/reify/phases/Calculate.scala
+++ b/src/compiler/scala/reflect/reify/phases/Calculate.scala
@@ -5,7 +5,6 @@ trait Calculate {
self: Reifier =>
import global._
- import definitions._
implicit class RichCalculateSymbol(sym: Symbol) {
def metalevel: Int = { assert(sym != null && sym != NoSymbol); localSymbols.getOrElse(sym, 0) }
@@ -30,7 +29,7 @@ trait Calculate {
* Merely traverses the reifiee and records local symbols along with their metalevels.
*/
val calculate = new Traverser {
- // see the explanation of metalevels in ``Metalevels''
+ // see the explanation of metalevels in `Metalevels`
var currMetalevel = 1
override def traverse(tree: Tree): Unit = tree match {
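
The `calculate` traverser touched above is described as merely walking the reifiee and recording local symbols with their metalevels. As a rough, self-contained illustration of that kind of walk, here is a sketch using the public runtime-reflection Traverser, collecting identifier names rather than metalevels; none of the names below are the reifier's own.

import scala.collection.mutable.ListBuffer
import scala.reflect.runtime.universe._

object TraverseSketch extends App {
  // Reify a small expression and walk its tree, recording every Ident met.
  val tree = reify { val x = 1; x + 2 }.tree
  val seen = ListBuffer[String]()
  new Traverser {
    override def traverse(t: Tree): Unit = {
      t match {
        case Ident(name) => seen += name.toString
        case _           => // nothing recorded for other nodes
      }
      super.traverse(t)   // keep descending into subtrees
    }
  }.traverse(tree)
  println(seen.toList)    // identifiers referenced inside the reifiee, e.g. List(x)
}
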
diff --git a/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
index fbbd12a42f..18ea908cdf 100644
--- a/src/compiler/scala/reflect/reify/phases/Metalevels.scala
+++ b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
@@ -1,11 +1,12 @@
package scala.reflect.reify
package phases
+import scala.collection.{ mutable }
+
trait Metalevels {
self: Reifier =>
import global._
- import definitions._
/**
* Makes sense of cross-stage bindings.
@@ -39,15 +40,15 @@ trait Metalevels {
* However, how exactly do we do that in the case of y.splice? In this very scenario we can use dataflow analysis and inline it,
* but what if y were a var, and what if it were calculated randomly at runtime?
*
- * This question has a genuinely simple answer. Sure, we cannot resolve such splices statically (i.e. during macro expansion of ``reify''),
+ * This question has a genuinely simple answer. Sure, we cannot resolve such splices statically (i.e. during macro expansion of `reify`),
* but now we have runtime toolboxes, so no one stops us from picking up that reified tree and evaluating it at runtime
- * (in fact, this is something that ``Expr.splice'' does transparently).
+ * (in fact, this is something that `Expr.splice` does transparently).
*
* This is akin to early vs late binding dilemma.
* The former is faster; plus, the latter (implemented with reflection) might not work because of visibility issues or might not be available on all platforms.
* But the latter still has its uses, so I'm allowing metalevel breaches, but introducing the -Xlog-runtime-evals to log them.
*
- * upd. We no longer do that. In case of a runaway ``splice'' inside a `reify`, one will get a static error.
+ * upd. We no longer do that. In case of a runaway `splice` inside a `reify`, one will get a static error.
* Why? Unfortunately, the cute idea of transparently converting between static and dynamic splices has failed.
* 1) Runtime eval that services dynamic splices requires scala-compiler.jar, which might not be on library classpath
* 2) Runtime eval incurs a severe performance penalty, so it'd be better to be explicit about it
@@ -102,7 +103,7 @@ trait Metalevels {
*/
val metalevels = new Transformer {
var insideSplice = false
- var inlineableBindings = scala.collection.mutable.Map[TermName, Tree]()
+ val inlineableBindings = mutable.Map[TermName, Tree]()
def withinSplice[T](op: => T) = {
val old = insideSplice
@@ -124,7 +125,7 @@ trait Metalevels {
withinSplice { super.transform(TreeSplice(ReifiedTree(universe, mirror, symtab1, rtree, tpe, rtpe, concrete))) }
case TreeSplice(splicee) =>
if (reifyDebug) println("entering splice: " + splicee)
- val breaches = splicee filter (sub => sub.hasSymbol && sub.symbol != NoSymbol && sub.symbol.metalevel > 0)
+ val breaches = splicee filter (sub => sub.hasSymbolField && sub.symbol != NoSymbol && sub.symbol.metalevel > 0)
if (!insideSplice && breaches.nonEmpty) {
// we used to convert dynamic splices into runtime evals transparently, but we no longer do that
// why? see comments above
@@ -135,7 +136,7 @@ trait Metalevels {
} else {
withinSplice { super.transform(tree) }
}
- // todo. also inline usages of ``inlineableBindings'' in the symtab itself
+ // todo. also inline usages of `inlineableBindings` in the symtab itself
// e.g. a free$Foo can well use free$x, if Foo is path-dependent w.r.t x
// FreeRef(_, _) check won't work, because metalevels of symbol table and body are different, hence, freerefs in symbol table look different from freerefs in body
case FreeRef(_, name) if inlineableBindings contains name =>
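
The comment above reasons about splices that cross metalevels and why a runaway `splice` inside `reify` is now a static error rather than a transparent runtime eval. For orientation, a sketch of the well-behaved case only, using the public reify/splice API; the object and value names are illustrative.

import scala.reflect.runtime.universe._

object SpliceSketch extends App {
  // An already-reified expression is spliced into a new reify block; the
  // metalevels line up, so nothing has to be evaluated at runtime.
  val inner: Expr[Int]   = reify(21)
  val doubled: Expr[Int] = reify(inner.splice * 2)
  println(show(doubled.tree))   // roughly: 21 * 2
}
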
diff --git a/src/compiler/scala/reflect/reify/phases/Reify.scala b/src/compiler/scala/reflect/reify/phases/Reify.scala
index 8e13a45cdb..143424dac5 100644
--- a/src/compiler/scala/reflect/reify/phases/Reify.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reify.scala
@@ -2,7 +2,6 @@ package scala.reflect.reify
package phases
import scala.runtime.ScalaRunTime.isAnyVal
-import scala.runtime.ScalaRunTime.isTuple
import scala.reflect.reify.codegen._
trait Reify extends GenSymbols
@@ -16,7 +15,6 @@ trait Reify extends GenSymbols
self: Reifier =>
import global._
- import definitions._
private object reifyStack {
def currents: List[Any] = state.reifyStack
@@ -37,7 +35,7 @@ trait Reify extends GenSymbols
/**
* Reifies any supported value.
- * For internal use only, use ``reified'' instead.
+ * For internal use only, use `reified` instead.
*/
def reify(reifee: Any): Tree = reifyStack.push(reifee)(reifee match {
// before adding some case here, in global scope, please, consider
@@ -59,4 +57,4 @@ trait Reify extends GenSymbols
case _ =>
throw new Error("reifee %s of type %s is not supported".format(reifee, reifee.getClass))
})
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
index a320718084..bb5cb53d7d 100644
--- a/src/compiler/scala/reflect/reify/phases/Reshape.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -48,13 +48,13 @@ trait Reshape {
val Template(parents, self, body) = impl
var body1 = trimAccessors(classDef, reshapeLazyVals(body))
body1 = trimSyntheticCaseClassMembers(classDef, body1)
- var impl1 = Template(parents, self, body1).copyAttrs(impl)
+ val impl1 = Template(parents, self, body1).copyAttrs(impl)
ClassDef(mods, name, params, impl1).copyAttrs(classDef)
case moduledef @ ModuleDef(mods, name, impl) =>
val Template(parents, self, body) = impl
var body1 = trimAccessors(moduledef, reshapeLazyVals(body))
body1 = trimSyntheticCaseClassMembers(moduledef, body1)
- var impl1 = Template(parents, self, body1).copyAttrs(impl)
+ val impl1 = Template(parents, self, body1).copyAttrs(impl)
ModuleDef(mods, name, impl1).copyAttrs(moduledef)
case template @ Template(parents, self, body) =>
val discardedParents = parents collect { case tt: TypeTree => tt } filter isDiscarded
@@ -89,8 +89,8 @@ trait Reshape {
}
private def undoMacroExpansion(tree: Tree): Tree =
- tree.attachments.get[MacroExpansionAttachment] match {
- case Some(MacroExpansionAttachment(original)) =>
+ tree.attachments.get[analyzer.MacroExpansionAttachment] match {
+ case Some(analyzer.MacroExpansionAttachment(original, _)) =>
original match {
// this hack is necessary until I fix implicit macros
// so far tag materialization is implemented by sneaky macros hidden in scala-compiler.jar
@@ -116,7 +116,6 @@ trait Reshape {
private def toPreTyperModifiers(mods: Modifiers, sym: Symbol) = {
if (!sym.annotations.isEmpty) {
- val Modifiers(flags, privateWithin, annotations) = mods
val postTyper = sym.annotations filter (_.original != EmptyTree)
if (reifyDebug && !postTyper.isEmpty) println("reify symbol annotations for: " + sym)
if (reifyDebug && !postTyper.isEmpty) println("originals are: " + sym.annotations)
@@ -131,8 +130,8 @@ trait Reshape {
*
* NB: This is the trickiest part of reification!
*
- * In most cases, we're perfectly fine to reify a Type itself (see ``reifyType'').
- * However if the type involves a symbol declared inside the quasiquote (i.e. registered in ``boundSyms''),
+ * In most cases, we're perfectly fine to reify a Type itself (see `reifyType`).
+ * However if the type involves a symbol declared inside the quasiquote (i.e. registered in `boundSyms`),
* then we cannot reify it; otherwise subsequent reflective compilation will fail.
*
* Why will it fail? Because reified deftrees (e.g. ClassDef(...)) will generate fresh symbols during that compilation,
@@ -140,7 +139,7 @@ trait Reshape {
* https://issues.scala-lang.org/browse/SI-5230
*
* To deal with this unpleasant fact, we need to fall back from types to equivalent trees (after all, parser trees don't contain any types, just trees, so it should be possible).
- * Luckily, these original trees get preserved for us in the ``original'' field when Trees get transformed into TypeTrees.
+ * Luckily, these original trees get preserved for us in the `original` field when Trees get transformed into TypeTrees.
* And if an original of a type tree is empty, we can safely assume that this type is non-essential (e.g. was inferred/generated by the compiler).
* In that case the type can be omitted (e.g. reified as an empty TypeTree), since it will be inferred again later on.
*
@@ -157,8 +156,8 @@ trait Reshape {
* upd. There are also problems with CompoundTypeTrees. I had to use attachments to retain necessary information.
*
* upd. Recently I went ahead and started using original for all TypeTrees, regardless of whether they refer to local symbols or not.
- * As a result, ``reifyType'' is never called directly by tree reification (and, wow, it seems to work great!).
- * The only usage of ``reifyType'' now is for servicing typetags, however, I have some ideas how to get rid of that as well.
+ * As a result, `reifyType` is never called directly by tree reification (and, wow, it seems to work great!).
+ * The only usage of `reifyType` now is for servicing typetags, however, I have some ideas how to get rid of that as well.
*/
private def isDiscarded(tt: TypeTree) = tt.original == null
private def toPreTyperTypeTree(tt: TypeTree): Tree = {
@@ -257,10 +256,10 @@ trait Reshape {
val DefDef(mods0, name0, _, _, tpt0, rhs0) = ddef
val name1 = nme.dropLocalSuffix(name0)
val Modifiers(flags0, privateWithin0, annotations0) = mods0
- var flags1 = (flags0 & GetterFlags) & ~(STABLE | ACCESSOR | METHOD)
+ val flags1 = (flags0 & GetterFlags) & ~(STABLE | ACCESSOR | METHOD)
val mods1 = Modifiers(flags1, privateWithin0, annotations0) setPositions mods0.positions
val mods2 = toPreTyperModifiers(mods1, ddef.symbol)
- ValDef(mods2, name1, tpt0, extractRhs(rhs0))
+ ValDef(mods2, name1.toTermName, tpt0, extractRhs(rhs0))
}
private def trimAccessors(deff: Tree, stats: List[Tree]): List[Tree] = {
@@ -272,7 +271,7 @@ trait Reshape {
def detectBeanAccessors(prefix: String): Unit = {
if (defdef.name.startsWith(prefix)) {
- var name = defdef.name.toString.substring(prefix.length)
+ val name = defdef.name.toString.substring(prefix.length)
def uncapitalize(s: String) = if (s.length == 0) "" else { val chars = s.toCharArray; chars(0) = chars(0).toLower; new String(chars) }
def findValDef(name: String) = (symdefs.values collect { case vdef: ValDef if nme.dropLocalSuffix(vdef.name).toString == name => vdef }).headOption
val valdef = findValDef(name).orElse(findValDef(uncapitalize(name))).orNull
@@ -282,13 +281,13 @@ trait Reshape {
detectBeanAccessors("get")
detectBeanAccessors("set")
detectBeanAccessors("is")
- });
+ })
- var stats1 = stats flatMap {
+ val stats1 = stats flatMap {
case vdef @ ValDef(mods, name, tpt, rhs) if !mods.isLazy =>
val mods1 = if (accessors.contains(vdef)) {
val ddef = accessors(vdef)(0) // any accessor will do
- val Modifiers(flags, privateWithin, annotations) = mods
+ val Modifiers(flags, _, annotations) = mods
var flags1 = flags & ~LOCAL
if (!ddef.symbol.isPrivate) flags1 = flags1 & ~PRIVATE
val privateWithin1 = ddef.mods.privateWithin
@@ -299,7 +298,7 @@ trait Reshape {
}
val mods2 = toPreTyperModifiers(mods1, vdef.symbol)
val name1 = nme.dropLocalSuffix(name)
- val vdef1 = ValDef(mods2, name1, tpt, rhs)
+ val vdef1 = ValDef(mods2, name1.toTermName, tpt, rhs)
if (reifyDebug) println("resetting visibility of field: %s => %s".format(vdef, vdef1))
Some(vdef1) // no copyAttrs here, because new ValDef and old symbols are now out of sync
case ddef: DefDef if !ddef.mods.isLazy =>
@@ -331,7 +330,8 @@ trait Reshape {
case Some(ddef) =>
toPreTyperLazyVal(ddef)
case None =>
- CannotReifyInvalidLazyVal(vdef)
+ if (reifyDebug) println("couldn't find corresponding lazy val accessor")
+ vdef
}
if (reifyDebug) println(s"reconstructed lazy val is $vdef1")
vdef1::Nil
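
The comment block above explains why reification falls back from Types to the equivalent trees preserved in a TypeTree's `original` field. A small way to see the "trees, not types" shape of a reified expression with the public API; this is purely illustrative, while the reshaping itself operates on compiler-internal TypeTrees.

import scala.reflect.runtime.universe._

object ReifiedShapeSketch extends App {
  // Even with an explicit type annotation, the reified representation is a
  // tree; showRaw exposes its constructors node by node.
  val e = reify { val xs: List[Int] = List(1, 2); xs.head }
  println(showRaw(e.tree))
}
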
diff --git a/src/compiler/scala/reflect/reify/utils/Extractors.scala b/src/compiler/scala/reflect/reify/utils/Extractors.scala
index 134ae13890..d57188bf6e 100644
--- a/src/compiler/scala/reflect/reify/utils/Extractors.scala
+++ b/src/compiler/scala/reflect/reify/utils/Extractors.scala
@@ -187,7 +187,7 @@ trait Extractors {
Literal(Constant(origin: String)))))
if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && newFreeTerm == nme.newFreeTerm &&
uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
- Some(uref1, name, reifyBinding(tree), flags, origin)
+ Some((uref1, name, reifyBinding(tree), flags, origin))
case _ =>
None
}
@@ -204,7 +204,7 @@ trait Extractors {
Literal(Constant(origin: String)))))
if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && newFreeType == nme.newFreeType &&
uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
- Some(uref1, name, reifyBinding(tree), flags, origin)
+ Some((uref1, name, reifyBinding(tree), flags, origin))
case _ =>
None
}
diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
index aca18c7df7..0903bc481c 100644
--- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
+++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
@@ -11,8 +11,6 @@ trait NodePrinters {
self: Utils =>
import global._
- import definitions._
- import Flag._
object reifiedNodeToString extends (Tree => String) {
def apply(tree: Tree): String = {
@@ -25,8 +23,8 @@ trait NodePrinters {
// Rolling a full-fledged, robust TreePrinter would be several times more code.
// Also as of late we have tests that ensure that UX won't be broken by random changes to the reifier.
val lines = (tree.toString.split(EOL) drop 1 dropRight 1).toList splitAt 2
- var (List(universe, mirror), reification) = lines
- reification = (for (line <- reification) yield {
+ val (List(universe, mirror), reification0) = lines
+ val reification = (for (line <- reification0) yield {
var s = line substring 2
s = s.replace(nme.UNIVERSE_PREFIX.toString, "")
s = s.replace(".apply", "")
@@ -38,8 +36,8 @@ trait NodePrinters {
flagsAreUsed = true
show(m.group(1).toLong)
})
- s = s.replace("Modifiers(0L, newTypeName(\"\"), List())", "Modifiers()")
- s = """Modifiers\((\d+)[lL], newTypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => {
+ s = s.replace("Modifiers(0L, TypeName(\"\"), List())", "Modifiers()")
+ s = """Modifiers\((\d+)[lL], TypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => {
val buf = new scala.collection.mutable.ListBuffer[String]
val annotations = m.group(3)
@@ -48,7 +46,7 @@ trait NodePrinters {
val privateWithin = "" + m.group(2)
if (buf.nonEmpty || privateWithin != "")
- buf.append("newTypeName(\"" + privateWithin + "\")")
+ buf.append("TypeName(\"" + privateWithin + "\")")
val bits = m.group(1)
if (buf.nonEmpty || bits != "0L") {
@@ -73,14 +71,14 @@ trait NodePrinters {
s.trim
})
- val printout = scala.collection.mutable.ListBuffer[String]();
+ val printout = scala.collection.mutable.ListBuffer[String]()
printout += universe.trim
if (mirrorIsUsed) printout += mirror.replace("Mirror[", "scala.reflect.api.Mirror[").trim
- val imports = scala.collection.mutable.ListBuffer[String]();
- imports += nme.UNIVERSE_SHORT
+ val imports = scala.collection.mutable.ListBuffer[String]()
+ imports += nme.UNIVERSE_SHORT.toString
// if (buildIsUsed) imports += nme.build
- if (mirrorIsUsed) imports += nme.MIRROR_SHORT
- if (flagsAreUsed) imports += nme.Flag
+ if (mirrorIsUsed) imports += nme.MIRROR_SHORT.toString
+ if (flagsAreUsed) imports += nme.Flag.toString
printout += s"""import ${imports map (_ + "._") mkString ", "}"""
val name = if (isExpr) "tree" else "tpe"
diff --git a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
index dbb0836e0a..5f8de9894f 100644
--- a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
+++ b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
@@ -8,8 +8,6 @@ trait SymbolTables {
self: Utils =>
import global._
- import definitions._
- import Flag._
class SymbolTable private[SymbolTable] (
private[SymbolTable] val symtab: immutable.ListMap[Symbol, Tree] = immutable.ListMap[Symbol, Tree](),
@@ -17,9 +15,6 @@ trait SymbolTables {
private[SymbolTable] val original: Option[List[Tree]] = None) {
def syms: List[Symbol] = symtab.keys.toList
- def isConcrete: Boolean = symtab.values forall (sym => !FreeTypeDef.unapply(sym).isDefined)
-
-// def aliases: Map[Symbol, List[TermName]] = aliases.distinct groupBy (_._1) mapValues (_ map (_._2))
def symDef(sym: Symbol): Tree =
symtab.getOrElse(sym, EmptyTree)
@@ -89,11 +84,6 @@ trait SymbolTables {
add(ValDef(NoMods, freshName(name0), TypeTree(), reification) updateAttachment bindingAttachment)
}
- private def add(sym: Symbol, name: TermName): SymbolTable = {
- if (!(syms contains sym)) error("cannot add an alias to a symbol not in the symbol table")
- add(sym, name, EmptyTree)
- }
-
private def remove(sym: Symbol): SymbolTable = {
val newSymtab = symtab - sym
val newAliases = aliases filter (_._1 != sym)
@@ -107,7 +97,7 @@ trait SymbolTables {
newSymtab = newSymtab map { case ((sym, tree)) =>
val ValDef(mods, primaryName, tpt, rhs) = tree
val tree1 =
- if (!(newAliases contains (sym, primaryName))) {
+ if (!(newAliases contains ((sym, primaryName)))) {
val primaryName1 = newAliases.find(_._1 == sym).get._2
ValDef(mods, primaryName1, tpt, rhs).copyAttrs(tree)
} else tree
@@ -143,7 +133,7 @@ trait SymbolTables {
var result = new SymbolTable(original = Some(encoded))
encoded foreach (entry => (entry.attachments.get[ReifyBindingAttachment], entry.attachments.get[ReifyAliasAttachment]) match {
case (Some(ReifyBindingAttachment(_)), _) => result += entry
- case (_, Some(ReifyAliasAttachment(sym, alias))) => result = new SymbolTable(result.symtab, result.aliases :+ (sym, alias))
+ case (_, Some(ReifyAliasAttachment(sym, alias))) => result = new SymbolTable(result.symtab, result.aliases :+ ((sym, alias)))
case _ => // do nothing, this is boilerplate that can easily be recreated by subsequent `result.encode`
})
result
@@ -214,4 +204,4 @@ trait SymbolTables {
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/ant/Pack200Task.scala b/src/compiler/scala/tools/ant/Pack200Task.scala
index 255efe55ec..3c1bc8cad9 100644
--- a/src/compiler/scala/tools/ant/Pack200Task.scala
+++ b/src/compiler/scala/tools/ant/Pack200Task.scala
@@ -65,7 +65,7 @@ class Pack200Task extends ScalaMatchingTask {
/** Set the flag to specify if file reordering should be performed. Reordering
* is used to remove empty packages and improve pack200 optimization.
- * @param keep
+ * @param x
* `'''true'''` to retain file ordering.
* `'''false'''` to optimize directory structure (DEFAULT). */
def setKeepFileOrder(x: Boolean) { keepFileOrder = x }
@@ -99,8 +99,8 @@ class Pack200Task extends ScalaMatchingTask {
private def getFileList: List[File] = {
var files: List[File] = Nil
val fs = getImplicitFileSet
- var ds = fs.getDirectoryScanner(getProject())
- var dir = fs.getDir(getProject())
+ val ds = fs.getDirectoryScanner(getProject())
+ val dir = fs.getDir(getProject())
for (filename <- ds.getIncludedFiles()
if filename.toLowerCase.endsWith(".jar")) {
val file = new File(dir, filename)
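
The keepFileOrder flag documented above maps onto the standard Pack200 packer property with the same meaning. A minimal stand-alone sketch, assuming a JDK that still ships java.util.jar.Pack200 and using placeholder file names:

import java.io.FileOutputStream
import java.util.jar.{ JarFile, Pack200 }

object Pack200Sketch extends App {
  val packer = Pack200.newPacker()
  // FALSE = let the packer reorder entries for better compression (the task's default).
  packer.properties().put(Pack200.Packer.KEEP_FILE_ORDER, Pack200.Packer.FALSE)
  val out = new FileOutputStream("example.pack")    // placeholder output path
  try packer.pack(new JarFile("example.jar"), out)  // placeholder input jar
  finally out.close()
}
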
diff --git a/src/compiler/scala/tools/ant/Same.scala b/src/compiler/scala/tools/ant/Same.scala
index e53679f052..6362d28580 100644
--- a/src/compiler/scala/tools/ant/Same.scala
+++ b/src/compiler/scala/tools/ant/Same.scala
@@ -32,7 +32,7 @@ import org.apache.tools.ant.types.Mapper
*
* @author Gilles Dubochet
* @version 1.0 */
-class Same extends ScalaMatchingTask {
+@deprecated("Use diff", "2.11.0") class Same extends ScalaMatchingTask {
/*============================================================================*\
** Ant user-properties **
\*============================================================================*/
@@ -110,7 +110,7 @@ class Same extends ScalaMatchingTask {
\*============================================================================*/
override def execute() = {
- validateAttributes
+ validateAttributes()
val mapper = getMapper
allEqualNow = true
val originNames: Array[String] = getDirectoryScanner(origin.get).getIncludedFiles
diff --git a/src/compiler/scala/tools/ant/ScalaTool.scala b/src/compiler/scala/tools/ant/ScalaTool.scala
index 57d24f6213..e7ac53c8fb 100644
--- a/src/compiler/scala/tools/ant/ScalaTool.scala
+++ b/src/compiler/scala/tools/ant/ScalaTool.scala
@@ -108,7 +108,7 @@ class ScalaTool extends ScalaMatchingTask {
* for general purpose scripts, as this does not assume all elements are
* relative to the Ant `basedir`. Additionally, the platform specific
* demarcation of any script variables (e.g. `${SCALA_HOME}` or
- * `%SCALA_HOME%`) can be specified in a platform independant way (e.g.
+ * `%SCALA_HOME%`) can be specified in a platform independent way (e.g.
* `@SCALA_HOME@`) and automatically translated for you.
*/
def setClassPath(input: String) {
@@ -190,13 +190,13 @@ class ScalaTool extends ScalaMatchingTask {
val builder = new StringBuilder()
while (chars.hasNext) {
- val char = chars.next
+ val char = chars.next()
if (char == '@') {
- var char = chars.next
+ var char = chars.next()
val token = new StringBuilder()
while (chars.hasNext && char != '@') {
token.append(char)
- char = chars.next
+ char = chars.next()
}
if (token.toString == "")
builder.append('@')
@@ -212,13 +212,13 @@ class ScalaTool extends ScalaMatchingTask {
val builder = new StringBuilder()
while (chars.hasNext) {
- val char = chars.next
+ val char = chars.next()
if (char == '@') {
- var char = chars.next
+ var char = chars.next()
val token = new StringBuilder()
while (chars.hasNext && char != '@') {
token.append(char)
- char = chars.next
+ char = chars.next()
}
if (tokens.contains(token.toString))
builder.append(tokens(token.toString))
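
The two loops above scan a template for @token@ markers and substitute platform-specific values, as the setClassPath scaladoc describes. A self-contained sketch of the same idea, not the tool's own code; here "@@" yields a literal '@' and unknown tokens are left untouched.

object TokenSubstitutionSketch extends App {
  def substitute(s: String, tokens: Map[String, String]): String = {
    val out = new StringBuilder
    var i = 0
    while (i < s.length) {
      if (s.charAt(i) != '@') { out.append(s.charAt(i)); i += 1 }
      else {
        val close = s.indexOf('@', i + 1)
        if (close == i + 1) { out.append('@'); i += 2 }            // "@@" -> literal '@'
        else if (close < 0) { out.append(s.substring(i)); i = s.length }
        else {
          val token = s.substring(i + 1, close)
          out.append(tokens.getOrElse(token, "@" + token + "@"))   // unknown token: keep as-is
          i = close + 1
        }
      }
    }
    out.toString
  }

  println(substitute("CP=@SCALA_HOME@/lib/*:@UNKNOWN@", Map("SCALA_HOME" -> "/opt/scala")))
  // prints: CP=/opt/scala/lib/*:@UNKNOWN@
}
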
diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala
index 73d09e82ba..b2cedf6338 100644
--- a/src/compiler/scala/tools/ant/Scalac.scala
+++ b/src/compiler/scala/tools/ant/Scalac.scala
@@ -55,8 +55,6 @@ import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
* - `usejavacp`,
* - `failonerror`,
* - `scalacdebugging`,
- * - `assemname`,
- * - `assemrefs`.
*
* It also takes the following parameters as nested elements:
* - `src` (for `srcdir`),
@@ -99,7 +97,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
/** Defines valid values for the `target` property. */
object Target extends PermissibleValue {
- val values = List("jvm-1.5", "jvm-1.5-fjbg", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7", "msil")
+ val values = List("jvm-1.5", "jvm-1.6", "jvm-1.7")
}
/** Defines valid values for the `deprecation` and `unchecked` properties. */
@@ -169,11 +167,6 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
/** Indicates whether compilation errors will fail the build; defaults to true. */
protected var failonerror: Boolean = true
- // Name of the output assembly (only relevant with -target:msil)
- protected var assemname: Option[String] = None
- // List of assemblies referenced by the program (only relevant with -target:msil)
- protected var assemrefs: Option[String] = None
-
/** Prints out the files being compiled by the scalac ant task
* (not only the number of files). */
protected var scalacDebugging: Boolean = false
@@ -420,9 +413,6 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
* @param input The specified flag */
def setScalacdebugging(input: Boolean) { scalacDebugging = input }
- def setAssemname(input: String) { assemname = Some(input) }
- def setAssemrefs(input: String) { assemrefs = Some(input) }
-
/** Sets the `compilerarg` as a nested compilerarg Ant parameter.
* @return A compiler argument to be configured. */
def createCompilerArg(): ImplementationSpecificArgument = {
@@ -505,7 +495,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
path.map(asString) mkString File.pathSeparator
/** Transforms a file into a Scalac-readable string.
- * @param path A file to convert.
+ * @param file A file to convert.
* @return A string-representation of the file like `/x/k/a.scala`. */
protected def asString(file: File): String =
file.getAbsolutePath()
@@ -518,7 +508,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
new Settings(error)
protected def newGlobal(settings: Settings, reporter: Reporter) =
- new Global(settings, reporter)
+ Global(settings, reporter)
/*============================================================================*\
** The big execute method **
@@ -612,9 +602,6 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
if (!unchecked.isEmpty) settings.unchecked.value = unchecked.get
if (!usejavacp.isEmpty) settings.usejavacp.value = usejavacp.get
- if (!assemname.isEmpty) settings.assemname.value = assemname.get
- if (!assemrefs.isEmpty) settings.assemrefs.value = assemrefs.get
-
val jvmargs = scalacCompilerArgs.getArgs filter (_ startsWith "-J")
if (!jvmargs.isEmpty) settings.jvmargs.value = jvmargs.toList
val defines = scalacCompilerArgs.getArgs filter (_ startsWith "-D")
@@ -685,7 +672,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
file
}
- val res = execWithArgFiles(java, List(writeSettings.getAbsolutePath))
+ val res = execWithArgFiles(java, List(writeSettings().getAbsolutePath))
if (failonerror && res != 0)
buildError("Compilation failed because of an internal compiler error;"+
" see the error output for details.")
diff --git a/src/compiler/scala/tools/ant/antlib.xml b/src/compiler/scala/tools/ant/antlib.xml
index 78159e6d10..7885534689 100644
--- a/src/compiler/scala/tools/ant/antlib.xml
+++ b/src/compiler/scala/tools/ant/antlib.xml
@@ -11,8 +11,6 @@
classname="scala.tools.ant.Scaladoc"/>
<taskdef name="scalatool"
classname="scala.tools.ant.ScalaTool"/>
- <taskdef name="same"
- classname="scala.tools.ant.Same"/>
<taskdef name="pack200"
classname="scala.tools.ant.Pack200Task"/>
</antlib>
diff --git a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
index 9cdf484080..76820b8060 100644
--- a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
+++ b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
@@ -80,7 +80,7 @@ class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs {
private def createMapper() = {
val mapper = new GlobPatternMapper()
- val extension = if (isMSIL) "*.msil" else "*.class"
+ val extension = "*.class"
mapper setTo extension
mapper setFrom "*.scala"
@@ -104,9 +104,6 @@ class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs {
sourcePath foreach (settings.sourcepath = _)
settings.extraParams = extraArgsFlat
- if (isMSIL)
- settings.sourcedir = sourceDir
-
val mapper = createMapper()
val includedFiles: Array[File] =
@@ -122,7 +119,7 @@ class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs {
return
if (includedFiles.nonEmpty)
- log("Compiling %d file%s to %s".format(includedFiles.size, plural(includedFiles.size), destinationDir))
+ log("Compiling %d file%s to %s".format(includedFiles.length, plural(includedFiles.length), destinationDir))
argfile foreach (x => log("Using argfile file: @" + x))
diff --git a/src/compiler/scala/tools/ant/sabbus/Settings.scala b/src/compiler/scala/tools/ant/sabbus/Settings.scala
index fde61e9564..d0fefdaa03 100644
--- a/src/compiler/scala/tools/ant/sabbus/Settings.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Settings.scala
@@ -10,7 +10,7 @@ package scala.tools.ant.sabbus
import java.io.File
-import org.apache.tools.ant.types.{Path, Reference}
+import org.apache.tools.ant.types.Path
class Settings {
diff --git a/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala b/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
index 6bb1aaa306..b061bcf7fb 100644
--- a/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
+++ b/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
@@ -98,6 +98,4 @@ trait TaskArgs extends CompilationPathProperty {
val parts = a.getParts
if(parts eq null) Seq[String]() else parts.toSeq
}
-
- def isMSIL = compTarget exists (_ == "msil")
}
diff --git a/src/compiler/scala/tools/ant/sabbus/Use.scala b/src/compiler/scala/tools/ant/sabbus/Use.scala
index 2c97232aec..5f50bb7908 100644
--- a/src/compiler/scala/tools/ant/sabbus/Use.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Use.scala
@@ -53,9 +53,9 @@ class Use extends ScalaMatchingTask {
compiler.settings.d,
mapper
) map (new File(sourceDir.get, _))
- if (includedFiles.size > 0)
+ if (includedFiles.length > 0)
try {
- log("Compiling " + includedFiles.size + " file" + (if (includedFiles.size > 1) "s" else "") + " to " + compiler.settings.d.getAbsolutePath)
+ log("Compiling " + includedFiles.length + " file" + (if (includedFiles.length > 1) "s" else "") + " to " + compiler.settings.d.getAbsolutePath)
val (errors, warnings) = compiler.compile(includedFiles)
if (errors > 0)
sys.error("Compilation failed with " + errors + " error" + (if (errors > 1) "s" else "") + ".")
diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
index f1c6c52785..84ccaba749 100644
--- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
@@ -102,6 +102,9 @@ if [[ -n "$cygwin" ]]; then
format=windows
fi
SCALA_HOME="$(cygpath --$format "$SCALA_HOME")"
+ if [[ -n "$JAVA_HOME" ]]; then
+ JAVA_HOME="$(cygpath --$format "$JAVA_HOME")"
+ fi
TOOL_CLASSPATH="$(cygpath --path --$format "$TOOL_CLASSPATH")"
elif [[ -n "$mingw" ]]; then
SCALA_HOME="$(cmd //c echo "$SCALA_HOME")"
diff --git a/src/compiler/scala/tools/cmd/CommandLine.scala b/src/compiler/scala/tools/cmd/CommandLine.scala
index 75f96d3c4b..cf0463423c 100644
--- a/src/compiler/scala/tools/cmd/CommandLine.scala
+++ b/src/compiler/scala/tools/cmd/CommandLine.scala
@@ -19,7 +19,7 @@ class CommandLine(val spec: Reference, val originalArgs: List[String]) extends C
def this(spec: Reference, line: String) = this(spec, Parser tokenize line)
def this(spec: Reference, args: Array[String]) = this(spec, args.toList)
- import spec.{ isAnyOption, isUnaryOption, isBinaryOption, isExpandOption }
+ import spec.{ isUnaryOption, isBinaryOption, isExpandOption }
val Terminator = "--"
val ValueForUnaryOption = "true" // so if --opt is given, x(--opt) = true
diff --git a/src/compiler/scala/tools/cmd/FromString.scala b/src/compiler/scala/tools/cmd/FromString.scala
index cba2e99998..433bbb167e 100644
--- a/src/compiler/scala/tools/cmd/FromString.scala
+++ b/src/compiler/scala/tools/cmd/FromString.scala
@@ -6,7 +6,7 @@
package scala.tools
package cmd
-import nsc.io.{ Path, File, Directory }
+import scala.tools.nsc.io.{ File, Directory }
import scala.reflect.runtime.{universe => ru}
import scala.tools.reflect.StdRuntimeTags._
@@ -24,18 +24,11 @@ abstract class FromString[+T](implicit t: ru.TypeTag[T]) extends PartialFunction
}
object FromString {
- // We need these because we clash with the String => Path implicits.
- private def toFile(s: String) = new File(new java.io.File(s))
+ // We need this because we clash with the String => Path implicits.
private def toDir(s: String) = new Directory(new java.io.File(s))
/** Path related stringifiers.
*/
- val ExistingFile: FromString[File] = new FromString[File]()(tagOfFile) {
- override def isDefinedAt(s: String) = toFile(s).isFile
- def apply(s: String): File =
- if (isDefinedAt(s)) toFile(s)
- else cmd.runAndExit(println("'%s' is not an existing file." format s))
- }
val ExistingDir: FromString[Directory] = new FromString[Directory]()(tagOfDirectory) {
override def isDefinedAt(s: String) = toDir(s).isDirectory
def apply(s: String): Directory =
diff --git a/src/compiler/scala/tools/cmd/Reference.scala b/src/compiler/scala/tools/cmd/Reference.scala
index bcbb454771..ec2a414065 100644
--- a/src/compiler/scala/tools/cmd/Reference.scala
+++ b/src/compiler/scala/tools/cmd/Reference.scala
@@ -26,7 +26,6 @@ trait Reference extends Spec {
def isUnaryOption(s: String) = unary contains toOpt(s)
def isBinaryOption(s: String) = binary contains toOpt(s)
def isExpandOption(s: String) = expansionMap contains toOpt(s)
- def isAnyOption(s: String) = isUnaryOption(s) || isBinaryOption(s) || isExpandOption(s)
def expandArg(arg: String) = expansionMap.getOrElse(fromOpt(arg), List(arg))
@@ -46,7 +45,7 @@ object Reference {
val MaxLine = 80
class Accumulators() {
- private var _help = new ListBuffer[() => String]
+ private val _help = new ListBuffer[() => String]
private var _unary = List[String]()
private var _binary = List[String]()
private var _expand = Map[String, List[String]]()
diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
index dbd2195938..35d4eaf1b6 100644
--- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala
+++ b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
@@ -13,7 +13,7 @@ trait AnyValReps {
sealed abstract class AnyValNum(name: String, repr: Option[String], javaEquiv: String) extends AnyValRep(name,repr,javaEquiv) {
- case class Op(val op : String, val doc : String)
+ case class Op(op : String, doc : String)
private def companionCoercions(tos: AnyValRep*) = {
tos.toList map (to =>
diff --git a/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala b/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala
index 903517c5b4..ee7e605425 100644
--- a/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala
+++ b/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala
@@ -12,8 +12,6 @@ trait CodegenSpec extends Spec with Meta.StdOpts with Interpolation {
def referenceSpec = CodegenSpec
def programInfo = Spec.Info("codegen", "", "scala.tools.cmd.gen.Codegen")
- import FromString.ExistingDir
-
help("Usage: codegen [<options>]")
// val inDir = "in" / "directory containing templates" --^ ExistingDir
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index 355a1fd262..15d365ab8c 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
import util.FreshNameCreator
-import scala.reflect.internal.util.{ Position, NoPosition, BatchSourceFile, SourceFile, NoSourceFile }
+import scala.reflect.internal.util.{ SourceFile, NoSourceFile }
import scala.collection.mutable
import scala.collection.mutable.{ LinkedHashSet, ListBuffer }
@@ -26,7 +26,7 @@ trait CompilationUnits { self: Global =>
class CompilationUnit(val source: SourceFile) extends CompilationUnitContextApi { self =>
/** the fresh name creator */
- var fresh: FreshNameCreator = new FreshNameCreator.Default
+ val fresh: FreshNameCreator = new FreshNameCreator.Default
def freshTermName(prefix: String): TermName = newTermName(fresh.newName(prefix))
def freshTypeName(prefix: String): TypeName = newTypeName(fresh.newName(prefix))
@@ -36,24 +36,34 @@ trait CompilationUnits { self: Global =>
def exists = source != NoSourceFile && source != null
-// def parseSettings() = {
-// val argsmarker = "SCALAC_ARGS"
-// if(comments nonEmpty) {
-// val pragmas = comments find (_.text.startsWith("//#")) // only parse first one
-// pragmas foreach { p =>
-// val i = p.text.indexOf(argsmarker)
-// if(i > 0)
-// }
-// }
-// }
/** Note: depends now contains toplevel classes.
* To get their sourcefiles, you need to dereference with .sourcefile
*/
- val depends = mutable.HashSet[Symbol]()
+ private[this] val _depends = mutable.HashSet[Symbol]()
+ // SBT compatibility (SI-6875)
+ //
+ // imagine we have a file named A.scala, which defines a trait named Foo and a module named Main
+ // Main contains a call to a macro, which calls c.introduceTopLevel to define a mock for Foo
+ // c.introduceTopLevel creates a virtual file Virt35af32.scala, which contains a class named FooMock extending Foo,
+ // and macro expansion instantiates FooMock. the stage is now set. let's see what happens next.
+ //
+ // without this workaround in scalac or without being patched itself, sbt will think that
+ // * Virt35af32 depends on A (because it extends Foo from A)
+ // * A depends on Virt35af32 (because it contains a macro expansion referring to FooMock from Virt35af32)
+ //
+ // after compiling A.scala, SBT will notice that it has a new source file named Virt35af32.
+ // it will also think that this file hasn't yet been compiled and, since A depends on it,
+ // it will think that A needs to be recompiled.
+ //
+ // recompilation will lead to another macro expansion. that macro expansion might choose to create a fresh mock,
+ // producing another virtual file, say, Virtee509a, which will again trick SBT into thinking that A needs a recompile,
+ // which will lead to another macro expansion, which will produce another virtual file and so on
+ def depends = if (exists && !source.file.isVirtual) _depends else mutable.HashSet[Symbol]()
/** so we can relink
*/
- val defined = mutable.HashSet[Symbol]()
+ private[this] val _defined = mutable.HashSet[Symbol]()
+ def defined = if (exists && !source.file.isVirtual) _defined else mutable.HashSet[Symbol]()
/** Synthetic definitions generated by namer, eliminated by typer.
*/
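
The SI-6875 comment above explains why dependency and definition tracking must ignore units backed by virtual files. A minimal sketch of the guard pattern, with stand-in types rather than compiler code:

import scala.collection.mutable

class UnitLike(val isVirtual: Boolean) {
  private[this] val _depends = mutable.HashSet[String]()
  // Real units expose the backing set; virtual units get a throwaway empty
  // set, so whatever a macro-created source records never reaches SBT.
  def depends: mutable.HashSet[String] =
    if (!isVirtual) _depends else mutable.HashSet[String]()
}

object VirtualUnitSketch extends App {
  val real = new UnitLike(isVirtual = false)
  val virt = new UnitLike(isVirtual = true)
  real.depends += "Foo"
  virt.depends += "FooMock"      // recorded into a fresh set and discarded
  println(real.depends)          // Set(Foo)
  println(virt.depends)          // Set()
}
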
@@ -123,18 +133,5 @@ trait CompilationUnits { self: Global =>
lazy val isJava = source.file.name.endsWith(".java")
override def toString() = source.toString()
-
- def clear() {
- fresh = new FreshNameCreator.Default
- body = EmptyTree
- depends.clear()
- defined.clear()
- synthetics.clear()
- toCheck.clear()
- checkedFeatures = Set()
- icode.clear()
- }
}
}
-
-
diff --git a/src/compiler/scala/tools/nsc/CompileClient.scala b/src/compiler/scala/tools/nsc/CompileClient.scala
index 731f6926f0..c756a1b0d9 100644
--- a/src/compiler/scala/tools/nsc/CompileClient.scala
+++ b/src/compiler/scala/tools/nsc/CompileClient.scala
@@ -5,7 +5,6 @@
package scala.tools.nsc
-import java.io.{ BufferedReader, File, InputStreamReader, PrintWriter }
import settings.FscSettings
import scala.tools.util.CompileOutputCommon
import sys.SystemProperties.preferIPv4Stack
diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala
index 7a0a072bb8..c5366566d9 100644
--- a/src/compiler/scala/tools/nsc/CompileServer.scala
+++ b/src/compiler/scala/tools/nsc/CompileServer.scala
@@ -5,7 +5,7 @@
package scala.tools.nsc
-import java.io.{ BufferedOutputStream, FileOutputStream, PrintStream }
+import java.io.PrintStream
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
import scala.reflect.internal.util.FakePos //Position
import scala.tools.util.SocketServer
@@ -29,8 +29,6 @@ class StandardCompileServer extends SocketServer {
var shutdown = false
var verbose = false
- val versionMsg = "Fast " + Properties.versionMsg
-
val MaxCharge = 0.8
private val runtime = Runtime.getRuntime()
@@ -58,9 +56,6 @@ class StandardCompileServer extends SocketServer {
(totalMemory - freeMemory).toDouble / maxMemory.toDouble > MaxCharge
}
- protected def newOfflineCompilerCommand(arguments: List[String], settings: FscSettings): OfflineCompilerCommand =
- new OfflineCompilerCommand(arguments, settings)
-
/** Problematically, Settings are only considered equal if every setting
* is exactly equal. In fsc this immediately breaks down because the randomly
* chosen temporary outdirs differ between client and server. Among other
@@ -92,7 +87,7 @@ class StandardCompileServer extends SocketServer {
val args = input.split("\0", -1).toList
val newSettings = new FscSettings(fscError)
- val command = newOfflineCompilerCommand(args, newSettings)
+ val command = new OfflineCompilerCommand(args, newSettings)
this.verbose = newSettings.verbose.value
info("Settings after normalizing paths: " + newSettings)
@@ -120,7 +115,7 @@ class StandardCompileServer extends SocketServer {
reporter = new ConsoleReporter(newSettings, in, out) {
// disable prompts, so that compile server cannot block
- override def displayPrompt = ()
+ override def displayPrompt() = ()
}
def isCompilerReusable: Boolean = {
if (compiler == null) {
@@ -162,7 +157,7 @@ class StandardCompileServer extends SocketServer {
}
}
reporter.printSummary()
- if (isMemoryFullEnough) {
+ if (isMemoryFullEnough()) {
info("Nulling out compiler due to memory utilization.")
clearCompiler()
}
@@ -177,9 +172,9 @@ object CompileServer extends StandardCompileServer {
private def createRedirect(filename: String) =
new PrintStream((redirectDir / filename).createFile().bufferedOutput())
- def main(args: Array[String]) =
+ def main(args: Array[String]) =
execute(() => (), args)
-
+
/**
* Used for internal testing. The callback is called upon
* server start, notifying the caller that the server is
@@ -204,7 +199,7 @@ object CompileServer extends StandardCompileServer {
compileSocket setPort port
startupCallback()
run()
-
+
compileSocket deletePort port
}
}
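
The MaxCharge constant and the isMemoryFullEnough() call seen in the hunks above implement a simple memory-pressure heuristic: drop the cached compiler once used memory exceeds a fixed fraction of the maximum heap. A stand-alone sketch of that check; the 0.8 threshold is the value shown above, the rest is illustrative.

object MemoryChargeSketch extends App {
  val MaxCharge = 0.8
  private val runtime = Runtime.getRuntime

  // "Charge" = fraction of the maximum heap currently in use.
  def isMemoryFullEnough(): Boolean = {
    val used = runtime.totalMemory - runtime.freeMemory
    used.toDouble / runtime.maxMemory.toDouble > MaxCharge
  }

  println("memory full enough: " + isMemoryFullEnough())
}
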
diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala
index 4051bda914..8087a31b45 100644
--- a/src/compiler/scala/tools/nsc/CompileSocket.scala
+++ b/src/compiler/scala/tools/nsc/CompileSocket.scala
@@ -5,13 +5,9 @@
package scala.tools.nsc
-import java.io.{ IOException, FileNotFoundException, PrintWriter, FileOutputStream }
-import java.io.{ BufferedReader, FileReader }
-import java.util.regex.Pattern
-import java.net._
+import java.io.{ FileNotFoundException, PrintWriter, FileOutputStream }
import java.security.SecureRandom
import io.{ File, Path, Directory, Socket }
-import scala.util.control.Exception.catching
import scala.tools.util.CompileOutputCommon
import scala.reflect.internal.util.StringOps.splitWhere
import scala.sys.process._
@@ -190,7 +186,7 @@ class CompileSocket extends CompileOutputCommon {
catch { case _: NumberFormatException => None }
def getSocket(serverAdr: String): Socket = (
- for ((name, portStr) <- splitWhere(serverAdr, _ == ':', true) ; port <- parseInt(portStr)) yield
+ for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- parseInt(portStr)) yield
getSocket(name, port)
) getOrElse fatal("Malformed server address: %s; exiting" format serverAdr)
diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala
index e994150f6f..f1f5130fb8 100644
--- a/src/compiler/scala/tools/nsc/CompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala
@@ -5,7 +5,6 @@
package scala.tools.nsc
-import scala.collection.mutable.ListBuffer
import io.File
/** A class representing command line info for scalac */
@@ -15,9 +14,6 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
type Setting = Settings#Setting
- /** file extensions of files that the compiler can process */
- lazy val fileEndings = Properties.fileEndings
-
private val processArgumentsResult =
if (shouldProcessArguments) processArguments
else (true, Nil)
@@ -41,8 +37,6 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
""".stripMargin.trim + "\n"
def shortUsage = "Usage: %s <options> <source files>" format cmdName
- def createUsagePreface(shouldExplain: Boolean) =
- if (shouldExplain) shortUsage + "\n" + explainAdvanced else ""
/** Creates a help message for a subset of options based on cond */
def createUsageMsg(cond: Setting => Boolean): String = {
@@ -82,9 +76,9 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
}
/** Messages explaining usage and options */
- def usageMsg = createUsageMsg("where possible standard", false, _.isStandard)
- def xusageMsg = createUsageMsg("Possible advanced", true, _.isAdvanced)
- def yusageMsg = createUsageMsg("Possible private", true, _.isPrivate)
+ def usageMsg = createUsageMsg("where possible standard", shouldExplain = false, _.isStandard)
+ def xusageMsg = createUsageMsg("Possible advanced", shouldExplain = true, _.isAdvanced)
+ def yusageMsg = createUsageMsg("Possible private", shouldExplain = true, _.isPrivate)
// If any of these settings is set, the compiler shouldn't start;
// an informative message of some sort should be printed instead.
@@ -128,6 +122,6 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
case x => List(x)
}
- settings.processArguments(expandedArguments, true)
+ settings.processArguments(expandedArguments, processAll = true)
}
}
diff --git a/src/compiler/scala/tools/nsc/CompilerRun.scala b/src/compiler/scala/tools/nsc/CompilerRun.scala
deleted file mode 100644
index 6746b08155..0000000000
--- a/src/compiler/scala/tools/nsc/CompilerRun.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-
-class CompilerRun {
- def firstPhase: Phase = NoPhase
- def terminalPhase: Phase = NoPhase
- def namerPhase: Phase = NoPhase
- def typerPhase: Phase = NoPhase
- def refchecksPhase: Phase = NoPhase
- def explicitouterPhase: Phase = NoPhase
- def erasurePhase: Phase = NoPhase
- def flattenPhase: Phase = NoPhase
- def mixinPhase: Phase = NoPhase
- def icodePhase: Phase = NoPhase
- def phaseNamed(name: String): Phase = NoPhase
-}
-
diff --git a/src/compiler/scala/tools/nsc/ConsoleWriter.scala b/src/compiler/scala/tools/nsc/ConsoleWriter.scala
index 5c5606e98b..6c16d19d2c 100644
--- a/src/compiler/scala/tools/nsc/ConsoleWriter.scala
+++ b/src/compiler/scala/tools/nsc/ConsoleWriter.scala
@@ -13,9 +13,9 @@ import java.io.Writer
* @version 1.0
*/
class ConsoleWriter extends Writer {
- def close = flush
+ def close() = flush()
- def flush = Console.flush
+ def flush() = Console.flush()
def write(cbuf: Array[Char], off: Int, len: Int) {
if (len > 0)
diff --git a/src/compiler/scala/tools/nsc/Driver.scala b/src/compiler/scala/tools/nsc/Driver.scala
index 814bd58a66..fc247600f6 100644
--- a/src/compiler/scala/tools/nsc/Driver.scala
+++ b/src/compiler/scala/tools/nsc/Driver.scala
@@ -1,11 +1,11 @@
package scala.tools.nsc
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
+import scala.tools.nsc.reporters.ConsoleReporter
import Properties.{ versionString, copyrightString, residentPromptString }
-import scala.reflect.internal.util.{ BatchSourceFile, FakePos }
+import scala.reflect.internal.util.FakePos
abstract class Driver {
-
+
val prompt = residentPromptString
val versionMsg = "Scala compiler " +
@@ -68,4 +68,4 @@ abstract class Driver {
sys.exit(if (reporter.hasErrors) 1 else 0)
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/EvalLoop.scala b/src/compiler/scala/tools/nsc/EvalLoop.scala
index c4147fad4c..15a296c836 100644
--- a/src/compiler/scala/tools/nsc/EvalLoop.scala
+++ b/src/compiler/scala/tools/nsc/EvalLoop.scala
@@ -14,7 +14,7 @@ trait EvalLoop {
def loop(action: (String) => Unit) {
@tailrec def inner() {
Console.print(prompt)
- val line = try Console.readLine catch { case _: EOFException => null }
+ val line = try Console.readLine() catch { case _: EOFException => null }
if (line != null && line != "") {
action(line)
inner()
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
index c8fd5985c6..e710222285 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
@@ -26,7 +26,7 @@ extends CompilerCommand(args, settings) {
// change CompilerCommand behavior
override def shouldProcessArguments: Boolean = false
- private lazy val (_ok, targetAndArguments) = settings.processArguments(args, false)
+ private lazy val (_ok, targetAndArguments) = settings.processArguments(args, processAll = false)
override def ok = _ok
private def guessHowToRun(target: String): GenericRunnerCommand.HowToRun = {
if (!ok) Error
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
index 9c2db11a56..ad75d02bff 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
@@ -39,7 +39,4 @@ class GenericRunnerSettings(error: String => Unit) extends Settings(error) {
val nc = BooleanSetting(
"-nc",
"do not use the fsc compilation daemon") withAbbreviation "-nocompdaemon"
-
- @deprecated("Use `nc` instead", "2.9.0") def nocompdaemon = nc
- @deprecated("Use `save` instead", "2.9.0") def savecompiled = save
}
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 7a67ac6246..7ee3ee551f 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -8,17 +8,14 @@ package scala.tools.nsc
import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException }
import java.nio.charset.{ Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException }
import scala.compat.Platform.currentTime
-import scala.tools.util.PathResolver
import scala.collection.{ mutable, immutable }
import io.{ SourceReader, AbstractFile, Path }
import reporters.{ Reporter, ConsoleReporter }
-import util.{ Exceptional, ClassPath, MergedClassPath, StatisticsInfo, ScalaClassLoader, returning }
-import scala.reflect.internal.util.{ NoPosition, OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
+import util.{ ClassPath, MergedClassPath, StatisticsInfo, returning, stackTraceString, stackTraceHeadString }
+import scala.reflect.internal.util.{ OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
-import settings.{ AestheticSettings }
import symtab.{ Flags, SymbolTable, SymbolLoaders, SymbolTrackers }
import symtab.classfile.Pickler
-import dependencies.DependencyAnalysis
import plugins.Plugins
import ast._
import ast.parser._
@@ -26,13 +23,11 @@ import typechecker._
import transform.patmat.PatternMatching
import transform._
import backend.icode.{ ICodes, GenICode, ICodeCheckers }
-import backend.{ ScalaPrimitives, Platform, MSILPlatform, JavaPlatform }
-import backend.jvm.{GenJVM, GenASM}
+import backend.{ ScalaPrimitives, Platform, JavaPlatform }
+import backend.jvm.GenASM
import backend.opt.{ Inliners, InlineExceptionHandlers, ClosureElimination, DeadCodeElimination }
import backend.icode.analysis._
import scala.language.postfixOps
-import scala.reflect.internal.StdAttachments
-import scala.reflect.ClassTag
class Global(var currentSettings: Settings, var reporter: Reporter)
extends SymbolTable
@@ -47,6 +42,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// the mirror --------------------------------------------------
override def isCompilerUniverse = true
+ override val useOffsetPositions = !currentSettings.Yrangepos.value
class GlobalMirror extends Roots(NoSymbol) {
val universe: self.type = self
@@ -75,8 +71,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def this(settings: Settings) =
this(settings, new ConsoleReporter(settings))
- def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree = gen.mkAttributedQualifier(tpe, termSym)
-
def picklerPhase: Phase = if (currentRun.isDefined) currentRun.picklerPhase else NoPhase
// platform specific elements
@@ -84,8 +78,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
type ThisPlatform = Platform { val global: Global.this.type }
lazy val platform: ThisPlatform =
- if (forMSIL) new { val global: Global.this.type = Global.this } with MSILPlatform
- else new { val global: Global.this.type = Global.this } with JavaPlatform
+ new { val global: Global.this.type = Global.this } with JavaPlatform
type PlatformClassPath = ClassPath[platform.BinaryRepr]
type OptClassPath = Option[PlatformClassPath]
@@ -173,7 +166,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if (lastPrintedSource == source)
println(": tree is unchanged since " + lastPrintedPhase)
else {
- lastPrintedPhase = phase.prev // since we're running inside "afterPhase"
+ lastPrintedPhase = phase.prev // since we're running inside "exitingPhase"
lastPrintedSource = source
println("")
println(source)
@@ -236,13 +229,17 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// of assert and require (but for now I've reproduced them here,
// because there are a million to fix.)
@inline final def assert(assertion: Boolean, message: => Any) {
- Predef.assert(assertion, supplementErrorMessage("" + message))
+ // calling Predef.assert would send a freshly allocated closure wrapping the one received as argument.
+ if (!assertion)
+ throw new java.lang.AssertionError("assertion failed: "+ supplementErrorMessage("" + message))
}
@inline final def assert(assertion: Boolean) {
assert(assertion, "")
}
@inline final def require(requirement: Boolean, message: => Any) {
- Predef.require(requirement, supplementErrorMessage("" + message))
+ // calling Predef.require would send a freshly allocated closure wrapping the one received as argument.
+ if (!requirement)
+ throw new IllegalArgumentException("requirement failed: "+ supplementErrorMessage("" + message))
}
@inline final def require(requirement: Boolean) {
require(requirement, "")
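
The replacement bodies above avoid Predef.assert and Predef.require because forwarding the by-name message allocates an extra closure on every call. A minimal sketch of the same trick outside the compiler; the names are illustrative.

object CheapChecks {
  // Take the message by name and test the condition ourselves, so the
  // message thunk is only touched (and no wrapper closure is built) when
  // the check actually fails.
  @inline final def assertOrThrow(assertion: Boolean, message: => Any): Unit =
    if (!assertion)
      throw new java.lang.AssertionError("assertion failed: " + message)

  @inline final def requireOrThrow(requirement: Boolean, message: => Any): Unit =
    if (!requirement)
      throw new IllegalArgumentException("requirement failed: " + message)
}

object CheapChecksDemo extends App {
  import CheapChecks._
  assertOrThrow(1 + 1 == 2, "arithmetic is broken")              // passes silently
  requireOrThrow(args.length >= 0, "args can never be negative") // passes silently
}
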
@@ -258,27 +255,28 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if (settings.debug.value)
body
}
- // Warnings issued only under -Ydebug. For messages which should reach
- // developer ears, but are not adequately actionable by users.
- @inline final override def debugwarn(msg: => String) {
- if (settings.debug.value)
- warning(msg)
+ /** This is for WARNINGS which should reach the ears of scala developers
+ * whenever they occur, but are not useful for normal users. They should
+ * be precise, explanatory, and infrequent. Please don't use this as a
+ * logging mechanism. !!! is prefixed to all messages issued via this route
+ * to make them visually distinct.
+ */
+ @inline final override def devWarning(msg: => String) {
+ if (settings.developer.value || settings.debug.value)
+ warning("!!! " + msg)
+ else
+ log("!!! " + msg) // such warnings always at least logged
}
private def elapsedMessage(msg: String, start: Long) =
msg + " in " + (currentTime - start) + "ms"
def informComplete(msg: String): Unit = reporter.withoutTruncating(inform(msg))
- def informProgress(msg: String) = if (opt.verbose) inform("[" + msg + "]")
- def inform[T](msg: String, value: T): T = returning(value)(x => inform(msg + x))
+ def informProgress(msg: String) = if (settings.verbose.value) inform("[" + msg + "]")
def informTime(msg: String, start: Long) = informProgress(elapsedMessage(msg, start))
def logError(msg: String, t: Throwable): Unit = ()
- def logAfterEveryPhase[T](msg: String)(op: => T) {
- log("Running operation '%s' after every phase.\n".format(msg) + describeAfterEveryPhase(op))
- }
-
override def shouldLogAtThisPhase = settings.log.isSetByUser && (
(settings.log containsPhase globalPhase) || (settings.log containsPhase phase)
)
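
The devWarning scaladoc above describes warnings meant for compiler developers: surfaced only under a developer/debug setting, otherwise merely logged, and prefixed with "!!!" so they stand out. A rough sketch of that routing with stand-in flags; the real method keys off settings.developer and settings.debug, as shown above.

object DevWarningSketch extends App {
  var developerMode = false      // stand-in for settings.developer.value
  var debugMode     = false      // stand-in for settings.debug.value

  def warning(msg: String): Unit = Console.err.println("warning: " + msg)
  def log(msg: String): Unit     = ()   // stand-in for the compiler's log

  // Developer-facing warnings reach the console only in developer/debug
  // mode; otherwise they are only logged.
  @inline def devWarning(msg: => String): Unit =
    if (developerMode || debugMode) warning("!!! " + msg)
    else log("!!! " + msg)

  devWarning("this one is only logged")
  developerMode = true
  devWarning("this one reaches the console")
}
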
@@ -302,7 +300,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
private val reader: SourceReader = {
val defaultEncoding = Properties.sourceEncoding
- val defaultReader = Properties.sourceReader
def loadCharset(name: String) =
try Some(Charset.forName(name))
@@ -315,7 +312,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
None
}
- val charset = opt.encoding flatMap loadCharset getOrElse {
+ val charset = ( if (settings.encoding.isSetByUser) Some(settings.encoding.value) else None ) flatMap loadCharset getOrElse {
settings.encoding.value = defaultEncoding // A mandatory charset
Charset.forName(defaultEncoding)
}
@@ -330,62 +327,17 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
}
- opt.sourceReader flatMap loadReader getOrElse {
+ ( if (settings.sourceReader.isSetByUser) Some(settings.sourceReader.value) else None ) flatMap loadReader getOrElse {
new SourceReader(charset.newDecoder(), reporter)
}
}
- if (!dependencyAnalysis.off)
- dependencyAnalysis.loadDependencyAnalysis()
-
- if (opt.verbose || opt.logClasspath) {
+ if (settings.verbose.value || settings.Ylogcp.value) {
// Uses the "do not truncate" inform
informComplete("[search path for source files: " + classPath.sourcepaths.mkString(",") + "]")
informComplete("[search path for class files: " + classPath.asClasspathString + "]")
}
- object opt extends AestheticSettings {
- def settings = Global.this.settings
-
- // protected implicit lazy val globalPhaseOrdering: Ordering[Phase] = Ordering[Int] on (_.id)
- def isActive(ph: Settings#PhasesSetting) = ph containsPhase globalPhase
- def wasActive(ph: Settings#PhasesSetting) = ph containsPhase globalPhase.prev
-
- // Allows for syntax like scalac -Xshow-class Random@erasure,typer
- private def splitClassAndPhase(str: String, term: Boolean): Name = {
- def mkName(s: String) = if (term) newTermName(s) else newTypeName(s)
- (str indexOf '@') match {
- case -1 => mkName(str)
- case idx =>
- val phasePart = str drop (idx + 1)
- settings.Yshow.tryToSetColon(phasePart split ',' toList)
- mkName(str take idx)
- }
- }
-
- // behavior
-
- // debugging
- def checkPhase = wasActive(settings.check)
- def logPhase = isActive(settings.log)
-
- // Write *.icode files right after GenICode when -Xprint-icode was given.
- def writeICodeAtICode = settings.writeICode.isSetByUser && isActive(settings.writeICode)
-
- // showing/printing things
- def browsePhase = isActive(settings.browse)
- def echoFilenames = opt.debug && (opt.verbose || currentRun.size < 5)
- def noShow = settings.Yshow.isDefault
- def printLate = settings.printLate.value
- def printPhase = isActive(settings.Xprint)
- def showNames = List(showClass, showObject).flatten
- def showPhase = isActive(settings.Yshow)
- def showSymbols = settings.Yshowsyms.value
- def showTrees = settings.Xshowtrees.value || settings.XshowtreesCompact.value || settings.XshowtreesStringified.value
- val showClass = optSetting[String](settings.Xshowcls) map (x => splitClassAndPhase(x, false))
- val showObject = optSetting[String](settings.Xshowobj) map (x => splitClassAndPhase(x, true))
- }
-
// The current division between scala.reflect.* and scala.tools.nsc.* is pretty
// clunky. It is often difficult to have a setting influence something without having
// to create it on that side. For this one my strategy is a constant def at the file
@@ -394,11 +346,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// Here comes another one...
override protected val enableTypeVarExperimentals = settings.Xexperimental.value
- // True if -Xscript has been set, indicating a script run.
- def isScriptRun = opt.script.isDefined
-
def getSourceFile(f: AbstractFile): BatchSourceFile =
- if (isScriptRun) ScriptSourceFile(f, reader read f)
+ if (settings.script.isSetByUser) ScriptSourceFile(f, reader read f)
else new BatchSourceFile(f, reader read f)
def getSourceFile(name: String): SourceFile = {
@@ -453,7 +402,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if ((unit ne null) && unit.exists)
lastSeenSourceFile = unit.source
- if (opt.echoFilenames)
+ if (settings.debug.value && (settings.verbose.value || currentRun.size < 5))
inform("[running phase " + name + " on " + unit + "]")
val unit0 = currentUnit
@@ -463,7 +412,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
currentRun.informUnitStarting(this, unit)
apply(unit)
}
- currentRun.advanceUnit
+ currentRun.advanceUnit()
} finally {
//assert(currentUnit == unit)
currentRun.currentUnit = unit0
@@ -472,16 +421,18 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
/** Switch to turn on detailed type logs */
- var printTypings = settings.Ytyperdebug.value
- var printInfers = settings.Yinferdebug.value
+ val printTypings = settings.Ytyperdebug.value
+ val printInfers = settings.Yinferdebug.value
// phaseName = "parser"
- object syntaxAnalyzer extends {
+ lazy val syntaxAnalyzer = new {
val global: Global.this.type = Global.this
val runsAfter = List[String]()
val runsRightAfter = None
} with SyntaxAnalyzer
+ import syntaxAnalyzer.{ UnitScanner, UnitParser }
+
// !!! I think we're overdue for all these phase objects being lazy vals.
// There's no way for a Global subclass to provide a custom typer
// despite the existence of a "def newTyper(context: Context): Typer"
@@ -629,7 +580,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val runsRightAfter = None
} with Inliners
- // phaseName = "inlineExceptionHandlers"
+ // phaseName = "inlinehandlers"
object inlineExceptionHandlers extends {
val global: Global.this.type = Global.this
val runsAfter = List("inliner")
@@ -639,7 +590,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// phaseName = "closelim"
object closureElimination extends {
val global: Global.this.type = Global.this
- val runsAfter = List("inlineExceptionHandlers")
+ val runsAfter = List("inlinehandlers")
val runsRightAfter = None
} with ClosureElimination
@@ -650,13 +601,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val runsRightAfter = None
} with DeadCodeElimination
- // phaseName = "jvm", FJBG-based version
- object genJVM extends {
- val global: Global.this.type = Global.this
- val runsAfter = List("dce")
- val runsRightAfter = None
- } with GenJVM
-
// phaseName = "jvm", ASM-based version
object genASM extends {
val global: Global.this.type = Global.this
@@ -664,19 +608,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val runsRightAfter = None
} with GenASM
- // This phase is optional: only added if settings.make option is given.
- // phaseName = "dependencyAnalysis"
- object dependencyAnalysis extends {
- val global: Global.this.type = Global.this
- val runsAfter = List("jvm")
- val runsRightAfter = None
- } with DependencyAnalysis
-
// phaseName = "terminal"
object terminal extends {
val global: Global.this.type = Global.this
val phaseName = "terminal"
- val runsAfter = List("jvm", "msil")
+ val runsAfter = List("jvm")
val runsRightAfter = None
} with SubComponent {
private var cache: Option[GlobalPhase] = None
@@ -691,13 +627,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
}
- // phaseName = "SAMPLE PHASE"
- object sampleTransform extends {
- val global: Global.this.type = Global.this
- val runsAfter = List[String]()
- val runsRightAfter = None
- } with SampleTransform
-
/** The checkers are for validating the compiler data structures
* at phase boundaries.
*/
@@ -794,13 +723,41 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** A description of the phases that will run */
def phaseDescriptions: String = {
- val width = phaseNames map (_.length) max
- val fmt = "%" + width + "s %2s %s\n"
+ val Limit = 16 // phase names should not be absurdly long
+ val MaxCol = 80 // because some of us edit on green screens
+ val maxName = (0 /: phaseNames)(_ max _.length)
+ val width = maxName min Limit
+ val maxDesc = MaxCol - (width + 6) // descriptions not novels
+ val fmt = if (settings.verbose.value) s"%${maxName}s %2s %s%n"
+ else s"%${width}.${width}s %2s %.${maxDesc}s%n"
val line1 = fmt.format("phase name", "id", "description")
val line2 = fmt.format("----------", "--", "-----------")
+
+ // built-in string precision merely truncates
+ import java.util.{ Formattable, FormattableFlags, Formatter }
+ def fmtable(s: String) = new Formattable {
+ override def formatTo(formatter: Formatter, flags: Int, width: Int, precision: Int) {
+ val p = elliptically(s, precision)
+ val w = if (width > 0 && p.length < width) {
+ import FormattableFlags.LEFT_JUSTIFY
+ val leftly = (flags & LEFT_JUSTIFY) == LEFT_JUSTIFY
+ val sb = new StringBuilder
+ def pad() = 1 to width - p.length foreach (_ => sb.append(' '))
+ if (!leftly) pad()
+ sb.append(p)
+ if (leftly) pad()
+ sb.toString
+ } else p
+ formatter.out.append(w)
+ }
+ }
+ def elliptically(s: String, max: Int) =
+ if (max < 0 || s.length <= max) s
+ else if (max < 4) s.take(max)
+ else s.take(max - 3) + "..."
val descs = phaseDescriptors.zipWithIndex map {
- case (ph, idx) => fmt.format(ph.phaseName, idx + 1, phasesDescMap(ph))
+ case (ph, idx) => fmt.format(fmtable(ph.phaseName), idx + 1, fmtable(phasesDescMap(ph)))
}
line1 :: line2 :: descs mkString
}
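
A standalone sketch of the table formatting introduced above. `elliptically` and `fmtable` mirror the patch; the java.util.Formattable wrapper is what lets a "%.Ns" precision truncate with "..." rather than a hard cut. Width padding, which the patch handles inside formatTo, is omitted here for brevity.

object PhaseTableSketch {
  import java.util.{ Formattable, Formatter }

  // Truncate with an ellipsis once the string exceeds max characters.
  def elliptically(s: String, max: Int): String =
    if (max < 0 || s.length <= max) s
    else if (max < 4) s.take(max)
    else s.take(max - 3) + "..."

  // Wrap a string so that "%s" conversions delegate to elliptical truncation.
  def fmtable(s: String): Formattable = new Formattable {
    override def formatTo(formatter: Formatter, flags: Int, width: Int, precision: Int): Unit =
      formatter.out.append(elliptically(s, precision))
  }

  def main(args: Array[String]): Unit = {
    val fmt = "%16.16s %2s %.58s%n"
    print(fmt.format(fmtable("phase name"), "id", fmtable("description")))
    print(fmt.format(fmtable("inlinehandlers"), 18, fmtable("optimization: inline exception handlers, a description long enough to be truncated")))
  }
}
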
@@ -830,48 +787,16 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** Returns List of (phase, value) pairs, including only those
* where the value compares unequal to the previous phase's value.
*/
- def afterEachPhase[T](op: => T): List[(Phase, T)] = {
+ def afterEachPhase[T](op: => T): List[(Phase, T)] = { // used in tests
phaseDescriptors.map(_.ownPhase).filterNot(_ eq NoPhase).foldLeft(List[(Phase, T)]()) { (res, ph) =>
- val value = afterPhase(ph)(op)
+ val value = exitingPhase(ph)(op)
if (res.nonEmpty && res.head._2 == value) res
else ((ph, value)) :: res
} reverse
}
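
The fold above only keeps a (phase, value) pair when the value differs from the previous phase's value. A standalone sketch of that collapsing step, with plain strings standing in for phases and computed values:

object AfterEachPhaseSketch {
  // Keep only the pairs whose value differs from the previous pair's value.
  def collapse[P, T](pairs: List[(P, T)]): List[(P, T)] =
    pairs.foldLeft(List.empty[(P, T)]) { (res, pv) =>
      if (res.nonEmpty && res.head._2 == pv._2) res else pv :: res
    }.reverse

  def main(args: Array[String]): Unit = {
    val perPhase = List("typer" -> "Int", "pickler" -> "Int", "erasure" -> "int", "cleanup" -> "int")
    println(collapse(perPhase))  // List((typer,Int), (erasure,int))
  }
}
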
- /** Returns List of ChangeAfterPhase objects, encapsulating those
- * phase transitions where the result of the operation gave a different
- * list than it had when run during the previous phase.
- */
- def changesAfterEachPhase[T](op: => List[T]): List[ChangeAfterPhase[T]] = {
- val ops = ((NoPhase, Nil)) :: afterEachPhase(op)
-
- ops sliding 2 map {
- case (_, before) :: (ph, after) :: Nil =>
- val lost = before filterNot (after contains _)
- val gained = after filterNot (before contains _)
- ChangeAfterPhase(ph, lost, gained)
- case _ => ???
- } toList
- }
private def numberedPhase(ph: Phase) = "%2d/%s".format(ph.id, ph.name)
- case class ChangeAfterPhase[+T](ph: Phase, lost: List[T], gained: List[T]) {
- private def mkStr(what: String, xs: List[_]) = (
- if (xs.isEmpty) ""
- else xs.mkString(what + " after " + numberedPhase(ph) + " {\n ", "\n ", "\n}\n")
- )
- override def toString = mkStr("Lost", lost) + mkStr("Gained", gained)
- }
-
- def describeAfterEachPhase[T](op: => T): List[String] =
- afterEachPhase(op) map { case (ph, t) => "[after %-15s] %s".format(numberedPhase(ph), t) }
-
- def describeAfterEveryPhase[T](op: => T): String =
- describeAfterEachPhase(op) map (" " + _ + "\n") mkString
-
- def printAfterEachPhase[T](op: => T): Unit =
- describeAfterEachPhase(op) foreach (m => println(" " + m))
-
// ------------ Invalidations ---------------------------------
/** Is given package class a system package class that cannot be invalidated?
@@ -885,8 +810,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** Invalidates packages that contain classes defined in a classpath entry, and
* rescans that entry.
- * @param path A fully qualified name that refers to a directory or jar file that's
- * an entry on the classpath.
+ * @param paths Fully qualified names that refer to directories or jar files that are
+ * entries on the classpath.
* First, causes the classpath entries referred to by `paths` to be rescanned, so that
* any new files or deleted files or changes in subpackages are picked up.
* Second, invalidates any packages for which one of the following conditions is met:
@@ -1074,7 +999,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
object typeDeconstruct extends {
val global: Global.this.type = Global.this
- } with interpreter.StructuredTypeStrings
+ } with typechecker.StructuredTypeStrings
/** There are common error conditions where when the exception hits
* here, currentRun.currentUnit is null. This robs us of the knowledge
@@ -1094,40 +1019,37 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def currentUnit: CompilationUnit = if (currentRun eq null) NoCompilationUnit else currentRun.currentUnit
def currentSource: SourceFile = if (currentUnit.exists) currentUnit.source else lastSeenSourceFile
- // TODO - trim these to the absolute minimum.
- @inline final def afterErasure[T](op: => T): T = afterPhase(currentRun.erasurePhase)(op)
- @inline final def afterPostErasure[T](op: => T): T = afterPhase(currentRun.posterasurePhase)(op)
- @inline final def afterExplicitOuter[T](op: => T): T = afterPhase(currentRun.explicitouterPhase)(op)
- @inline final def afterFlatten[T](op: => T): T = afterPhase(currentRun.flattenPhase)(op)
- @inline final def afterIcode[T](op: => T): T = afterPhase(currentRun.icodePhase)(op)
- @inline final def afterMixin[T](op: => T): T = afterPhase(currentRun.mixinPhase)(op)
- @inline final def afterPickler[T](op: => T): T = afterPhase(currentRun.picklerPhase)(op)
- @inline final def afterRefchecks[T](op: => T): T = afterPhase(currentRun.refchecksPhase)(op)
- @inline final def afterSpecialize[T](op: => T): T = afterPhase(currentRun.specializePhase)(op)
- @inline final def afterTyper[T](op: => T): T = afterPhase(currentRun.typerPhase)(op)
- @inline final def afterUncurry[T](op: => T): T = afterPhase(currentRun.uncurryPhase)(op)
- @inline final def beforeErasure[T](op: => T): T = beforePhase(currentRun.erasurePhase)(op)
- @inline final def beforeExplicitOuter[T](op: => T): T = beforePhase(currentRun.explicitouterPhase)(op)
- @inline final def beforeFlatten[T](op: => T): T = beforePhase(currentRun.flattenPhase)(op)
- @inline final def beforeIcode[T](op: => T): T = beforePhase(currentRun.icodePhase)(op)
- @inline final def beforeMixin[T](op: => T): T = beforePhase(currentRun.mixinPhase)(op)
- @inline final def beforePickler[T](op: => T): T = beforePhase(currentRun.picklerPhase)(op)
- @inline final def beforeRefchecks[T](op: => T): T = beforePhase(currentRun.refchecksPhase)(op)
- @inline final def beforeSpecialize[T](op: => T): T = beforePhase(currentRun.specializePhase)(op)
- @inline final def beforeTyper[T](op: => T): T = beforePhase(currentRun.typerPhase)(op)
- @inline final def beforeUncurry[T](op: => T): T = beforePhase(currentRun.uncurryPhase)(op)
-
- def explainContext(c: analyzer.Context): String = (
- if (c == null) "" else (
- """| context owners: %s
- |
- |Enclosing block or template:
- |%s""".format(
- c.owner.ownerChain.takeWhile(!_.isPackageClass).mkString(" -> "),
- nodePrinters.nodeToString(c.enclClassOrMethod.tree)
- )
- )
+ def isGlobalInitialized = (
+ definitions.isDefinitionsInitialized
+ && rootMirror.isMirrorInitialized
+ )
+ override def isPastTyper = (
+ (curRun ne null)
+ && isGlobalInitialized // defense against init order issues
+ && (globalPhase.id > currentRun.typerPhase.id)
)
+
+ // TODO - trim these to the absolute minimum.
+ @inline final def exitingErasure[T](op: => T): T = exitingPhase(currentRun.erasurePhase)(op)
+ @inline final def exitingPostErasure[T](op: => T): T = exitingPhase(currentRun.posterasurePhase)(op)
+ @inline final def exitingExplicitOuter[T](op: => T): T = exitingPhase(currentRun.explicitouterPhase)(op)
+ @inline final def exitingFlatten[T](op: => T): T = exitingPhase(currentRun.flattenPhase)(op)
+ @inline final def exitingMixin[T](op: => T): T = exitingPhase(currentRun.mixinPhase)(op)
+ @inline final def exitingPickler[T](op: => T): T = exitingPhase(currentRun.picklerPhase)(op)
+ @inline final def exitingRefchecks[T](op: => T): T = exitingPhase(currentRun.refchecksPhase)(op)
+ @inline final def exitingSpecialize[T](op: => T): T = exitingPhase(currentRun.specializePhase)(op)
+ @inline final def exitingTyper[T](op: => T): T = exitingPhase(currentRun.typerPhase)(op)
+ @inline final def exitingUncurry[T](op: => T): T = exitingPhase(currentRun.uncurryPhase)(op)
+ @inline final def enteringErasure[T](op: => T): T = enteringPhase(currentRun.erasurePhase)(op)
+ @inline final def enteringExplicitOuter[T](op: => T): T = enteringPhase(currentRun.explicitouterPhase)(op)
+ @inline final def enteringFlatten[T](op: => T): T = enteringPhase(currentRun.flattenPhase)(op)
+ @inline final def enteringIcode[T](op: => T): T = enteringPhase(currentRun.icodePhase)(op)
+ @inline final def enteringMixin[T](op: => T): T = enteringPhase(currentRun.mixinPhase)(op)
+ @inline final def enteringPickler[T](op: => T): T = enteringPhase(currentRun.picklerPhase)(op)
+ @inline final def enteringRefchecks[T](op: => T): T = enteringPhase(currentRun.refchecksPhase)(op)
+ @inline final def enteringTyper[T](op: => T): T = enteringPhase(currentRun.typerPhase)(op)
+ @inline final def enteringUncurry[T](op: => T): T = enteringPhase(currentRun.uncurryPhase)(op)
+
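
A hedged usage sketch of the entering/exiting aliases added above: they simply run an operation with the phase temporarily set. This assumes an already-initialized Global (with this patch applied) whose current Run has progressed past erasure; it is illustrative, not part of the patch.

object PhaseTravelSketch {
  // Compare a symbol's info as the typer saw it with its erased form.
  def erasureSignatures(global: scala.tools.nsc.Global)(sym: global.Symbol): (String, String) = {
    import global._
    (enteringErasure(sym.info.toString), exitingErasure(sym.info.toString))
  }
}
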
// Owners up to and including the first package class.
private def ownerChainString(sym: Symbol): String = (
if (sym == null) ""
@@ -1140,9 +1062,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
pairs.toList collect { case (k, v) if v != null => "%20s: %s".format(k, v) } mkString "\n"
)
- def explainTree(t: Tree): String = formatExplain(
- )
-
/** Don't want to introduce new errors trying to report errors,
* so swallow exceptions.
*/
@@ -1156,7 +1075,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val info1 = formatExplain(
"while compiling" -> currentSource.path,
- "during phase" -> ( if (globalPhase eq phase) phase else "global=%s, atPhase=%s".format(globalPhase, phase) ),
+ "during phase" -> ( if (globalPhase eq phase) phase else "global=%s, enteringPhase=%s".format(globalPhase, phase) ),
"library version" -> scala.util.Properties.versionString,
"compiler version" -> Properties.versionString,
"reconstructed args" -> settings.recreateArgs.mkString(" ")
@@ -1172,7 +1091,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val info3: List[String] = (
( List("== Enclosing template or block ==", nodePrinters.nodeToString(enclosing).trim) )
++ ( if (tpe eq null) Nil else List("== Expanded type of tree ==", typeDeconstruct.show(tpe)) )
- ++ ( if (!opt.debug) Nil else List("== Current unit body ==", nodePrinters.nodeToString(currentUnit.body)) )
+ ++ ( if (!settings.debug.value) Nil else List("== Current unit body ==", nodePrinters.nodeToString(currentUnit.body)) )
++ ( List(errorMessage) )
)
@@ -1188,7 +1107,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def echoPhaseSummary(ph: Phase) = {
/** Only output a summary message under debug if we aren't echoing each file. */
- if (opt.debug && !opt.echoFilenames)
+ if (settings.debug.value && !(settings.verbose.value || currentRun.size < 5))
inform("[running phase " + ph.name + " on " + currentRun.size + " compilation units]")
}
@@ -1199,14 +1118,15 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if (option.value) reporter.warning(pos, msg)
else if (!(warnings contains pos)) warnings += ((pos, msg))
def summarize() =
- if (option.isDefault && warnings.nonEmpty)
- reporter.warning(NoPosition, "there were %d %s warning(s); re-run with %s for details".format(warnings.size, what, option.name))
+ if (warnings.nonEmpty && (option.isDefault || settings.fatalWarnings.value))
+ warning("there were %d %s warning(s); re-run with %s for details".format(warnings.size, what, option.name))
}
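
A simplified standalone sketch of the ConditionalWarning pattern above: when the enabling option is off, individual warnings are buffered and summarized in a single line at the end of the run (the patch additionally re-emits the summary under -Xfatal-warnings and deduplicates by position, which this sketch omits).

object ConditionalWarningSketch {
  import scala.collection.mutable.ListBuffer

  final class ConditionalWarning(what: String, enabled: Boolean, optionName: String) {
    private val warnings = ListBuffer[String]()
    def warn(msg: String): Unit =
      if (enabled) Console.err.println("warning: " + msg)
      else warnings += msg
    def summarize(): Unit =
      if (!enabled && warnings.nonEmpty)
        Console.err.println(s"there were ${warnings.size} $what warning(s); re-run with $optionName for details")
  }

  def main(args: Array[String]): Unit = {
    val deprecations = new ConditionalWarning("deprecation", enabled = false, "-deprecation")
    deprecations.warn("method f in class C is deprecated")
    deprecations.warn("method g in class C is deprecated")
    deprecations.summarize()  // there were 2 deprecation warning(s); re-run with -deprecation for details
  }
}
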
- def newUnitParser(code: String) = new syntaxAnalyzer.UnitParser(newCompilationUnit(code))
- def newUnitScanner(code: String) = new syntaxAnalyzer.UnitScanner(newCompilationUnit(code))
- def newCompilationUnit(code: String) = new CompilationUnit(newSourceFile(code))
- def newSourceFile(code: String) = new BatchSourceFile("<console>", code)
+ def newCompilationUnit(code: String) = new CompilationUnit(newSourceFile(code))
+ def newSourceFile(code: String) = new BatchSourceFile("<console>", code)
+ def newUnitScanner(unit: CompilationUnit): UnitScanner = new UnitScanner(unit)
+ def newUnitParser(unit: CompilationUnit): UnitParser = new UnitParser(unit)
+ def newUnitParser(code: String): UnitParser = newUnitParser(newCompilationUnit(code))
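
A hedged usage sketch of the parser factories above. It assumes the scala-compiler jar (with this patch) is on the classpath; `UnitParser#parse` comes from this codebase's syntax analyzer, and the Run is created only so the compiler is fully initialized before parsing.

import scala.tools.nsc.{ Global, Settings }
import scala.tools.nsc.reporters.ConsoleReporter

object ParseSketch {
  def main(args: Array[String]): Unit = {
    val settings = new Settings()
    settings.usejavacp.value = true                  // reuse the JVM classpath for scala-library
    val global   = new Global(settings, new ConsoleReporter(settings))
    new global.Run()                                 // initializes phases; no files are compiled
    val tree = global.newUnitParser("class C { def f = 42 }").parse()
    println(global.show(tree))
  }
}
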
/** A Run is a single execution of the compiler on a sets of units
*/
@@ -1227,9 +1147,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val inlinerWarnings = new ConditionalWarning("inliner", settings.YinlinerWarnings)
val allConditionalWarnings = List(deprecationWarnings0, uncheckedWarnings0, featureWarnings, inlinerWarnings)
- // for sbt's benefit
- def uncheckedWarnings: List[(Position, String)] = uncheckedWarnings0.warnings.toList
- def deprecationWarnings: List[(Position, String)] = deprecationWarnings0.warnings.toList
+ def uncheckedWarnings: List[(Position, String)] = uncheckedWarnings0.warnings.toList // used in sbt
+ def deprecationWarnings: List[(Position, String)] = deprecationWarnings0.warnings.toList // used in sbt
var reportedFeature = Set[Symbol]()
@@ -1239,9 +1158,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** Have we already supplemented the error message of a compiler crash? */
private[nsc] final var supplementedError = false
- /** To be initialized from firstPhase. */
- private var terminalPhase: Phase = NoPhase
-
private val unitbuf = new mutable.ListBuffer[CompilationUnit]
val compiledFiles = new mutable.HashSet[String]
@@ -1291,7 +1207,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// Flush the cache in the terminal phase: the chain could have been built
// before without being used. (This happens in the interpreter.)
- terminal.reset
+ terminal.reset()
// Each subcomponent supplies a phase, which are chained together.
// If -Ystop:phase is given, neither that phase nor any beyond it is added.
@@ -1347,8 +1263,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// this handler should not be necessary, but it seems that `fsc`
// eats exceptions if they appear here. Need to find out the cause for
// this and fix it.
- inform("[reset] exception happened: "+ex);
- ex.printStackTrace();
+ inform("[reset] exception happened: "+ex)
+ ex.printStackTrace()
throw ex
}
@@ -1374,14 +1290,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def advancePhase() {
unitc = 0
phasec += 1
- refreshProgress
+ refreshProgress()
}
/** take note that a phase on a unit is completed
* (for progress reporting)
*/
def advanceUnit() {
unitc += 1
- refreshProgress
+ refreshProgress()
}
def cancel() { reporter.cancelled = true }
@@ -1402,7 +1318,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val namerPhase = phaseNamed("namer")
// val packageobjectsPhase = phaseNamed("packageobjects")
val typerPhase = phaseNamed("typer")
- val inlineclassesPhase = phaseNamed("inlineclasses")
+ // val inlineclassesPhase = phaseNamed("inlineclasses")
// val superaccessorsPhase = phaseNamed("superaccessors")
val picklerPhase = phaseNamed("pickler")
val refchecksPhase = phaseNamed("refchecks")
@@ -1415,22 +1331,19 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val erasurePhase = phaseNamed("erasure")
val posterasurePhase = phaseNamed("posterasure")
// val lazyvalsPhase = phaseNamed("lazyvals")
- val lambdaliftPhase = phaseNamed("lambdalift")
+ // val lambdaliftPhase = phaseNamed("lambdalift")
// val constructorsPhase = phaseNamed("constructors")
val flattenPhase = phaseNamed("flatten")
val mixinPhase = phaseNamed("mixin")
val cleanupPhase = phaseNamed("cleanup")
val icodePhase = phaseNamed("icode")
val inlinerPhase = phaseNamed("inliner")
- val inlineExceptionHandlersPhase = phaseNamed("inlineExceptionHandlers")
+ val inlineExceptionHandlersPhase = phaseNamed("inlinehandlers")
val closelimPhase = phaseNamed("closelim")
val dcePhase = phaseNamed("dce")
- val jvmPhase = phaseNamed("jvm")
- // val msilPhase = phaseNamed("msil")
+ // val jvmPhase = phaseNamed("jvm")
def runIsAt(ph: Phase) = globalPhase.id == ph.id
- def runIsPast(ph: Phase) = globalPhase.id > ph.id
- // def runIsAtBytecodeGen = (runIsAt(jvmPhase) || runIsAt(msilPhase))
def runIsAtOptimiz = {
runIsAt(inlinerPhase) || // listing phases in full for robustness when -Ystop-after has been given.
runIsAt(inlineExceptionHandlersPhase) ||
@@ -1473,7 +1386,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def compiles(sym: Symbol): Boolean =
if (sym == NoSymbol) false
else if (symSource.isDefinedAt(sym)) true
- else if (!sym.owner.isPackageClass) compiles(sym.enclosingTopLevelClass)
+ else if (!sym.isTopLevel) compiles(sym.enclosingTopLevelClass)
else if (sym.isModuleClass) compiles(sym.sourceModule)
else false
@@ -1494,13 +1407,29 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if (canCheck) {
phase = globalPhase
- if (globalPhase.id >= icodePhase.id) icodeChecker.checkICodes
- else treeChecker.checkTrees
+ if (globalPhase.id >= icodePhase.id) icodeChecker.checkICodes()
+ else treeChecker.checkTrees()
}
}
- private def showMembers() =
- opt.showNames foreach (x => showDef(x, opt.declsOnly, globalPhase))
+ private def showMembers() = {
+ // Allows for syntax like scalac -Xshow-class Random@erasure,typer
+ def splitClassAndPhase(str: String, term: Boolean): Name = {
+ def mkName(s: String) = if (term) newTermName(s) else newTypeName(s)
+ (str indexOf '@') match {
+ case -1 => mkName(str)
+ case idx =>
+ val phasePart = str drop (idx + 1)
+ settings.Yshow.tryToSetColon(phasePart split ',' toList)
+ mkName(str take idx)
+ }
+ }
+ if (settings.Xshowcls.isSetByUser)
+ showDef(splitClassAndPhase(settings.Xshowcls.value, term = false), declsOnly = false, globalPhase)
+
+ if (settings.Xshowobj.isSetByUser)
+ showDef(splitClassAndPhase(settings.Xshowobj.value, term = true), declsOnly = false, globalPhase)
+ }
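
A standalone sketch of the "Name@phase1,phase2" syntax handled above for -Xshow-class / -Xshow-object (the real code additionally feeds the phase list to -Yshow via tryToSetColon):

object ShowSyntaxSketch {
  // Split "Random@erasure,typer" into the name and the list of phase names.
  def splitClassAndPhase(str: String): (String, List[String]) =
    str.indexOf('@') match {
      case -1  => (str, Nil)
      case idx => (str.take(idx), str.drop(idx + 1).split(',').toList)
    }

  def main(args: Array[String]): Unit = {
    println(splitClassAndPhase("scala.util.Random@erasure,typer"))  // (scala.util.Random,List(erasure, typer))
    println(splitClassAndPhase("Predef"))                           // (Predef,List())
  }
}
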
// Similarly, this will only be created under -Yshow-syms.
object trackerFactory extends SymbolTrackers {
@@ -1508,7 +1437,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
lazy val trackers = currentRun.units.toList map (x => SymbolTracker(x))
def snapshot() = {
inform("\n[[symbol layout at end of " + phase + "]]")
- afterPhase(phase) {
+ exitingPhase(phase) {
trackers foreach { t =>
t.snapshot()
inform(t.show("Heading from " + phase.prev.name + " to " + phase.name))
@@ -1518,6 +1447,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
def reportCompileErrors() {
+ if (!reporter.hasErrors && reporter.hasWarnings && settings.fatalWarnings.value)
+ globalError("No warnings can be incurred under -Xfatal-warnings.")
+
if (reporter.hasErrors) {
for ((sym, file) <- symSource.iterator) {
sym.reset(new loaders.SourcefileLoader(file))
@@ -1526,7 +1458,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
}
else {
- allConditionalWarnings foreach (_.summarize)
+ allConditionalWarnings foreach (_.summarize())
if (seenMacroExpansionsFallingBack)
warning("some macros could not be expanded and code fell back to overridden methods;"+
@@ -1536,9 +1468,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
/** Compile list of source files */
- def compileSources(_sources: List[SourceFile]) {
- val depSources = dependencyAnalysis calculateFiles _sources.distinct
- val sources = coreClassesFirst(depSources)
+ def compileSources(sources: List[SourceFile]) {
// there is a problem already, e.g. a plugin was passed a bad option
if (reporter.hasErrors)
return
@@ -1556,12 +1486,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def compileUnits(units: List[CompilationUnit], fromPhase: Phase) {
try compileUnitsInternal(units, fromPhase)
catch { case ex: Throwable =>
- val shown = if (settings.verbose.value) {
- val pw = new java.io.PrintWriter(new java.io.StringWriter)
- ex.printStackTrace(pw)
- pw.toString
- } else ex.getClass.getName
- // ex.printStackTrace(Console.out) // DEBUG for fsc, note that error stacktraces do not print in fsc
+ val shown = if (settings.verbose.value)
+ stackTraceString(ex)
+ else
+ stackTraceHeadString(ex) // note that error stacktraces do not print in fsc
+
globalError(supplementErrorMessage("uncaught exception during compilation: " + shown))
throw ex
}
@@ -1580,67 +1509,66 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
while (globalPhase.hasNext && !reporter.hasErrors) {
val startTime = currentTime
phase = globalPhase
- globalPhase.run
+ globalPhase.run()
// progress update
informTime(globalPhase.description, startTime)
-
- if (opt.writeICodeAtICode || (opt.printPhase && runIsAtOptimiz)) {
+ val shouldWriteIcode = (
+ (settings.writeICode.isSetByUser && (settings.writeICode containsPhase globalPhase))
+ || (!settings.Xprint.doAllPhases && (settings.Xprint containsPhase globalPhase) && runIsAtOptimiz)
+ )
+ if (shouldWriteIcode) {
// Write *.icode files when -Xprint-icode or -Xprint:<some-optimiz-phase> was given.
writeICode()
- } else if (opt.printPhase || opt.printLate && runIsAt(cleanupPhase)) {
+ } else if ((settings.Xprint containsPhase globalPhase) || settings.printLate.value && runIsAt(cleanupPhase)) {
// print trees
- if (opt.showTrees) nodePrinters.printAll()
+ if (settings.Xshowtrees.value || settings.XshowtreesCompact.value || settings.XshowtreesStringified.value) nodePrinters.printAll()
else printAllUnits()
}
// print the symbols presently attached to AST nodes
- if (opt.showSymbols)
+ if (settings.Yshowsyms.value)
trackerFactory.snapshot()
// print members
- if (opt.showPhase)
+ if (settings.Yshow containsPhase globalPhase)
showMembers()
// browse trees with swing tree viewer
- if (opt.browsePhase)
+ if (settings.browse containsPhase globalPhase)
treeBrowser browse (phase.name, units)
// move the pointer
globalPhase = globalPhase.next
// run tree/icode checkers
- if (opt.checkPhase)
+ if (settings.check containsPhase globalPhase.prev)
runCheckers()
// output collected statistics
- if (opt.printStats)
+ if (settings.Ystatistics.value)
statistics.print(phase)
- advancePhase
+ advancePhase()
}
if (traceSymbolActivity)
units map (_.body) foreach (traceSymbols recordSymbolsInTree _)
// In case no phase was specified for -Xshow-class/object, show it now for sure.
- if (opt.noShow)
+ if (settings.Yshow.isDefault)
showMembers()
reportCompileErrors()
symSource.keys foreach (x => resetPackageClass(x.owner))
informTime("total", startTime)
- // record dependency data
- if (!dependencyAnalysis.off)
- dependencyAnalysis.saveDependencyAnalysis()
-
// Clear any sets or maps created via perRunCaches.
perRunCaches.clearAll()
// Reset project
if (!stopPhase("namer")) {
- atPhase(namerPhase) {
+ enteringPhase(namerPhase) {
resetProjectClasses(RootClass)
}
}
@@ -1656,7 +1584,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def compile(filenames: List[String]) {
try {
val sources: List[SourceFile] =
- if (isScriptRun && filenames.size > 1) returning(Nil)(_ => globalError("can only compile one script at a time"))
+ if (settings.script.isSetByUser && filenames.size > 1) returning(Nil)(_ => globalError("can only compile one script at a time"))
else filenames map getSourceFile
compileSources(sources)
@@ -1680,8 +1608,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if (firstPhase ne null) { // we might get here during initialization, if a source is newer than the binary
val maxId = math.max(globalPhase.id, typerPhase.id)
firstPhase.iterator takeWhile (_.id < maxId) foreach (ph =>
- atPhase(ph)(ph.asInstanceOf[GlobalPhase] applyPhase unit))
- refreshProgress
+ enteringPhase(ph)(ph.asInstanceOf[GlobalPhase] applyPhase unit))
+ refreshProgress()
}
}
@@ -1689,56 +1617,16 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
* is needed for?)
*/
private def resetPackageClass(pclazz: Symbol) {
- atPhase(firstPhase) {
- pclazz.setInfo(atPhase(typerPhase)(pclazz.info))
+ enteringPhase(firstPhase) {
+ pclazz.setInfo(enteringPhase(typerPhase)(pclazz.info))
}
if (!pclazz.isRoot) resetPackageClass(pclazz.owner)
}
-
- /**
- * Re-orders the source files to
- * 1. This Space Intentionally Left Blank
- * 2. LowPriorityImplicits / EmbeddedControls (i.e. parents of Predef)
- * 3. the rest
- *
- * 1 is to avoid cyclic reference errors.
- * 2 is due to the following. When completing "Predef" (*), typedIdent is called
- * for its parents (e.g. "LowPriorityImplicits"). typedIdent checks whether
- * the symbol reallyExists, which tests if the type of the symbol after running
- * its completer is != NoType.
- * If the "namer" phase has not yet run for "LowPriorityImplicits", the symbol
- * has a SourcefileLoader as type. Calling "doComplete" on it does nothing at
- * all, because the source file is part of the files to be compiled anyway.
- * So the "reallyExists" test will return "false".
- * Only after the namer, the symbol has a lazy type which actually computes
- * the info, and "reallyExists" behaves as expected.
- * So we need to make sure that the "namer" phase is run on predef's parents
- * before running it on predef.
- *
- * (*) Predef is completed early when calling "mkAttributedRef" during the
- * addition of "import Predef._" to sourcefiles. So this situation can't
- * happen for user classes.
- *
- */
- private def coreClassesFirst(files: List[SourceFile]) = {
- val goLast = 4
- def rank(f: SourceFile) = {
- if (f.file.container.name != "scala") goLast
- else f.file.name match {
- case "LowPriorityImplicits.scala" => 2
- case "StandardEmbeddings.scala" => 2
- case "EmbeddedControls.scala" => 2
- case "Predef.scala" => 3 /* Predef.scala before Any.scala, etc. */
- case _ => goLast
- }
- }
- files sortBy rank
- }
} // class Run
def printAllUnits() {
print("[[syntax trees at end of %25s]]".format(phase))
- afterPhase(phase)(currentRun.units foreach { unit =>
+ exitingPhase(phase)(currentRun.units foreach { unit =>
nodePrinters showUnit unit
})
}
@@ -1747,7 +1635,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
*/
def showDef(fullName: Name, declsOnly: Boolean, ph: Phase) = {
val boringOwners = Set[Symbol](definitions.AnyClass, definitions.AnyRefClass, definitions.ObjectClass)
- def phased[T](body: => T): T = afterPhase(ph)(body)
+ def phased[T](body: => T): T = exitingPhase(ph)(body)
def boringMember(sym: Symbol) = boringOwners(sym.owner)
def symString(sym: Symbol) = if (sym.isTerm) sym.defString else sym.toString
@@ -1793,7 +1681,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val printer = new icodes.TextPrinter(null, icodes.linearizer)
icodes.classes.values.foreach((cls) => {
val suffix = if (cls.symbol.hasModuleFlag) "$.icode" else ".icode"
- var file = getFile(cls.symbol, suffix)
+ val file = getFile(cls.symbol, suffix)
// if (file.exists())
// file = new File(file.getParentFile(), file.getName() + "1")
try {
@@ -1803,25 +1691,12 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
informProgress("wrote " + file)
} catch {
case ex: IOException =>
- if (opt.debug) ex.printStackTrace()
+ if (settings.debug.value) ex.printStackTrace()
globalError("could not write file " + file)
}
})
}
- // In order to not outright break code which overrides onlyPresentation (like sbt 0.7.5.RC0)
- // I restored and deprecated it. That would be enough to avoid the compilation
- // failure, but the override wouldn't accomplish anything. So now forInteractive
- // and forScaladoc default to onlyPresentation, which is the same as defaulting
- // to false except in old code. The downside is that this leaves us calling a
- // deprecated method: but I see no simple way out, so I leave it for now.
- def forJVM = opt.jvm
- override def forMSIL = opt.msil
- def forInteractive = onlyPresentation
- def forScaladoc = onlyPresentation
def createJavadoc = false
-
- @deprecated("Use forInteractive or forScaladoc, depending on what you're after", "2.9.0")
- def onlyPresentation = false
}
object Global {
diff --git a/src/compiler/scala/tools/nsc/Main.scala b/src/compiler/scala/tools/nsc/Main.scala
index 7d112dfb3e..00c6c37dfd 100644
--- a/src/compiler/scala/tools/nsc/Main.scala
+++ b/src/compiler/scala/tools/nsc/Main.scala
@@ -2,80 +2,24 @@
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-
-package scala.tools.nsc
-
-import java.io.File
-import File.pathSeparator
-
-import scala.tools.nsc.interactive.{ RefinedBuildManager, SimpleBuildManager }
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import scala.reflect.internal.util.{ BatchSourceFile, FakePos } //{Position}
-import Properties.msilLibPath
+package scala.tools
+package nsc
/** The main class for NSC, a compiler for the programming
- * language Scala.
+ * language Scala.
*/
-object Main extends Driver with EvalLoop {
-
- def resident(compiler: Global) {
- loop { line =>
- val args = line.split(' ').toList
- val command = new CompilerCommand(args, new Settings(scalacError))
- compiler.reporter.reset()
- new compiler.Run() compile command.files
- }
+class MainClass extends Driver with EvalLoop {
+ def resident(compiler: Global): Unit = loop { line =>
+ val command = new CompilerCommand(line split "\\s+" toList, new Settings(scalacError))
+ compiler.reporter.reset()
+ new compiler.Run() compile command.files
}
- override def processSettingsHook(): Boolean =
- if (settings.Yidedebug.value) {
- settings.Xprintpos.value = true
- settings.Yrangepos.value = true
- val compiler = new interactive.Global(settings, reporter)
- import compiler.{ reporter => _, _ }
-
- val sfs = command.files map getSourceFile
- val reloaded = new interactive.Response[Unit]
- askReload(sfs, reloaded)
-
- reloaded.get.right.toOption match {
- case Some(ex) => reporter.cancelled = true // Causes exit code to be non-0
- case None => reporter.reset() // Causes other compiler errors to be ignored
- }
- askShutdown
- false
- }
- else if (settings.Ybuilderdebug.value != "none") {
- def fileSet(files : List[String]) = Set.empty ++ (files map AbstractFile.getFile)
-
- val buildManager = settings.Ybuilderdebug.value match {
- case "simple" => new SimpleBuildManager(settings)
- case _ => new RefinedBuildManager(settings)
- }
- buildManager.addSourceFiles(fileSet(command.files))
-
- // enter resident mode
- loop { line =>
- val args = line.split(' ').toList
- val command = new CompilerCommand(args.toList, settings)
- buildManager.update(fileSet(command.files), Set.empty)
- }
- false
- }
- else {
- if (settings.target.value == "msil")
- msilLibPath foreach (x => settings.assemrefs.value += (pathSeparator + x))
- true
- }
-
- override def newCompiler(): Global =
- if (settings.Yrangepos.value) new Global(settings, reporter) with interactive.RangePositions
- else Global(settings, reporter)
-
+ override def newCompiler(): Global = Global(settings, reporter)
override def doCompile(compiler: Global) {
- if (settings.resident.value)
- resident(compiler)
+ if (settings.resident.value) resident(compiler)
else super.doCompile(compiler)
}
}
+
+object Main extends MainClass { }
diff --git a/src/compiler/scala/tools/nsc/MainBench.scala b/src/compiler/scala/tools/nsc/MainBench.scala
index f18ff19d7d..03190a63f3 100644
--- a/src/compiler/scala/tools/nsc/MainBench.scala
+++ b/src/compiler/scala/tools/nsc/MainBench.scala
@@ -5,28 +5,20 @@
package scala.tools.nsc
-import java.io.File
-import File.pathSeparator
-
-import scala.tools.nsc.interactive.{ RefinedBuildManager, SimpleBuildManager }
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import scala.reflect.internal.util.{ BatchSourceFile, FakePos } //{Position}
-import Properties.{ versionString, copyrightString, residentPromptString, msilLibPath }
import scala.reflect.internal.util.Statistics
/** The main class for NSC, a compiler for the programming
* language Scala.
*/
object MainBench extends Driver with EvalLoop {
-
+
lazy val theCompiler = Global(settings, reporter)
-
+
override def newCompiler() = theCompiler
-
+
val NIter = 50
val NBest = 10
-
+
override def main(args: Array[String]) = {
val times = new Array[Long](NIter)
var start = System.nanoTime()
diff --git a/src/compiler/scala/tools/nsc/MainTokenMetric.scala b/src/compiler/scala/tools/nsc/MainTokenMetric.scala
index 50cd51d486..584805b37e 100644
--- a/src/compiler/scala/tools/nsc/MainTokenMetric.scala
+++ b/src/compiler/scala/tools/nsc/MainTokenMetric.scala
@@ -21,11 +21,11 @@ object MainTokenMetric {
var totale = 0
for (source <- fnames) {
val s = new UnitScanner(new CompilationUnit(compiler.getSourceFile(source)))
- s.nextToken
+ s.nextToken()
var i = 0
while (s.token != EOF) {
i += 1
- s.nextToken
+ s.nextToken()
}
Console.println(i.toString + " " + source.toString())
totale += i
@@ -43,8 +43,8 @@ object MainTokenMetric {
} catch {
case ex @ FatalError(msg) =>
if (command.settings.debug.value)
- ex.printStackTrace();
- reporter.error(null, "fatal error: " + msg)
+ ex.printStackTrace()
+ reporter.error(null, "fatal error: " + msg)
}
}
diff --git a/src/compiler/scala/tools/nsc/ObjectRunner.scala b/src/compiler/scala/tools/nsc/ObjectRunner.scala
index f5123513c4..95264aeda6 100644
--- a/src/compiler/scala/tools/nsc/ObjectRunner.scala
+++ b/src/compiler/scala/tools/nsc/ObjectRunner.scala
@@ -8,15 +8,9 @@ package scala.tools.nsc
import java.net.URL
import util.ScalaClassLoader
-import java.lang.reflect.InvocationTargetException
import util.Exceptional.unwrap
trait CommonRunner {
- /** Check whether a class with the specified name
- * exists on the specified class path. */
- def classExists(urls: List[URL], objectName: String): Boolean =
- ScalaClassLoader.classExists(urls, objectName)
-
/** Run a given object, specified by name, using a
* specified classpath and argument list.
*
diff --git a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
index caf6ad14cf..2ce2fb3eaa 100644
--- a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
@@ -26,8 +26,8 @@ class OfflineCompilerCommand(arguments: List[String], settings: FscSettings) ext
// instead of whatever it's supposed to be doing.
val baseDirectory = {
val pwd = System.getenv("PWD")
- if (pwd != null && !isWin) Directory(pwd)
- else Directory.Current getOrElse Directory("/")
+ if (pwd == null || isWin) Directory.Current getOrElse Directory("/")
+ else Directory(pwd)
}
currentDir.value = baseDirectory.path
}
@@ -39,7 +39,7 @@ class OfflineCompilerCommand(arguments: List[String], settings: FscSettings) ext
override def cmdName = "fsc"
override def usageMsg = (
- createUsageMsg("where possible fsc", false, x => x.isStandard && settings.isFscSpecific(x.name)) +
+ createUsageMsg("where possible fsc", shouldExplain = false, x => x.isStandard && settings.isFscSpecific(x.name)) +
"\n\nStandard scalac options also available:" +
createUsageMsg(x => x.isStandard && !settings.isFscSpecific(x.name))
)
diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
index cff3590b3f..ae71eb7255 100644
--- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala
+++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
@@ -55,7 +55,7 @@ trait PhaseAssembly {
* node object does not exist, then create it.
*/
def getNodeByPhase(phs: SubComponent): Node = {
- var node: Node = getNodeByPhase(phs.phaseName)
+ val node: Node = getNodeByPhase(phs.phaseName)
node.phaseobj match {
case None =>
node.phaseobj = Some(List[SubComponent](phs))
@@ -75,7 +75,7 @@ trait PhaseAssembly {
* list of the nodes
*/
def softConnectNodes(frm: Node, to: Node) {
- var e = new Edge(frm, to, false)
+ val e = new Edge(frm, to, false)
this.edges += e
frm.after += e
@@ -87,7 +87,7 @@ trait PhaseAssembly {
* list of the nodes
*/
def hardConnectNodes(frm: Node, to: Node) {
- var e = new Edge(frm, to, true)
+ val e = new Edge(frm, to, true)
this.edges += e
frm.after += e
@@ -164,8 +164,8 @@ trait PhaseAssembly {
} else {
- var promote = hl.to.before.filter(e => (!e.hard))
- hl.to.before.clear
+ val promote = hl.to.before.filter(e => (!e.hard))
+ hl.to.before.clear()
sanity foreach (edge => hl.to.before += edge)
for (edge <- promote) {
rerun = true
@@ -182,7 +182,7 @@ trait PhaseAssembly {
/** Remove all nodes in the given graph, that have no phase object
* Make sure to clean up all edges when removing the node object
- * <code>Inform</code> with warnings, if an external phase has a
+ * `Inform` with warnings, if an external phase has a
* dependency on something that is dropped.
*/
def removeDanglingNodes() {
@@ -245,7 +245,7 @@ trait PhaseAssembly {
for (phs <- phsSet) {
- var fromnode = graph.getNodeByPhase(phs)
+ val fromnode = graph.getNodeByPhase(phs)
phs.runsRightAfter match {
case None =>
@@ -306,7 +306,7 @@ trait PhaseAssembly {
sbuf.append("\"" + node.allPhaseNames + "(" + node.level + ")" + "\" [color=\"#0000ff\"]\n")
}
sbuf.append("}\n")
- var out = new BufferedWriter(new FileWriter(filename))
+ val out = new BufferedWriter(new FileWriter(filename))
out.write(sbuf.toString)
out.flush()
out.close()
diff --git a/src/compiler/scala/tools/nsc/Phases.scala b/src/compiler/scala/tools/nsc/Phases.scala
index 0901ade2d7..e379afce9b 100644
--- a/src/compiler/scala/tools/nsc/Phases.scala
+++ b/src/compiler/scala/tools/nsc/Phases.scala
@@ -5,7 +5,6 @@
package scala.tools.nsc
-import symtab.Flags
import scala.reflect.internal.util.TableDef
import scala.language.postfixOps
@@ -22,7 +21,6 @@ object Phases {
}
val values = new Array[Cell](MaxPhases + 1)
def results = values filterNot (_ == null)
- def apply(ph: Phase): T = values(ph.id).value
def update(ph: Phase, value: T): Unit = values(ph.id) = Cell(ph, value)
}
/** A class for recording the elapsed time of each phase in the
@@ -40,7 +38,6 @@ object Phases {
>> ("ms" -> (_.value)) >+ " "
<< ("share" -> (_.value.toDouble * 100 / total formatted "%.2f"))
}
- def formatted = "" + table()
}
}
diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala
index 55fd196716..feb4ded2f2 100644
--- a/src/compiler/scala/tools/nsc/Properties.scala
+++ b/src/compiler/scala/tools/nsc/Properties.scala
@@ -16,10 +16,6 @@ object Properties extends scala.util.PropertiesTrait {
def residentPromptString = scalaPropOrElse("resident.prompt", "\nnsc> ")
def shellPromptString = scalaPropOrElse("shell.prompt", "\nscala> ")
- // settings based on system properties
- def msilLibPath = propOrNone("msil.libpath")
-
// derived values
def isEmacsShell = propOrEmpty("env.emacs") != ""
- def fileEndings = fileEndingString.split("""\|""").toList
}
diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala
index 107c4b3df3..92b2dc79ed 100644
--- a/src/compiler/scala/tools/nsc/ScriptRunner.scala
+++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
import io.{ Directory, File, Path }
import java.io.IOException
-import java.net.URL
import scala.tools.nsc.reporters.{Reporter,ConsoleReporter}
import util.Exceptional.unwrap
@@ -49,25 +48,12 @@ class ScriptRunner extends HasCompileSocket {
case x => x
}
- def isScript(settings: Settings) = settings.script.value != ""
-
/** Choose a jar filename to hold the compiled version of a script. */
private def jarFileFor(scriptFile: String)= File(
if (scriptFile endsWith ".jar") scriptFile
else scriptFile.stripSuffix(".scala") + ".jar"
)
- /** Read the entire contents of a file as a String. */
- private def contentsOfFile(filename: String) = File(filename).slurp()
-
- /** Split a fully qualified object name into a
- * package and an unqualified object name */
- private def splitObjectName(fullname: String): (Option[String], String) =
- (fullname lastIndexOf '.') match {
- case -1 => (None, fullname)
- case idx => (Some(fullname take idx), fullname drop (idx + 1))
- }
-
/** Compile a script using the fsc compilation daemon.
*/
private def compileWithDaemon(settings: GenericRunnerSettings, scriptFileIn: String) = {
diff --git a/src/compiler/scala/tools/nsc/SubComponent.scala b/src/compiler/scala/tools/nsc/SubComponent.scala
index a0468a22b9..9b8582ae02 100644
--- a/src/compiler/scala/tools/nsc/SubComponent.scala
+++ b/src/compiler/scala/tools/nsc/SubComponent.scala
@@ -47,8 +47,8 @@ abstract class SubComponent {
private var ownPhaseCache: WeakReference[Phase] = new WeakReference(null)
private var ownPhaseRunId = global.NoRunId
- @inline final def beforeOwnPhase[T](op: => T) = global.beforePhase(ownPhase)(op)
- @inline final def afterOwnPhase[T](op: => T) = global.afterPhase(ownPhase)(op)
+ @inline final def beforeOwnPhase[T](op: => T) = global.enteringPhase(ownPhase)(op)
+ @inline final def afterOwnPhase[T](op: => T) = global.exitingPhase(ownPhase)(op)
/** The phase corresponding to this subcomponent in the current compiler run */
def ownPhase: Phase = {
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index 6e39fc9aa1..3397797927 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -7,10 +7,7 @@ package scala.tools.nsc
package ast
import symtab._
-import reporters._
-import scala.reflect.internal.util.{Position, NoPosition}
import util.DocStrings._
-import scala.reflect.internal.Chars._
import scala.collection.mutable
/*
@@ -129,8 +126,6 @@ trait DocComments { self: Global =>
getDocComment(sym) map getUseCases getOrElse List()
}
- def useCases(sym: Symbol): List[(Symbol, String, Position)] = useCases(sym, sym.enclClass)
-
/** Returns the javadoc format of doc comment string `s`, including wiki expansion
*/
def toJavaDoc(s: String): String = expandWiki(s)
@@ -275,7 +270,7 @@ trait DocComments { self: Global =>
cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2))
case None =>
reporter.info(sym.pos, "The \"" + getSectionHeader + "\" annotation of the " + sym +
- " comment contains @inheritdoc, but the corresponding section in the parent is not defined.", true)
+ " comment contains @inheritdoc, but the corresponding section in the parent is not defined.", force = true)
"<invalid inheritdoc annotation>"
}
@@ -313,7 +308,6 @@ trait DocComments { self: Global =>
/** Lookup definition of variable.
*
* @param vble The variable for which a definition is searched
- * @param owner The current owner in which variable definitions are searched.
* @param site The class for which doc comments are generated
*/
def lookupVariable(vble: String, site: Symbol): Option[String] = site match {
@@ -330,12 +324,12 @@ trait DocComments { self: Global =>
}
/** Expand variable occurrences in string `str`, until a fix point is reached or
- * a expandLimit is exceeded.
+ * an expandLimit is exceeded.
*
- * @param str The string to be expanded
- * @param sym The symbol for which doc comments are generated
- * @param site The class for which doc comments are generated
- * @return Expanded string
+ * @param initialStr The string to be expanded
+ * @param sym The symbol for which doc comments are generated
+ * @param site The class for which doc comments are generated
+ * @return Expanded string
*/
protected def expandVariables(initialStr: String, sym: Symbol, site: Symbol): String = {
val expandLimit = 10
@@ -470,7 +464,7 @@ trait DocComments { self: Global =>
//val (classes, pkgs) = site.ownerChain.span(!_.isPackageClass)
//val sites = (classes ::: List(pkgs.head, rootMirror.RootClass)))
//findIn(sites)
- findIn(site.ownerChain ::: List(definitions.EmptyPackage))
+ findIn(site.ownerChain ::: List(rootMirror.EmptyPackage))
}
def getType(str: String, variable: String): Type = {
diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
index deea4de707..602366a201 100644
--- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
@@ -168,6 +168,13 @@ abstract class NodePrinters {
}
}
+ def typeApplyCommon(tree: Tree, fun: Tree, args: List[Tree]) {
+ printMultiline(tree) {
+ traverse(fun)
+ traverseList("[]", "type argument")(args)
+ }
+ }
+
def treePrefix(tree: Tree) = showPosition(tree) + tree.productPrefix
def printMultiline(tree: Tree)(body: => Unit) {
printMultiline(treePrefix(tree), showAttributes(tree))(body)
@@ -203,9 +210,11 @@ abstract class NodePrinters {
showPosition(tree)
tree match {
- case AppliedTypeTree(tpt, args) => applyCommon(tree, tpt, args)
- case ApplyDynamic(fun, args) => applyCommon(tree, fun, args)
- case Apply(fun, args) => applyCommon(tree, fun, args)
+ case ApplyDynamic(fun, args) => applyCommon(tree, fun, args)
+ case Apply(fun, args) => applyCommon(tree, fun, args)
+
+ case TypeApply(fun, args) => typeApplyCommon(tree, fun, args)
+ case AppliedTypeTree(tpt, args) => typeApplyCommon(tree, tpt, args)
case Throw(Ident(name)) =>
printSingle(tree, name)
@@ -312,11 +321,6 @@ abstract class NodePrinters {
}
case This(qual) =>
printSingle(tree, qual)
- case TypeApply(fun, args) =>
- printMultiline(tree) {
- traverse(fun)
- traverseList("[]", "type argument")(args)
- }
case tt @ TypeTree() =>
println(showTypeTree(tt))
diff --git a/src/compiler/scala/tools/nsc/ast/Positions.scala b/src/compiler/scala/tools/nsc/ast/Positions.scala
index d8fb632f73..63a2dd0ee7 100644
--- a/src/compiler/scala/tools/nsc/ast/Positions.scala
+++ b/src/compiler/scala/tools/nsc/ast/Positions.scala
@@ -1,16 +1,11 @@
package scala.tools.nsc
package ast
-import scala.reflect.internal.util.{ SourceFile, Position, OffsetPosition, NoPosition }
+import scala.reflect.internal.util.{ SourceFile, OffsetPosition }
trait Positions extends scala.reflect.internal.Positions {
self: Global =>
- def rangePos(source: SourceFile, start: Int, point: Int, end: Int) =
- new OffsetPosition(source, point)
-
- def validatePositions(tree: Tree) {}
-
class ValidatingPosAssigner extends PosAssigner {
var pos: Position = _
override def traverse(t: Tree) {
@@ -20,7 +15,7 @@ trait Positions extends scala.reflect.internal.Positions {
// When we prune due to encountering a position, traverse the
// pruned children so we can warn about those lacking positions.
t.children foreach { c =>
- if ((c eq EmptyTree) || (c eq emptyValDef)) ()
+ if (!c.canHaveAttrs) ()
else if (c.pos == NoPosition) {
reporter.warning(t.pos, " Positioned tree has unpositioned child in phase " + globalPhase)
inform("parent: " + treeSymStatus(t))
diff --git a/src/compiler/scala/tools/nsc/ast/Printers.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala
index 83222a24b4..7fefb2ce0c 100644
--- a/src/compiler/scala/tools/nsc/ast/Printers.scala
+++ b/src/compiler/scala/tools/nsc/ast/Printers.scala
@@ -7,8 +7,6 @@ package scala.tools.nsc
package ast
import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
-import symtab.Flags._
-import symtab.SymbolTable
trait Printers extends scala.reflect.internal.Printers { this: Global =>
@@ -130,7 +128,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
case Select(qualifier, name) =>
printTree(qualifier)
print(".")
- print(quotedName(name, true))
+ print(quotedName(name, decode = true))
// target.toString() ==> target.toString
case Apply(fn, Nil) => printTree(fn)
@@ -154,7 +152,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
// If thenp or elsep has only one statement, it doesn't need more than one line.
case If(cond, thenp, elsep) =>
def ifIndented(x: Tree) = {
- indent ; println() ; printTree(x) ; undent
+ indent() ; println() ; printTree(x) ; undent()
}
val List(thenStmts, elseStmts) = List(thenp, elsep) map allStatements
@@ -168,12 +166,12 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
if (elseStmts.nonEmpty) {
print(" else")
- indent ; println()
+ indent() ; println()
elseStmts match {
case List(x) => printTree(x)
case _ => printTree(elsep)
}
- undent ; println()
+ undent() ; println()
}
case _ => s()
}
@@ -202,91 +200,12 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
override def printTree(tree: Tree) { print(safe(tree)) }
}
- class TreeMatchTemplate {
- // non-trees defined in Trees
- //
- // case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int)
- // case class Modifiers(flags: Long, privateWithin: Name, annotations: List[Tree], positions: Map[Long, Position])
- //
- def apply(t: Tree): Unit = t match {
- // eliminated by typer
- case Annotated(annot, arg) =>
- case AssignOrNamedArg(lhs, rhs) =>
- case DocDef(comment, definition) =>
- case Import(expr, selectors) =>
-
- // eliminated by refchecks
- case ModuleDef(mods, name, impl) =>
- case TypeTreeWithDeferredRefCheck() =>
-
- // eliminated by erasure
- case TypeDef(mods, name, tparams, rhs) =>
- case Typed(expr, tpt) =>
-
- // eliminated by cleanup
- case ApplyDynamic(qual, args) =>
-
- // eliminated by explicitouter
- case Alternative(trees) =>
- case Bind(name, body) =>
- case CaseDef(pat, guard, body) =>
- case Star(elem) =>
- case UnApply(fun, args) =>
-
- // eliminated by lambdalift
- case Function(vparams, body) =>
-
- // eliminated by uncurry
- case AppliedTypeTree(tpt, args) =>
- case CompoundTypeTree(templ) =>
- case ExistentialTypeTree(tpt, whereClauses) =>
- case SelectFromTypeTree(qual, selector) =>
- case SingletonTypeTree(ref) =>
- case TypeBoundsTree(lo, hi) =>
-
- // survivors
- case Apply(fun, args) =>
- case ArrayValue(elemtpt, trees) =>
- case Assign(lhs, rhs) =>
- case Block(stats, expr) =>
- case ClassDef(mods, name, tparams, impl) =>
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- case EmptyTree =>
- case Ident(name) =>
- case If(cond, thenp, elsep) =>
- case LabelDef(name, params, rhs) =>
- case Literal(value) =>
- case Match(selector, cases) =>
- case New(tpt) =>
- case PackageDef(pid, stats) =>
- case Return(expr) =>
- case Select(qualifier, selector) =>
- case Super(qual, mix) =>
- case Template(parents, self, body) =>
- case This(qual) =>
- case Throw(expr) =>
- case Try(block, catches, finalizer) =>
- case TypeApply(fun, args) =>
- case TypeTree() =>
- case ValDef(mods, name, tpt, rhs) =>
-
- // missing from the Trees comment
- case Parens(args) => // only used during parsing
- case SelectFromArray(qual, name, erasure) => // only used during erasure
- }
- }
-
def asString(t: Tree): String = render(t, newStandardTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value)
def asCompactString(t: Tree): String = render(t, newCompactTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value)
def asCompactDebugString(t: Tree): String = render(t, newCompactTreePrinter, true, true, true)
def newStandardTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer)
- def newStandardTreePrinter(stream: OutputStream): TreePrinter = newStandardTreePrinter(new PrintWriter(stream))
- def newStandardTreePrinter(): TreePrinter = newStandardTreePrinter(new PrintWriter(ConsoleWriter))
-
def newCompactTreePrinter(writer: PrintWriter): CompactTreePrinter = new CompactTreePrinter(writer)
- def newCompactTreePrinter(stream: OutputStream): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(stream))
- def newCompactTreePrinter(): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(ConsoleWriter))
override def newTreePrinter(writer: PrintWriter): TreePrinter =
if (settings.Ycompacttrees.value) newCompactTreePrinter(writer)
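
Aside on the Printers.scala hunk above: the patch replaces bare positional booleans with named arguments, e.g. quotedName(name, decode = true), and parenthesizes side-effecting calls such as indent() and undent(). A minimal, self-contained sketch of the named-argument style, assuming a hypothetical quotedName stand-in rather than the compiler's real signature:

object NamedBooleanArgs {
  // Hypothetical helper mirroring the shape of quotedName(name, decode); not the compiler's API.
  def quotedName(name: String, decode: Boolean): String =
    if (decode) name.replace("$u0020", " ") else name

  def main(args: Array[String]): Unit = {
    println(quotedName("foo$u0020bar", decode = true))  // "foo bar": the intent is visible at the call site
    println(quotedName("foo$u0020bar", decode = false)) // "foo$u0020bar"
  }
}

With the argument named, a reader no longer has to look up which parameter a bare `true` was bound to.
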
diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
index 5c954096f4..0077ed0c84 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
@@ -16,8 +16,6 @@ import javax.swing.tree._
import scala.concurrent.Lock
import scala.text._
-import symtab.Flags._
-import symtab.SymbolTable
import scala.language.implicitConversions
/**
@@ -34,7 +32,7 @@ abstract class TreeBrowsers {
val borderSize = 10
- def create(): SwingBrowser = new SwingBrowser();
+ def create(): SwingBrowser = new SwingBrowser()
/** Pseudo tree class, so that all JTree nodes are treated uniformly */
case class ProgramTree(units: List[UnitTree]) extends Tree {
@@ -61,7 +59,7 @@ abstract class TreeBrowsers {
frame.createFrame(lock)
// wait for the frame to be closed
- lock.acquire
+ lock.acquire()
t
}
@@ -83,7 +81,7 @@ abstract class TreeBrowsers {
frame.createFrame(lock)
// wait for the frame to be closed
- lock.acquire
+ lock.acquire()
}
}
@@ -171,8 +169,8 @@ abstract class TreeBrowsers {
_setExpansionState(root, new TreePath(root.getModel.getRoot))
}
- def expandAll(subtree: JTree) = setExpansionState(subtree, true)
- def collapseAll(subtree: JTree) = setExpansionState(subtree, false)
+ def expandAll(subtree: JTree) = setExpansionState(subtree, expand = true)
+ def collapseAll(subtree: JTree) = setExpansionState(subtree, expand = false)
/** Create a frame that displays the AST.
@@ -184,14 +182,14 @@ abstract class TreeBrowsers {
* especially symbols/types would change while the window is visible.
*/
def createFrame(lock: Lock): Unit = {
- lock.acquire // keep the lock until the user closes the window
+ lock.acquire() // keep the lock until the user closes the window
frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE)
frame.addWindowListener(new WindowAdapter() {
/** Release the lock, so compilation may resume after the window is closed. */
- override def windowClosed(e: WindowEvent): Unit = lock.release
- });
+ override def windowClosed(e: WindowEvent): Unit = lock.release()
+ })
jTree = new JTree(treeModel) {
/** Return the string for a tree node. */
@@ -253,7 +251,7 @@ abstract class TreeBrowsers {
putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(KeyEvent.VK_Q, menuKey + shiftKey, false))
override def actionPerformed(e: ActionEvent) {
closeWindow()
- global.currentRun.cancel
+ global.currentRun.cancel()
}
}
)
@@ -509,7 +507,7 @@ abstract class TreeBrowsers {
/** Return a textual representation of this t's symbol */
def symbolText(t: Tree): String = {
val prefix =
- if (t.hasSymbol) "[has] "
+ if (t.hasSymbolField) "[has] "
else if (t.isDef) "[defines] "
else ""
@@ -529,11 +527,10 @@ abstract class TreeBrowsers {
* attributes */
def symbolAttributes(t: Tree): String = {
val s = t.symbol
- var att = ""
if ((s ne null) && (s != NoSymbol)) {
- var str = flagsToString(s.flags)
- if (s.isStaticMember) str = str + " isStatic ";
+ var str = s.flagString
+ if (s.isStaticMember) str = str + " isStatic "
(str + " annotations: " + s.annotations.mkString("", " ", "")
+ (if (s.isTypeSkolem) "\ndeSkolemized annotations: " + s.deSkolemize.annotations.mkString("", " ", "") else ""))
}
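
The TreeBrowsers changes above only add parentheses to lock.acquire() and lock.release(), but the underlying hand-off is worth spelling out: createFrame acquires the lock before showing the frame, the windowClosed listener releases it, and the browse methods block on a second acquire so compilation only resumes once the window is closed. A rough, self-contained sketch of that pattern, using CountDownLatch as a stand-in for scala.concurrent.Lock:

import java.util.concurrent.CountDownLatch

object BrowserHandOff {
  def main(args: Array[String]): Unit = {
    val closed = new CountDownLatch(1)          // plays the role of lock.acquire() in createFrame
    val ui = new Thread(new Runnable {
      def run(): Unit = {
        println("frame shown; waiting for the user to close it")
        Thread.sleep(500)                       // pretend the user closes the window
        println("windowClosed fired")
        closed.countDown()                      // plays the role of lock.release()
      }
    })
    ui.start()
    closed.await()                              // the compiler thread resumes only after the release
    println("compilation resumes")
  }
}
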
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index 9a5b92e795..7460d1ab31 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -21,7 +21,6 @@ trait TreeDSL {
import global._
import definitions._
- import gen.{ scalaDot }
object CODE {
// Add a null check to a Tree => Tree function
@@ -31,24 +30,17 @@ trait TreeDSL {
def returning[T](x: T)(f: T => Unit): T = util.returning(x)(f)
object LIT extends (Any => Literal) {
+ def typed(x: Any) = apply(x) setType ConstantType(Constant(x))
def apply(x: Any) = Literal(Constant(x))
def unapply(x: Any) = condOpt(x) { case Literal(Constant(value)) => value }
}
- // You might think these could all be vals, but empirically I have found that
- // at least in the case of UNIT the compiler breaks if you re-use trees.
- // However we need stable identifiers to have attractive pattern matching.
- // So it's inconsistent until I devise a better way.
- val TRUE = LIT(true)
- val FALSE = LIT(false)
- val ZERO = LIT(0)
- def NULL = LIT(null)
- def UNIT = LIT(())
-
- // for those preferring boring, predictable lives, without the thrills of tree-sharing
- // (but with the perk of typed trees)
- def TRUE_typed = LIT(true) setType ConstantType(Constant(true))
- def FALSE_typed = LIT(false) setType ConstantType(Constant(false))
+ // Boring, predictable trees.
+ def TRUE = LIT typed true
+ def FALSE = LIT typed false
+ def ZERO = LIT(0)
+ def NULL = LIT(null)
+ def UNIT = LIT(())
object WILD {
def empty = Ident(nme.WILDCARD)
@@ -85,16 +77,12 @@ trait TreeDSL {
def ANY_EQ (other: Tree) = OBJ_EQ(other AS ObjectClass.tpe)
def ANY_== (other: Tree) = fn(target, Any_==, other)
def ANY_!= (other: Tree) = fn(target, Any_!=, other)
- def OBJ_== (other: Tree) = fn(target, Object_==, other)
def OBJ_!= (other: Tree) = fn(target, Object_!=, other)
def OBJ_EQ (other: Tree) = fn(target, Object_eq, other)
def OBJ_NE (other: Tree) = fn(target, Object_ne, other)
- def INT_| (other: Tree) = fn(target, getMember(IntClass, nme.OR), other)
- def INT_& (other: Tree) = fn(target, getMember(IntClass, nme.AND), other)
def INT_>= (other: Tree) = fn(target, getMember(IntClass, nme.GE), other)
def INT_== (other: Tree) = fn(target, getMember(IntClass, nme.EQ), other)
- def INT_!= (other: Tree) = fn(target, getMember(IntClass, nme.NE), other)
// generic operations on ByteClass, IntClass, LongClass
def GEN_| (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.OR), other)
@@ -102,9 +90,6 @@ trait TreeDSL {
def GEN_== (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.EQ), other)
def GEN_!= (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.NE), other)
- def BOOL_&& (other: Tree) = fn(target, Boolean_and, other)
- def BOOL_|| (other: Tree) = fn(target, Boolean_or, other)
-
/** Apply, Select, Match **/
def APPLY(params: Tree*) = Apply(target, params.toList)
def APPLY(params: List[Tree]) = Apply(target, params)
@@ -114,6 +99,10 @@ trait TreeDSL {
def DOT(sym: Symbol) = SelectStart(Select(target, sym))
/** Assignment */
+ // !!! This method is responsible for some tree sharing, but a diligent
+ // reviewer pointed out that we shouldn't blindly duplicate these trees
+ // as there might be DefTrees nested beneath them. It's not entirely
+ // clear how to proceed, so for now it retains the non-duplicating behavior.
def ===(rhs: Tree) = Assign(target, rhs)
/** Methods for sequences **/
@@ -127,11 +116,9 @@ trait TreeDSL {
* See ticket #2168 for one illustration of AS vs. AS_ANY.
*/
def AS(tpe: Type) = gen.mkAsInstanceOf(target, tpe, any = true, wrapInApply = false)
- def IS(tpe: Type) = gen.mkIsInstanceOf(target, tpe, true)
- def IS_OBJ(tpe: Type) = gen.mkIsInstanceOf(target, tpe, false)
+ def IS(tpe: Type) = gen.mkIsInstanceOf(target, tpe, any = true)
+ def IS_OBJ(tpe: Type) = gen.mkIsInstanceOf(target, tpe, any = false)
- // XXX having some difficulty expressing nullSafe in a way that doesn't freak out value types
- // def TOSTRING() = nullSafe(fn(_: Tree, nme.toString_), LIT("null"))(target)
def TOSTRING() = fn(target, nme.toString_)
def GETCLASS() = fn(target, Object_getClass)
}
@@ -159,7 +146,6 @@ trait TreeDSL {
def mkTree(rhs: Tree): ResultTreeType
def ===(rhs: Tree): ResultTreeType
- private var _mods: Modifiers = null
private var _tpt: Tree = null
private var _pos: Position = null
@@ -167,19 +153,12 @@ trait TreeDSL {
_tpt = TypeTree(tp)
this
}
- def withFlags(flags: Long*): this.type = {
- if (_mods == null)
- _mods = defaultMods
-
- _mods = flags.foldLeft(_mods)(_ | _)
- this
- }
def withPos(pos: Position): this.type = {
_pos = pos
this
}
- final def mods = if (_mods == null) defaultMods else _mods
+ final def mods = defaultMods
final def tpt = if (_tpt == null) defaultTpt else _tpt
final def pos = if (_pos == null) defaultPos else _pos
}
@@ -199,7 +178,7 @@ trait TreeDSL {
self: VODDStart =>
type ResultTreeType = ValDef
- def mkTree(rhs: Tree): ValDef = ValDef(mods, name, tpt, rhs)
+ def mkTree(rhs: Tree): ValDef = ValDef(mods, name.toTermName, tpt, rhs)
}
trait DefCreator {
self: VODDStart =>
@@ -244,7 +223,6 @@ trait TreeDSL {
}
class TryStart(body: Tree, catches: List[CaseDef], fin: Tree) {
def CATCH(xs: CaseDef*) = new TryStart(body, xs.toList, fin)
- def FINALLY(x: Tree) = Try(body, catches, x)
def ENDTRY = Try(body, catches, fin)
}
@@ -252,16 +230,9 @@ trait TreeDSL {
def DEFAULT: CaseStart = new CaseStart(WILD.empty, EmptyTree)
class SymbolMethods(target: Symbol) {
- def BIND(body: Tree) = Bind(target, body)
- def IS_NULL() = REF(target) OBJ_EQ NULL
- def NOT_NULL() = REF(target) OBJ_NE NULL
-
- def GET() = fn(REF(target), nme.get)
-
- // name of nth indexed argument to a method (first parameter list), defaults to 1st
- def ARG(idx: Int = 0) = Ident(target.paramss.head(idx))
- def ARGS = target.paramss.head
- def ARGNAMES = ARGS map Ident
+ def IS_NULL() = REF(target) OBJ_EQ NULL
+ def GET() = fn(REF(target), nme.get)
+ def ARGS = target.paramss.head
}
/** Top level accessible. */
@@ -269,37 +240,18 @@ trait TreeDSL {
def THROW(sym: Symbol, msg: Tree): Throw = Throw(sym.tpe, msg.TOSTRING())
def NEW(tpt: Tree, args: Tree*): Tree = New(tpt, List(args.toList))
- def NEW(sym: Symbol, args: Tree*): Tree = New(sym.tpe, args: _*)
-
- def DEF(name: Name, tp: Type): DefTreeStart = DEF(name) withType tp
- def DEF(name: Name): DefTreeStart = new DefTreeStart(name)
def DEF(sym: Symbol): DefSymStart = new DefSymStart(sym)
-
- def VAL(name: Name, tp: Type): ValTreeStart = VAL(name) withType tp
- def VAL(name: Name): ValTreeStart = new ValTreeStart(name)
def VAL(sym: Symbol): ValSymStart = new ValSymStart(sym)
- def VAR(name: Name, tp: Type): ValTreeStart = VAL(name, tp) withFlags Flags.MUTABLE
- def VAR(name: Name): ValTreeStart = VAL(name) withFlags Flags.MUTABLE
- def VAR(sym: Symbol): ValSymStart = VAL(sym) withFlags Flags.MUTABLE
-
- def LAZYVAL(name: Name, tp: Type): ValTreeStart = VAL(name, tp) withFlags Flags.LAZY
- def LAZYVAL(name: Name): ValTreeStart = VAL(name) withFlags Flags.LAZY
- def LAZYVAL(sym: Symbol): ValSymStart = VAL(sym) withFlags Flags.LAZY
-
def AND(guards: Tree*) =
if (guards.isEmpty) EmptyTree
else guards reduceLeft gen.mkAnd
- def OR(guards: Tree*) =
- if (guards.isEmpty) EmptyTree
- else guards reduceLeft gen.mkOr
-
def IF(tree: Tree) = new IfStart(tree, EmptyTree)
def TRY(tree: Tree) = new TryStart(tree, Nil, EmptyTree)
def BLOCK(xs: Tree*) = Block(xs.init.toList, xs.last)
def NOT(tree: Tree) = Select(tree, Boolean_not)
- def SOME(xs: Tree*) = Apply(SomeClass.companionSymbol, makeTupleTerm(xs.toList, true))
+ def SOME(xs: Tree*) = Apply(SomeClass.companionSymbol, makeTupleTerm(xs.toList, flattenUnary = true))
/** Typed trees from symbols. */
def THIS(sym: Symbol) = gen.mkAttributedThis(sym)
@@ -312,11 +264,6 @@ trait TreeDSL {
case List(tree) if flattenUnary => tree
case _ => Apply(TupleClass(trees.length).companionModule, trees: _*)
}
- def makeTupleType(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
- case Nil => gen.scalaUnitConstr
- case List(tree) if flattenUnary => tree
- case _ => AppliedTypeTree(REF(TupleClass(trees.length)), trees)
- }
/** Implicits - some of these should probably disappear **/
implicit def mkTreeMethods(target: Tree): TreeMethods = new TreeMethods(target)
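
In the TreeDSL.scala hunk above, TRUE and FALSE become defs built from the new LIT.typed helper, replacing shared vals that the removed comment says broke the compiler when trees were re-used. A simplified model (local case classes, not the compiler's Tree or Type API) of why a def producing a fresh, typed literal avoids that sharing:

object FreshTypedLiterals {
  final case class Lit(value: Any, tpe: Option[String] = None)

  object LIT {
    def apply(x: Any): Lit = Lit(x)
    // mirrors `def typed(x: Any) = apply(x) setType ConstantType(Constant(x))`
    def typed(x: Any): Lit = Lit(x, Some(s"ConstantType(Constant($x))"))
  }

  def TRUE: Lit  = LIT typed true   // a def: every use builds its own node,
  def FALSE: Lit = LIT typed false  // so nothing done to one occurrence can leak into another

  def main(args: Array[String]): Unit = {
    val a = TRUE
    val b = TRUE
    println(a eq b)   // false: no tree sharing between call sites
    println(a.tpe)    // Some(ConstantType(Constant(true)))
  }
}
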
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 99b82d9746..b9eb511a9a 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -8,7 +8,6 @@ package ast
import scala.collection.mutable.ListBuffer
import symtab.Flags._
-import symtab.SymbolTable
import scala.language.postfixOps
/** XXX to resolve: TreeGen only assumes global is a SymbolTable, but
@@ -22,7 +21,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
def mkCheckInit(tree: Tree): Tree = {
val tpe =
- if (tree.tpe != null || !tree.hasSymbol) tree.tpe
+ if (tree.tpe != null || !tree.hasSymbolField) tree.tpe
else tree.symbol.tpe
if (!global.phase.erasedTypes && settings.warnSelectNullable.value &&
@@ -52,120 +51,30 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
}
// wrap the given expression in a SoftReference so it can be gc-ed
- def mkSoftRef(expr: Tree): Tree = atPos(expr.pos)(New(SoftReferenceClass.tpe, expr))
+ def mkSoftRef(expr: Tree): Tree = atPos(expr.pos) {
+ val constructor = SoftReferenceClass.info.nonPrivateMember(nme.CONSTRUCTOR).suchThat(_.paramss.flatten.size == 1)
+ NewFromConstructor(constructor, expr)
+ }
// annotate the expression with @unchecked
def mkUnchecked(expr: Tree): Tree = atPos(expr.pos) {
// This can't be "Annotated(New(UncheckedClass), expr)" because annotations
// are very picky about things and it crashes the compiler with "unexpected new".
- Annotated(New(scalaDot(UncheckedClass.name), ListOfNil), expr)
- }
- // if it's a Match, mark the selector unchecked; otherwise nothing.
- def mkUncheckedMatch(tree: Tree) = tree match {
- case Match(selector, cases) => atPos(tree.pos)(Match(mkUnchecked(selector), cases))
- case _ => tree
- }
-
- def mkSynthSwitchSelector(expr: Tree): Tree = atPos(expr.pos) {
- // This can't be "Annotated(New(SwitchClass), expr)" because annotations
- // are very picky about things and it crashes the compiler with "unexpected new".
- Annotated(Ident(nme.synthSwitch), expr)
- }
-
- // TODO: would be so much nicer if we would know during match-translation (i.e., type checking)
- // whether we should emit missingCase-style apply (and isDefinedAt), instead of transforming trees post-factum
- class MatchMatcher {
- def caseMatch(orig: Tree, selector: Tree, cases: List[CaseDef], wrap: Tree => Tree): Tree = unknownTree(orig)
- def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = unknownTree(orig)
- def caseVirtualizedMatchOpt(orig: Tree, prologue: List[Tree], cases: List[Tree], matchEndDef: Tree, wrap: Tree => Tree): Tree = unknownTree(orig)
-
- def genVirtualizedMatch(prologue: List[Tree], cases: List[Tree], matchEndDef: Tree): Tree = Block(prologue ++ cases, matchEndDef)
-
- def apply(matchExpr: Tree): Tree = matchExpr match {
- // old-style match or virtpatmat switch
- case Match(selector, cases) => // println("simple match: "+ (selector, cases) + "for:\n"+ matchExpr )
- caseMatch(matchExpr, selector, cases, identity)
- // old-style match or virtpatmat switch
- case Block((vd: ValDef) :: Nil, orig@Match(selector, cases)) => // println("block match: "+ (selector, cases, vd) + "for:\n"+ matchExpr )
- caseMatch(matchExpr, selector, cases, m => copyBlock(matchExpr, List(vd), m))
- // virtpatmat
- case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), List(scrut)), List(matcher)) if opt.virtPatmat => // println("virt match: "+ (tgt, targs, scrut, matcher) + "for:\n"+ matchExpr )
- caseVirtualizedMatch(matchExpr, tgt, targs, scrut, matcher)
- // optimized version of virtpatmat
- case Block(stats, matchEndDef) if opt.virtPatmat && (stats forall treeInfo.hasSynthCaseSymbol) =>
- // the assumption is once we encounter a case, the remainder of the block will consist of cases
- // the prologue may be empty, usually it is the valdef that stores the scrut
- val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
- caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, identity)
- // optimized version of virtpatmat
- case Block(outerStats, orig@Block(stats, matchEndDef)) if opt.virtPatmat && (stats forall treeInfo.hasSynthCaseSymbol) =>
- val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
- caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, m => copyBlock(matchExpr, outerStats, m))
- case other =>
- unknownTree(other)
- }
-
- def unknownTree(t: Tree): Tree = throw new MatchError(t)
- def copyBlock(orig: Tree, stats: List[Tree], expr: Tree): Block = Block(stats, expr)
-
- def dropSyntheticCatchAll(cases: List[CaseDef]): List[CaseDef] =
- if (!opt.virtPatmat) cases
- else cases filter {
- case CaseDef(pat, EmptyTree, Throw(Apply(Select(New(exTpt), nme.CONSTRUCTOR), _))) if (treeInfo.isWildcardArg(pat) && (exTpt.tpe.typeSymbol eq MatchErrorClass)) => false
- case CaseDef(pat, guard, body) => true
- }
- }
-
- def mkCached(cvar: Symbol, expr: Tree): Tree = {
- val cvarRef = mkUnattributedRef(cvar)
- Block(
- List(
- If(Apply(Select(cvarRef, nme.eq), List(Literal(Constant(null)))),
- Assign(cvarRef, expr),
- EmptyTree)),
- cvarRef
- )
+ Annotated(New(scalaDot(UncheckedClass.name), Nil), expr)
}
// Builds a tree of the form "{ lhs = rhs ; lhs }"
def mkAssignAndReturn(lhs: Symbol, rhs: Tree): Tree = {
- val lhsRef = mkUnattributedRef(lhs)
+ def lhsRef = if (lhs.owner.isClass) Select(This(lhs.owner), lhs) else Ident(lhs)
Block(Assign(lhsRef, rhs) :: Nil, lhsRef)
}
- def mkModuleVarDef(accessor: Symbol) = {
- val inClass = accessor.owner.isClass
- val extraFlags = if (inClass) PrivateLocal | SYNTHETIC else 0
-
- val mval = (
- accessor.owner.newVariable(nme.moduleVarName(accessor.name), accessor.pos.focus, MODULEVAR | extraFlags)
- setInfo accessor.tpe.finalResultType
- addAnnotation VolatileAttr
- )
- if (inClass)
- mval.owner.info.decls enter mval
-
- ValDef(mval)
- }
-
- // def m: T = { if (m$ eq null) m$ = new m$class(...) m$ }
- // where (...) are eventual outer accessors
- def mkCachedModuleAccessDef(accessor: Symbol, mvar: Symbol) =
- DefDef(accessor, mkCached(mvar, newModule(accessor, mvar.tpe)))
-
- def mkModuleAccessDef(accessor: Symbol, msym: Symbol) =
- DefDef(accessor, Select(This(msym.owner), msym))
-
def newModule(accessor: Symbol, tpe: Type) = {
val ps = tpe.typeSymbol.primaryConstructor.info.paramTypes
if (ps.isEmpty) New(tpe)
else New(tpe, This(accessor.owner.enclClass))
}
- // def m: T;
- def mkModuleAccessDcl(accessor: Symbol) =
- DefDef(accessor setFlag lateDEFERRED, EmptyTree)
-
def mkRuntimeCall(meth: Name, args: List[Tree]): Tree =
mkRuntimeCall(meth, Nil, args)
@@ -206,7 +115,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
else AppliedTypeTree(Ident(clazz), targs map TypeTree)
))
}
- def mkSuperSelect = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
+ def mkSuperInitCall: Select = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
def wildcardStar(tree: Tree) =
atPos(tree.pos) { Typed(tree, Ident(tpnme.WILDCARD_STAR)) }
@@ -267,25 +176,6 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
else
mkCast(tree, pt)
- def mkZeroContravariantAfterTyper(tp: Type): Tree = {
- // contravariant -- for replacing an argument in a method call
- // must use subtyping, as otherwise we miss types like `Any with Int`
- val tree =
- if (NullClass.tpe <:< tp) Literal(Constant(null))
- else if (UnitClass.tpe <:< tp) Literal(Constant())
- else if (BooleanClass.tpe <:< tp) Literal(Constant(false))
- else if (FloatClass.tpe <:< tp) Literal(Constant(0.0f))
- else if (DoubleClass.tpe <:< tp) Literal(Constant(0.0d))
- else if (ByteClass.tpe <:< tp) Literal(Constant(0.toByte))
- else if (ShortClass.tpe <:< tp) Literal(Constant(0.toShort))
- else if (IntClass.tpe <:< tp) Literal(Constant(0))
- else if (LongClass.tpe <:< tp) Literal(Constant(0L))
- else if (CharClass.tpe <:< tp) Literal(Constant(0.toChar))
- else mkCast(Literal(Constant(null)), tp)
-
- tree
- }
-
/** Translate names in Select/Ident nodes to type names.
*/
def convertToTypeName(tree: Tree): Option[RefTree] = tree match {
@@ -307,7 +197,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
*/
private def mkPackedValDef(expr: Tree, owner: Symbol, name: Name): (ValDef, () => Ident) = {
val packedType = typer.packedType(expr, owner)
- val sym = owner.newValue(name, expr.pos.makeTransparent, SYNTHETIC) setInfo packedType
+ val sym = owner.newValue(name.toTermName, expr.pos.makeTransparent, SYNTHETIC) setInfo packedType
(ValDef(sym, expr), () => Ident(sym) setPos sym.pos.focus setType expr.tpe)
}
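
The reworked mkAssignAndReturn above builds the block { lhs = rhs ; lhs } and now chooses the reference shape itself: a Select through This(owner) for class-owned symbols, a bare Ident otherwise. A small stand-alone model of that tree shape, with placeholder case classes instead of the real Trees:

object AssignAndReturnShape {
  sealed trait Tree
  final case class Ident(name: String)                   extends Tree
  final case class This(owner: String)                   extends Tree
  final case class Select(qual: Tree, name: String)      extends Tree
  final case class Assign(lhs: Tree, rhs: Tree)          extends Tree
  final case class Block(stats: List[Tree], expr: Tree)  extends Tree

  def mkAssignAndReturn(lhs: String, ownedByClass: Boolean, owner: String, rhs: Tree): Block = {
    // a def, like in the patch, so each use of the reference is a fresh tree
    def lhsRef: Tree = if (ownedByClass) Select(This(owner), lhs) else Ident(lhs)
    Block(Assign(lhsRef, rhs) :: Nil, lhsRef) // evaluate rhs, store it, then yield the stored value
  }

  def main(args: Array[String]): Unit =
    println(mkAssignAndReturn("x$module", ownedByClass = true, owner = "C", rhs = Ident("init")))
}
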
diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
index cbbb4c8ba8..6a0f4407fc 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
@@ -6,10 +6,6 @@
package scala.tools.nsc
package ast
-import scala.reflect.internal.HasFlags
-import scala.reflect.internal.Flags._
-import symtab._
-
/** This class ...
*
* @author Martin Odersky
@@ -18,8 +14,65 @@ import symtab._
abstract class TreeInfo extends scala.reflect.internal.TreeInfo {
val global: Global
import global._
+ import definitions._
- import definitions.ThrowableClass
+ // arg1.op(arg2) returns (arg1, op.symbol, arg2)
+ object BinaryOp {
+ def unapply(t: Tree): Option[(Tree, Symbol, Tree)] = t match {
+ case Apply(sel @ Select(arg1, _), arg2 :: Nil) => Some((arg1, sel.symbol, arg2))
+ case _ => None
+ }
+ }
+ // recv.op[T1, ...] returns (recv, op.symbol, type argument types)
+ object TypeApplyOp {
+ def unapply(t: Tree): Option[(Tree, Symbol, List[Type])] = t match {
+ case TypeApply(sel @ Select(recv, _), targs) => Some((recv, sel.symbol, targs map (_.tpe)))
+ case _ => None
+ }
+ }
+
+ // x.asInstanceOf[T] returns (x, typeOf[T])
+ object AsInstanceOf {
+ def unapply(t: Tree): Option[(Tree, Type)] = t match {
+ case Apply(TypeApplyOp(recv, Object_asInstanceOf, tpe :: Nil), Nil) => Some((recv, tpe))
+ case _ => None
+ }
+ }
+
+ // Extractors for value classes.
+ object ValueClass {
+ def isValueClass(tpe: Type) = enteringErasure(tpe.typeSymbol.isDerivedValueClass)
+ def valueUnbox(tpe: Type) = enteringErasure(tpe.typeSymbol.derivedValueClassUnbox)
+
+ // B.unbox. Returns B.
+ object Unbox {
+ def unapply(t: Tree): Option[Tree] = t match {
+ case Apply(sel @ Select(ref, _), Nil) if valueUnbox(ref.tpe) == sel.symbol => Some(ref)
+ case _ => None
+ }
+ }
+ // new B(v). Returns B and v.
+ object Box {
+ def unapply(t: Tree): Option[(Tree, Type)] = t match {
+ case Apply(sel @ Select(New(tpt), nme.CONSTRUCTOR), v :: Nil) => Some((v, tpt.tpe.finalResultType))
+ case _ => None
+ }
+ }
+ // (new B(v)).unbox. returns v.
+ object BoxAndUnbox {
+ def unapply(t: Tree): Option[Tree] = t match {
+ case Unbox(Box(v, tpe)) if isValueClass(tpe) => Some(v)
+ case _ => None
+ }
+ }
+ // new B(v1) op new B(v2) where op is == or !=. Returns v1, op, v2.
+ object BoxAndCompare {
+ def unapply(t: Tree): Option[(Tree, Symbol, Tree)] = t match {
+ case BinaryOp(Box(v1, tpe1), op @ (Object_== | Object_!=), Box(v2, tpe2)) if isValueClass(tpe1) && tpe1 =:= tpe2 => Some((v1, op, v2))
+ case _ => None
+ }
+ }
+ }
/** Is tree legal as a member definition of an interface?
*/
@@ -42,7 +95,4 @@ abstract class TreeInfo extends scala.reflect.internal.TreeInfo {
case ClassDef(_, `name`, _, _) :: Nil => true
case _ => super.firstDefinesClassOrObject(trees, name)
}
-
- def isInterface(mods: HasFlags, body: List[Tree]) =
- mods.isTrait && (body forall isInterfaceMember)
}
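
The new TreeInfo extractors above are designed to nest: BoxAndUnbox is literally Unbox(Box(v, tpe)) plus a value-class guard, so a boxed-then-unboxed value collapses back to v. A self-contained illustration of that composition with toy tree nodes (the guard is omitted, and Box, Unbox and Ref are placeholders, not the compiler's trees):

object ExtractorComposition {
  sealed trait Tree
  final case class Ref(name: String)                extends Tree
  final case class Box(value: Tree, clazz: String)  extends Tree  // stands for `new B(v)`
  final case class Unbox(receiver: Tree)            extends Tree  // stands for `x.unbox`

  object BoxAndUnbox {
    def unapply(t: Tree): Option[Tree] = t match {
      case Unbox(Box(v, _)) => Some(v)  // the box/unbox pair cancels out
      case _                => None
    }
  }

  def main(args: Array[String]): Unit =
    Unbox(Box(Ref("v"), "Meter")) match {
      case BoxAndUnbox(v) => println(s"elided box/unbox around $v")
      case other          => println(s"left as is: $other")
    }
}
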
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 0a12737572..6c5c087d55 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -16,24 +16,6 @@ import scala.reflect.internal.Flags.TRAIT
import scala.compat.Platform.EOL
trait Trees extends scala.reflect.internal.Trees { self: Global =>
-
- def treeLine(t: Tree): String =
- if (t.pos.isDefined && t.pos.isRange) t.pos.lineContent.drop(t.pos.column - 1).take(t.pos.end - t.pos.start + 1)
- else t.summaryString
-
- def treeStatus(t: Tree, enclosingTree: Tree = null) = {
- val parent = if (enclosingTree eq null) " " else " P#%5s".format(enclosingTree.id)
-
- "[L%4s%8s] #%-6s %-15s %-10s // %s".format(t.pos.safeLine, parent, t.id, t.pos.show, t.shortClass, treeLine(t))
- }
- def treeSymStatus(t: Tree) = {
- val line = if (t.pos.isDefined) "line %-4s".format(t.pos.safeLine) else " "
- "#%-5s %s %-10s // %s".format(t.id, line, t.shortClass,
- if (t.symbol ne NoSymbol) "(" + t.symbol.fullLocationString + ")"
- else treeLine(t)
- )
- }
-
// --- additional cases --------------------------------------------------------
/** Only used during parsing */
case class Parens(args: List[Tree]) extends Tree
@@ -65,6 +47,13 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
// --- factory methods ----------------------------------------------------------
+ /** Factory method for a primary constructor super call `super.<init>(args_1)...(args_n)`
+ */
+ def PrimarySuperCall(argss: List[List[Tree]]): Tree = argss match {
+ case Nil => Apply(gen.mkSuperInitCall, Nil)
+ case xs :: rest => rest.foldLeft(Apply(gen.mkSuperInitCall, xs): Tree)(Apply.apply)
+ }
+
/** Generates a template with constructor corresponding to
*
* constrmods (vparams1_) ... (vparams_n) preSuper { presupers }
@@ -82,7 +71,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
* body
* }
*/
- def Template(parents: List[Tree], self: ValDef, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): Template = {
+ def Template(parents: List[Tree], self: ValDef, constrMods: Modifiers, vparamss: List[List[ValDef]], body: List[Tree], superPos: Position): Template = {
/* Add constructor to template */
// create parameters for <init> as synthetic trees.
@@ -115,11 +104,18 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
} else {
// convert (implicit ... ) to ()(implicit ... ) if its the only parameter section
if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit)
- vparamss1 = List() :: vparamss1;
- val superRef: Tree = atPos(superPos)(gen.mkSuperSelect)
- val superCall = (superRef /: argss) (Apply.apply)
+ vparamss1 = List() :: vparamss1
+ val superRef: Tree = atPos(superPos)(gen.mkSuperInitCall)
+ val superCall = pendingSuperCall // we can't know in advance which of the parents will end up as a superclass
+ // this requires knowing which of the parents is a type macro and which is not
+ // and that's something that cannot be found out before typer
+ // (the type macros aren't in the trunk yet, but there is a plan for them to land there soon)
+ // this means that we don't know what will be the arguments of the super call
+ // therefore here we emit a dummy which gets populated when the template is named and typechecked
List(
- atPos(wrappingPos(superPos, lvdefs ::: argss.flatten)) (
+ // TODO: previously this was `wrappingPos(superPos, lvdefs ::: argss.flatten)`
+ // is it going to be a problem that we can no longer include the `argss`?
+ atPos(wrappingPos(superPos, lvdefs)) (
DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant())))))
}
}
@@ -137,11 +133,10 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
* @param constrMods the modifiers for the class constructor, i.e. as in `class C private (...)`
* @param vparamss the value parameters -- if they have symbols they
* should be owned by `sym`
- * @param argss the supercall arguments
* @param body the template statements without primary constructor
* and value parameter fields.
*/
- def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): ClassDef = {
+ def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], body: List[Tree], superPos: Position): ClassDef = {
// "if they have symbols they should be owned by `sym`"
assert(
mforall(vparamss)(p => (p.symbol eq NoSymbol) || (p.symbol.owner == sym)),
@@ -151,7 +146,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
ClassDef(sym,
Template(sym.info.parents map TypeTree,
if (sym.thisSym == sym || phase.erasedTypes) emptyValDef else ValDef(sym.thisSym),
- constrMods, vparamss, argss, body, superPos))
+ constrMods, vparamss, body, superPos))
}
// --- subcomponents --------------------------------------------------
@@ -324,6 +319,8 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
else
super.transform {
tree match {
+ case tree if !tree.canHaveAttrs =>
+ tree
case tpt: TypeTree =>
if (tpt.original != null)
transform(tpt.original)
@@ -331,9 +328,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
val refersToLocalSymbols = tpt.tpe != null && (tpt.tpe exists (tp => locals contains tp.typeSymbol))
val isInferred = tpt.wasEmpty
if (refersToLocalSymbols || isInferred) {
- val dupl = tpt.duplicate
- dupl.tpe = null
- dupl
+ tpt.duplicate.clearType()
} else {
tpt
}
@@ -382,8 +377,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
val vetoThis = dupl.isInstanceOf[This] && sym.isPackageClass
if (!(vetoScope || vetoLabel || vetoThis)) dupl.symbol = NoSymbol
}
- dupl.tpe = null
- dupl
+ dupl.clearType()
}
}
}
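
PrimarySuperCall, added above, folds the argument lists into nested Apply nodes around super.<init>, so List(List(a), List(b, c)) becomes super.<init>(a)(b, c). A tiny runnable model of that fold, with placeholder tree classes standing in for the compiler's:

object PrimarySuperCallShape {
  sealed trait Tree
  case object SuperInit                                extends Tree
  final case class Arg(name: String)                   extends Tree
  final case class Apply(fun: Tree, args: List[Tree])  extends Tree

  def primarySuperCall(argss: List[List[Tree]]): Tree = argss match {
    case Nil        => Apply(SuperInit, Nil)                             // no explicit argument lists
    case xs :: rest => rest.foldLeft(Apply(SuperInit, xs): Tree)(Apply.apply)
  }

  def main(args: Array[String]): Unit = {
    println(primarySuperCall(Nil))
    println(primarySuperCall(List(List(Arg("a")), List(Arg("b"), Arg("c")))))
  }
}
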
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
index 553a2088a6..832a9bf63e 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
@@ -10,10 +10,8 @@ import scala.collection.mutable
import mutable.{ Buffer, ArrayBuffer, ListBuffer }
import scala.util.control.ControlThrowable
import scala.tools.nsc.util.CharArrayReader
-import scala.reflect.internal.util.SourceFile
-import scala.xml.{ Text, TextBuffer }
+import scala.xml.TextBuffer
import scala.xml.parsing.MarkupParserCommon
-import scala.xml.Utility.{ isNameStart, isNameChar, isSpace }
import scala.reflect.internal.Chars.{ SU, LF }
// XXX/Note: many/most of the functions in here are almost direct cut and pastes
@@ -26,12 +24,6 @@ import scala.reflect.internal.Chars.{ SU, LF }
// I rewrote most of these, but not as yet the library versions: so if you are
// tempted to touch any of these, please be aware of that situation and try not
// to let it get any worse. -- paulp
-
-/** This trait ...
- *
- * @author Burak Emir
- * @version 1.0
- */
trait MarkupParsers {
self: Parsers =>
@@ -51,7 +43,7 @@ trait MarkupParsers {
class MarkupParser(parser: SourceFileParser, final val preserveWS: Boolean) extends MarkupParserCommon {
- import Tokens.{ EMPTY, LBRACE, RBRACE }
+ import Tokens.{ LBRACE, RBRACE }
type PositionType = Position
type InputType = CharArrayReader
@@ -89,7 +81,7 @@ trait MarkupParsers {
var xEmbeddedBlock = false
- private var debugLastStartElement = new mutable.Stack[(Int, String)]
+ private val debugLastStartElement = new mutable.Stack[(Int, String)]
private def debugLastPos = debugLastStartElement.top._1
private def debugLastElem = debugLastStartElement.top._2
@@ -107,7 +99,7 @@ trait MarkupParsers {
*/
def xCheckEmbeddedBlock: Boolean = {
// attentions, side-effect, used in xText
- xEmbeddedBlock = (ch == '{') && { nextch; (ch != '{') }
+ xEmbeddedBlock = (ch == '{') && { nextch(); (ch != '{') }
xEmbeddedBlock
}
@@ -123,8 +115,7 @@ trait MarkupParsers {
while (isNameStart(ch)) {
val start = curOffset
val key = xName
- xEQ
- val delim = ch
+ xEQ()
val mid = curOffset
val value: Tree = ch match {
case '"' | '\'' =>
@@ -137,7 +128,7 @@ trait MarkupParsers {
}
case '{' =>
- nextch
+ nextch()
xEmbeddedExpr
case SU =>
throw TruncatedXMLControl
@@ -150,7 +141,7 @@ trait MarkupParsers {
aMap(key) = value
if (ch != '/' && ch != '>')
- xSpace
+ xSpace()
}
aMap
}
@@ -193,10 +184,10 @@ trait MarkupParsers {
* @precond ch == '&'
*/
def content_AMP(ts: ArrayBuffer[Tree]) {
- nextch
+ nextch()
val toAppend = ch match {
case '#' => // CharacterRef
- nextch
+ nextch()
val theChar = handle.text(tmppos, xCharRef)
xToken(';')
theChar
@@ -219,17 +210,14 @@ trait MarkupParsers {
/** Returns true if it encounters an end tag (without consuming it),
* appends trees to ts as side-effect.
- *
- * @param ts ...
- * @return ...
*/
private def content_LT(ts: ArrayBuffer[Tree]): Boolean = {
if (ch == '/')
return true // end tag
val toAppend = ch match {
- case '!' => nextch ; if (ch =='[') xCharData else xComment // CDATA or Comment
- case '?' => nextch ; xProcInstr // PI
+ case '!' => nextch() ; if (ch =='[') xCharData else xComment // CDATA or Comment
+ case '?' => nextch() ; xProcInstr // PI
case _ => element // child node
}
@@ -246,7 +234,7 @@ trait MarkupParsers {
tmppos = o2p(curOffset)
ch match {
// end tag, cdata, comment, pi or child node
- case '<' => nextch ; if (content_LT(ts)) return ts
+ case '<' => nextch() ; if (content_LT(ts)) return ts
// either the character '{' or an embedded scala block }
case '{' => content_BRACE(tmppos, ts) // }
// EntityRef or CharRef
@@ -268,7 +256,7 @@ trait MarkupParsers {
val (qname, attrMap) = xTag(())
if (ch == '/') { // empty element
xToken("/>")
- handle.element(r2p(start, start, curOffset), qname, attrMap, true, new ListBuffer[Tree])
+ handle.element(r2p(start, start, curOffset), qname, attrMap, empty = true, new ListBuffer[Tree])
}
else { // handle content
xToken('>')
@@ -278,11 +266,11 @@ trait MarkupParsers {
debugLastStartElement.push((start, qname))
val ts = content
xEndTag(qname)
- debugLastStartElement.pop
+ debugLastStartElement.pop()
val pos = r2p(start, start, curOffset)
qname match {
case "xml:group" => handle.group(pos, ts)
- case _ => handle.element(pos, qname, attrMap, false, ts)
+ case _ => handle.element(pos, qname, attrMap, empty = false, ts)
}
}
}
@@ -297,12 +285,12 @@ trait MarkupParsers {
while (ch != SU) {
if (ch == '}') {
- if (charComingAfter(nextch) == '}') nextch
+ if (charComingAfter(nextch()) == '}') nextch()
else errorBraces()
}
buf append ch
- nextch
+ nextch()
if (xCheckEmbeddedBlock || ch == '<' || ch == '&')
return done
}
@@ -349,12 +337,12 @@ trait MarkupParsers {
content_LT(ts)
// parse more XML ?
- if (charComingAfter(xSpaceOpt) == '<') {
- xSpaceOpt
+ if (charComingAfter(xSpaceOpt()) == '<') {
+ xSpaceOpt()
while (ch == '<') {
- nextch
+ nextch()
ts append element
- xSpaceOpt
+ xSpaceOpt()
}
handle.makeXMLseq(r2p(start, start, curOffset), ts)
}
@@ -375,7 +363,7 @@ trait MarkupParsers {
saving[Boolean, Tree](handle.isPattern, handle.isPattern = _) {
handle.isPattern = true
val tree = xPattern
- xSpaceOpt
+ xSpaceOpt()
tree
}
},
@@ -410,13 +398,13 @@ trait MarkupParsers {
* | Name [S] '/' '>'
*/
def xPattern: Tree = {
- var start = curOffset
+ val start = curOffset
val qname = xName
debugLastStartElement.push((start, qname))
- xSpaceOpt
+ xSpaceOpt()
val ts = new ArrayBuffer[Tree]
- val isEmptyTag = (ch == '/') && { nextch ; true }
+ val isEmptyTag = (ch == '/') && { nextch() ; true }
xToken('>')
if (!isEmptyTag) {
@@ -426,13 +414,13 @@ trait MarkupParsers {
if (xEmbeddedBlock) ts ++= xScalaPatterns
else ch match {
case '<' => // tag
- nextch
+ nextch()
if (ch != '/') ts append xPattern // child
else return false // terminate
case '{' => // embedded Scala patterns
while (ch == '{') {
- nextch
+ nextch()
ts ++= xScalaPatterns
}
assert(!xEmbeddedBlock, "problem with embedded block")
@@ -450,7 +438,7 @@ trait MarkupParsers {
while (doPattern) { } // call until false
xEndTag(qname)
- debugLastStartElement.pop
+ debugLastStartElement.pop()
}
handle.makeXMLpat(r2p(start, start, curOffset), qname, ts)
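
Most of the MarkupParsers hunk above is mechanical (nextch(), named arguments), but xCheckEmbeddedBlock encodes a rule worth stating: inside an XML literal a single '{' opens an embedded Scala block, while "{{" is an escaped literal brace. A hedged, stand-alone sketch of that opening-brace rule, operating on a plain String rather than the parser's CharArrayReader:

object XmlBraceRule {
  def describe(xml: String): List[String] = {
    val out = List.newBuilder[String]
    var i = 0
    while (i < xml.length) {
      if (xml(i) == '{') {
        if (i + 1 < xml.length && xml(i + 1) == '{') { out += "literal '{'"; i += 2 } // "{{" escapes the brace
        else { out += "embedded Scala block starts"; i += 1 }                         // single '{' opens a block
      } else i += 1
    }
    out.result()
  }

  def main(args: Array[String]): Unit =
    println(describe("<a>{{literal}} and {x + 1}</a>"))
}
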
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 6f79f639b9..9218ad3330 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -9,7 +9,8 @@
package scala.tools.nsc
package ast.parser
-import scala.collection.mutable.{ListBuffer, StringBuilder}
+import scala.collection.{ mutable, immutable }
+import mutable.{ ListBuffer, StringBuilder }
import scala.reflect.internal.{ ModifierFlags => Flags }
import scala.reflect.internal.Chars.{ isScalaLetter }
import scala.reflect.internal.util.{ SourceFile, OffsetPosition }
@@ -94,7 +95,7 @@ trait ParsersCommon extends ScannersCommon {
* <ol>
* <li>
* Places all pattern variables in Bind nodes. In a pattern, for
- * identifiers <code>x</code>:<pre>
+ * identifiers `x`:<pre>
* x => x @ _
* x:T => x @ (_ : T)</pre>
* </li>
@@ -141,9 +142,9 @@ self =>
if (source.isSelfContained) () => compilationUnit()
else () => scriptBody()
- def newScanner = new SourceFileScanner(source)
+ def newScanner(): Scanner = new SourceFileScanner(source)
- val in = newScanner
+ val in = newScanner()
in.init()
private val globalFresh = new FreshNameCreator.Default
@@ -167,11 +168,10 @@ self =>
object symbXMLBuilder extends SymbolicXMLBuilder(this, preserveWS = true) { // DEBUG choices
val global: self.global.type = self.global
- def freshName(prefix: String): Name = SourceFileParser.this.freshName(prefix)
}
- def xmlLiteral : Tree = xmlp.xLiteral
- def xmlLiteralPattern : Tree = xmlp.xLiteralPattern
+ def xmlLiteral() : Tree = xmlp.xLiteral
+ def xmlLiteralPattern() : Tree = xmlp.xLiteralPattern
}
class OutlineParser(source: SourceFile) extends SourceFileParser(source) {
@@ -196,10 +196,9 @@ self =>
}
class UnitParser(val unit: global.CompilationUnit, patches: List[BracePatch]) extends SourceFileParser(unit.source) {
+ def this(unit: global.CompilationUnit) = this(unit, Nil)
- def this(unit: global.CompilationUnit) = this(unit, List())
-
- override def newScanner = new UnitScanner(unit, patches)
+ override def newScanner() = new UnitScanner(unit, patches)
override def freshTermName(prefix: String): TermName = unit.freshTermName(prefix)
override def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix)
@@ -219,6 +218,7 @@ self =>
try body
finally smartParsing = saved
}
+ def withPatches(patches: List[BracePatch]): UnitParser = new UnitParser(unit, patches)
val syntaxErrors = new ListBuffer[(Int, String)]
def showSyntaxErrors() =
@@ -244,7 +244,7 @@ self =>
if (syntaxErrors.isEmpty) firstTry
else in.healBraces() match {
case Nil => showSyntaxErrors() ; firstTry
- case patches => new UnitParser(unit, patches).parse()
+ case patches => (this withPatches patches).parse()
}
}
}
@@ -299,11 +299,7 @@ self =>
inScalaPackage = false
currentPackage = ""
}
- private lazy val primitiveNames: Set[Name] = tpnme.ScalaValueNames.toSet
-
- private def inScalaRootPackage = inScalaPackage && currentPackage == "scala"
- private def isScalaArray(name: Name) = inScalaRootPackage && name == tpnme.Array
- private def isPrimitiveType(name: Name) = inScalaRootPackage && primitiveNames(name)
+ private def inScalaRootPackage = inScalaPackage && currentPackage == "scala"
def parseStartRule: () => Tree
@@ -380,7 +376,6 @@ self =>
* }
* }}}
*/
- import definitions._
def emptyPkg = atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) }
def emptyInit = DefDef(
@@ -389,7 +384,7 @@ self =>
Nil,
ListOfNil,
TypeTree(),
- Block(List(Apply(gen.mkSuperSelect, Nil)), Literal(Constant(())))
+ Block(List(Apply(gen.mkSuperInitCall, Nil)), Literal(Constant(())))
)
// def main
@@ -429,13 +424,13 @@ self =>
placeholderParams match {
case vd :: _ =>
- syntaxError(vd.pos, "unbound placeholder parameter", false)
+ syntaxError(vd.pos, "unbound placeholder parameter", skipIt = false)
placeholderParams = List()
case _ =>
}
placeholderTypes match {
case td :: _ =>
- syntaxError(td.pos, "unbound wildcard type", false)
+ syntaxError(td.pos, "unbound wildcard type", skipIt = false)
placeholderTypes = List()
case _ =>
}
@@ -468,7 +463,7 @@ self =>
/* ------------- ERROR HANDLING ------------------------------------------- */
- var assumedClosingParens = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+ val assumedClosingParens = mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
private var inFunReturnType = false
@inline private def fromWithinReturnType[T](body: => T): T = {
@@ -542,7 +537,7 @@ self =>
def accept(token: Int): Int = {
val offset = in.offset
if (in.token != token) {
- syntaxErrorOrIncomplete(expectedMsg(token), false)
+ syntaxErrorOrIncomplete(expectedMsg(token), skipIt = false)
if ((token == RPAREN || token == RBRACE || token == RBRACKET))
if (in.parenBalance(token) + assumedClosingParens(token) < 0)
assumedClosingParens(token) += 1
@@ -575,9 +570,9 @@ self =>
/** Check that type parameter is not by name or repeated. */
def checkNotByNameOrVarargs(tpt: Tree) = {
if (treeInfo isByNameParamType tpt)
- syntaxError(tpt.pos, "no by-name parameter type allowed here", false)
+ syntaxError(tpt.pos, "no by-name parameter type allowed here", skipIt = false)
else if (treeInfo isRepeatedParamType tpt)
- syntaxError(tpt.pos, "no * parameter type allowed here", false)
+ syntaxError(tpt.pos, "no * parameter type allowed here", skipIt = false)
}
/** Check that tree is a legal clause of a forSome. */
@@ -586,7 +581,7 @@ self =>
ValDef(_, _, _, EmptyTree) | EmptyTree =>
;
case _ =>
- syntaxError(t.pos, "not a legal existential clause", false)
+ syntaxError(t.pos, "not a legal existential clause", skipIt = false)
}
/* -------------- TOKEN CLASSES ------------------------------------------- */
@@ -645,8 +640,6 @@ self =>
case _ => false
}
- def isTypeIntro: Boolean = isTypeIntroToken(in.token)
-
def isStatSeqEnd = in.token == RBRACE || in.token == EOF
def isStatSep(token: Int): Boolean =
@@ -657,31 +650,10 @@ self =>
/* --------- COMMENT AND ATTRIBUTE COLLECTION ----------------------------- */
- /** Join the comment associated with a definition. */
- def joinComment(trees: => List[Tree]): List[Tree] = {
- val doc = in.flushDoc
- if ((doc ne null) && doc.raw.length > 0) {
- val joined = trees map {
- t =>
- DocDef(doc, t) setPos {
- if (t.pos.isDefined) {
- val pos = doc.pos.withEnd(t.pos.endOrPoint)
- // always make the position transparent
- pos.makeTransparent
- } else {
- t.pos
- }
- }
- }
- joined.find(_.pos.isOpaqueRange) foreach {
- main =>
- val mains = List(main)
- joined foreach { t => if (t ne main) ensureNonOverlapping(t, mains) }
- }
- joined
- }
- else trees
- }
+ /** A hook for joining the comment associated with a definition.
+ * Overridden by scaladoc.
+ */
+ def joinComment(trees: => List[Tree]): List[Tree] = trees
/* ---------- TREE CONSTRUCTION ------------------------------------------- */
@@ -708,12 +680,12 @@ self =>
tree match {
case Ident(name) =>
removeAsPlaceholder(name)
- makeParam(name, TypeTree() setPos o2p(tree.pos.endOrPoint))
+ makeParam(name.toTermName, TypeTree() setPos o2p(tree.pos.endOrPoint))
case Typed(Ident(name), tpe) if tpe.isType => // get the ident!
removeAsPlaceholder(name)
- makeParam(name, tpe)
+ makeParam(name.toTermName, tpe)
case _ =>
- syntaxError(tree.pos, "not a legal formal parameter", false)
+ syntaxError(tree.pos, "not a legal formal parameter", skipIt = false)
makeParam(nme.ERROR, errorTypeTree setPos o2p(tree.pos.endOrPoint))
}
}
@@ -721,7 +693,7 @@ self =>
/** Convert (qual)ident to type identifier. */
def convertToTypeId(tree: Tree): Tree = atPos(tree.pos) {
convertToTypeName(tree) getOrElse {
- syntaxError(tree.pos, "identifier expected", false)
+ syntaxError(tree.pos, "identifier expected", skipIt = false)
errorTypeTree
}
}
@@ -770,14 +742,10 @@ self =>
}
}
- def checkSize(kind: String, size: Int, max: Int) {
- if (size > max) syntaxError("too many "+kind+", maximum = "+max, false)
- }
-
def checkAssoc(offset: Int, op: Name, leftAssoc: Boolean) =
if (treeInfo.isLeftAssoc(op) != leftAssoc)
syntaxError(
- offset, "left- and right-associative operators with same precedence may not be mixed", false)
+ offset, "left- and right-associative operators with same precedence may not be mixed", skipIt = false)
def reduceStack(isExpr: Boolean, base: List[OpInfo], top0: Tree, prec: Int, leftAssoc: Boolean): Tree = {
var top = top0
@@ -794,7 +762,7 @@ self =>
val rPos = top.pos
val end = if (rPos.isDefined) rPos.endOrPoint else opPos.endOrPoint
top = atPos(start, opinfo.offset, end) {
- makeBinop(isExpr, opinfo.operand, opinfo.operator, top, opPos)
+ makeBinop(isExpr, opinfo.operand, opinfo.operator.toTermName, top, opPos)
}
}
top
@@ -923,7 +891,7 @@ self =>
)
def compoundTypeRest(t: Tree): Tree = {
- var ts = new ListBuffer[Tree] += t
+ val ts = new ListBuffer[Tree] += t
while (in.token == WITH) {
in.nextToken()
ts += annotType()
@@ -1130,21 +1098,12 @@ self =>
case FALSE => false
case NULL => null
case _ =>
- syntaxErrorOrIncomplete("illegal literal", true)
+ syntaxErrorOrIncomplete("illegal literal", skipIt = true)
null
})
}
- private def stringOp(t: Tree, op: TermName) = {
- val str = in.strVal
- in.nextToken()
- if (str.length == 0) t
- else atPos(t.pos.startOrPoint) {
- Apply(Select(t, op), List(Literal(Constant(str))))
- }
- }
-
- private def interpolatedString(inPattern: Boolean = false): Tree = atPos(in.offset) {
+ private def interpolatedString(inPattern: Boolean): Tree = atPos(in.offset) {
val start = in.offset
val interpolator = in.name
@@ -1160,7 +1119,7 @@ self =>
else if(in.token == LBRACE) expr()
else if(in.token == THIS) { in.nextToken(); atPos(in.offset)(This(tpnme.EMPTY)) }
else {
- syntaxErrorOrIncomplete("error in interpolated string: identifier or block expected", true)
+ syntaxErrorOrIncomplete("error in interpolated string: identifier or block expected", skipIt = true)
EmptyTree
}
}
@@ -1228,15 +1187,6 @@ self =>
/* ----------- EXPRESSIONS ------------------------------------------------ */
- /** {{{
- * EqualsExpr ::= `=' Expr
- * }}}
- */
- def equalsExpr(): Tree = {
- accept(EQUALS)
- expr()
- }
-
def condExpr(): Tree = {
if (in.token == LPAREN) {
in.nextToken()
@@ -1280,7 +1230,7 @@ self =>
def expr(): Tree = expr(Local)
def expr(location: Int): Tree = {
- var savedPlaceholderParams = placeholderParams
+ val savedPlaceholderParams = placeholderParams
placeholderParams = List()
var res = expr0(location)
if (!placeholderParams.isEmpty && !isWildcard(res)) {
@@ -1330,26 +1280,24 @@ self =>
parseTry
case WHILE =>
def parseWhile = {
- val start = in.offset
atPos(in.skipToken()) {
val lname: Name = freshTermName(nme.WHILE_PREFIX)
val cond = condExpr()
newLinesOpt()
val body = expr()
- makeWhile(lname, cond, body)
+ makeWhile(lname.toTermName, cond, body)
}
}
parseWhile
case DO =>
def parseDo = {
- val start = in.offset
atPos(in.skipToken()) {
val lname: Name = freshTermName(nme.DO_WHILE_PREFIX)
val body = expr()
if (isStatSep) in.nextToken()
accept(WHILE)
val cond = condExpr()
- makeDoWhile(lname, body, cond)
+ makeDoWhile(lname.toTermName, body, cond)
}
}
parseDo
@@ -1407,7 +1355,7 @@ self =>
Typed(t, atPos(uscorePos) { Ident(tpnme.WILDCARD_STAR) })
}
} else {
- syntaxErrorOrIncomplete("`*' expected", true)
+ syntaxErrorOrIncomplete("`*' expected", skipIt = true)
}
} else if (in.token == AT) {
t = (t /: annotations(skipNewLines = false))(makeAnnotated)
@@ -1508,7 +1456,7 @@ self =>
def prefixExpr(): Tree = {
if (isUnaryOp) {
atPos(in.offset) {
- val name = nme.toUnaryName(rawIdent())
+ val name = nme.toUnaryName(rawIdent().toTermName)
if (name == nme.UNARY_- && isNumericLit)
simpleExprRest(atPos(in.offset)(literal(isNegated = true)), canApply = true)
else
@@ -1546,11 +1494,11 @@ self =>
val pname = freshName("x$")
in.nextToken()
val id = atPos(start) (Ident(pname))
- val param = atPos(id.pos.focus){ makeSyntheticParam(pname) }
+ val param = atPos(id.pos.focus){ makeSyntheticParam(pname.toTermName) }
placeholderParams = param :: placeholderParams
id
case LPAREN =>
- atPos(in.offset)(makeParens(commaSeparated(expr)))
+ atPos(in.offset)(makeParens(commaSeparated(expr())))
case LBRACE =>
canApply = false
blockExpr()
@@ -1559,11 +1507,11 @@ self =>
val nstart = in.skipToken()
val npos = r2p(nstart, nstart, in.lastOffset)
val tstart = in.offset
- val (parents, argss, self, stats) = template(isTrait = false)
+ val (parents, self, stats) = template()
val cpos = r2p(tstart, tstart, in.lastOffset max tstart)
- makeNew(parents, self, stats, argss, npos, cpos)
+ makeNew(parents, self, stats, npos, cpos)
case _ =>
- syntaxErrorOrIncomplete("illegal start of simple expression", true)
+ syntaxErrorOrIncomplete("illegal start of simple expression", skipIt = true)
errorTermTree
}
simpleExprRest(t, canApply = canApply)
@@ -1615,14 +1563,9 @@ self =>
* }}}
*/
def argumentExprs(): List[Tree] = {
- def args(): List[Tree] = commaSeparated {
- val maybeNamed = isIdent
- expr() match {
- case a @ Assign(id, rhs) if maybeNamed =>
- atPos(a.pos) { AssignOrNamedArg(id, rhs) }
- case e => e
- }
- }
+ def args(): List[Tree] = commaSeparated(
+ if (isIdent) treeInfo.assignmentToMaybeNamedArg(expr()) else expr()
+ )
in.token match {
case LBRACE => List(blockExpr())
case LPAREN => inParens(if (in.token == RPAREN) Nil else args())
@@ -1806,7 +1749,6 @@ self =>
* }}}
*/
def pattern2(): Tree = {
- val nameOffset = in.offset
val p = pattern3()
if (in.token != AT) p
@@ -1910,7 +1852,7 @@ self =>
def simplePattern(): Tree = {
// simple diagnostics for this entry point
def badStart(): Tree = {
- syntaxErrorOrIncomplete("illegal start of simple pattern", true)
+ syntaxErrorOrIncomplete("illegal start of simple pattern", skipIt = true)
errorPatternTree
}
simplePattern(badStart)
@@ -1919,7 +1861,7 @@ self =>
val start = in.offset
in.token match {
case IDENTIFIER | BACKQUOTED_IDENT | THIS =>
- var t = stableId()
+ val t = stableId()
in.token match {
case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT =>
t match {
@@ -1981,7 +1923,6 @@ self =>
/** Default entry points into some pattern contexts. */
def pattern(): Tree = noSeq.pattern()
- def patterns(): List[Tree] = noSeq.patterns()
def seqPatterns(): List[Tree] = seqOK.patterns()
def xmlSeqPatterns(): List[Tree] = xmlSeqOK.patterns() // Called from xml parser
def argumentPatterns(): List[Tree] = inParens {
@@ -1995,16 +1936,16 @@ self =>
/** Drop `private` modifier when followed by a qualifier.
* Contract `abstract` and `override` to ABSOVERRIDE
*/
- private def normalize(mods: Modifiers): Modifiers =
+ private def normalizeModifers(mods: Modifiers): Modifiers =
if (mods.isPrivate && mods.hasAccessBoundary)
- normalize(mods &~ Flags.PRIVATE)
+ normalizeModifers(mods &~ Flags.PRIVATE)
else if (mods hasAllFlags (Flags.ABSTRACT | Flags.OVERRIDE))
- normalize(mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE)
+ normalizeModifers(mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE)
else
mods
private def addMod(mods: Modifiers, mod: Long, pos: Position): Modifiers = {
- if (mods hasFlag mod) syntaxError(in.offset, "repeated modifier", false)
+ if (mods hasFlag mod) syntaxError(in.offset, "repeated modifier", skipIt = false)
in.nextToken()
(mods | mod) withPosition (mod, pos)
}
@@ -2021,7 +1962,7 @@ self =>
if (in.token == LBRACKET) {
in.nextToken()
if (mods.hasAccessBoundary)
- syntaxError("duplicate private/protected qualifier", false)
+ syntaxError("duplicate private/protected qualifier", skipIt = false)
result = if (in.token == THIS) { in.nextToken(); mods | Flags.LOCAL }
else Modifiers(mods.flags, identForType())
accept(RBRACKET)
@@ -2044,7 +1985,7 @@ self =>
* AccessModifier ::= (private | protected) [AccessQualifier]
* }}}
*/
- def accessModifierOpt(): Modifiers = normalize {
+ def accessModifierOpt(): Modifiers = normalizeModifers {
in.token match {
case m @ (PRIVATE | PROTECTED) => in.nextToken() ; accessQualifierOpt(Modifiers(flagTokens(m)))
case _ => NoMods
@@ -2058,7 +1999,7 @@ self =>
* | override
* }}}
*/
- def modifiers(): Modifiers = normalize {
+ def modifiers(): Modifiers = normalizeModifers {
def loop(mods: Modifiers): Modifiers = in.token match {
case PRIVATE | PROTECTED =>
loop(accessQualifierOpt(addMod(mods, flagTokens(in.token), tokenRange(in))))
@@ -2103,7 +2044,7 @@ self =>
def annotationExpr(): Tree = atPos(in.offset) {
val t = exprSimpleType()
if (in.token == LPAREN) New(t, multipleArgumentExprs())
- else New(t, ListOfNil)
+ else New(t, Nil)
}
/* -------- PARAMETERS ------------------------------------------- */
@@ -2128,7 +2069,7 @@ self =>
var mods = Modifiers(Flags.PARAM)
if (owner.isTypeName) {
mods = modifiers() | Flags.PARAMACCESSOR
- if (mods.isLazy) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead", false)
+ if (mods.isLazy) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead", skipIt = false)
in.token match {
case v @ (VAL | VAR) =>
mods = mods withPosition (in.token, tokenRange(in))
@@ -2153,11 +2094,11 @@ self =>
syntaxError(
in.offset,
(if (mods.isMutable) "`var'" else "`val'") +
- " parameters may not be call-by-name", false)
+ " parameters may not be call-by-name", skipIt = false)
else if (implicitmod != 0)
syntaxError(
in.offset,
- "implicit parameters may not be call-by-name", false)
+ "implicit parameters may not be call-by-name", skipIt = false)
else bynamemod = Flags.BYNAMEPARAM
}
paramType()
@@ -2169,7 +2110,7 @@ self =>
expr()
} else EmptyTree
atPos(start, if (name == nme.ERROR) start else nameOffset) {
- ValDef((mods | implicitmod | bynamemod) withAnnotations annots, name, tpt, default)
+ ValDef((mods | implicitmod | bynamemod) withAnnotations annots, name.toTermName, tpt, default)
}
}
def paramClause(): List[ValDef] = {
@@ -2186,8 +2127,8 @@ self =>
val start = in.offset
newLineOptWhenFollowedBy(LPAREN)
if (ofCaseClass && in.token != LPAREN)
- deprecationWarning(in.lastOffset, "case classes without a parameter list have been deprecated;\n"+
- "use either case objects or case classes with `()' as parameter list.")
+ syntaxError(in.lastOffset, "case classes without a parameter list are not allowed;\n"+
+ "use either case objects or case classes with an explicit `()' as a parameter list.")
while (implicitmod == 0 && in.token == LPAREN) {
in.nextToken()
vds += paramClause()
@@ -2198,9 +2139,9 @@ self =>
val result = vds.toList
if (owner == nme.CONSTRUCTOR && (result.isEmpty || (result.head take 1 exists (_.mods.isImplicit)))) {
in.token match {
- case LBRACKET => syntaxError(in.offset, "no type parameters allowed here", false)
+ case LBRACKET => syntaxError(in.offset, "no type parameters allowed here", skipIt = false)
case EOF => incompleteInputError("auxiliary constructor needs non-implicit parameter list")
- case _ => syntaxError(start, "auxiliary constructor needs non-implicit parameter list", false)
+ case _ => syntaxError(start, "auxiliary constructor needs non-implicit parameter list", skipIt = false)
}
}
addEvidenceParams(owner, result, contextBounds)
@@ -2411,7 +2352,7 @@ self =>
*/
def defOrDcl(pos: Int, mods: Modifiers): List[Tree] = {
if (mods.isLazy && in.token != VAL)
- syntaxError("lazy not allowed here. Only vals can be lazy", false)
+ syntaxError("lazy not allowed here. Only vals can be lazy", skipIt = false)
in.token match {
case VAL =>
patDefOrDcl(pos, mods withPosition(VAL, tokenRange(in)))
@@ -2469,8 +2410,8 @@ self =>
if (newmods.isDeferred) {
trees match {
case List(ValDef(_, _, _, EmptyTree)) =>
- if (mods.isLazy) syntaxError(p.pos, "lazy values may not be abstract", false)
- case _ => syntaxError(p.pos, "pattern definition may not be abstract", false)
+ if (mods.isLazy) syntaxError(p.pos, "lazy values may not be abstract", skipIt = false)
+ case _ => syntaxError(p.pos, "pattern definition may not be abstract", skipIt = false)
}
}
trees
@@ -2520,7 +2461,7 @@ self =>
* }}}
*/
def funDefOrDcl(start : Int, mods: Modifiers): Tree = {
- in.nextToken
+ in.nextToken()
if (in.token == THIS) {
atPos(start, in.skipToken()) {
val vparamss = paramClauses(nme.CONSTRUCTOR, classContextBounds map (_.duplicate), ofCaseClass = false)
@@ -2626,7 +2567,6 @@ self =>
in.nextToken()
newLinesOpt()
atPos(start, in.offset) {
- val nameOffset = in.offset
val name = identForType()
// @M! a type alias as well as an abstract type may declare type parameters
val tparams = typeParamClauseOpt(name, null)
@@ -2637,7 +2577,7 @@ self =>
case SUPERTYPE | SUBTYPE | SEMI | NEWLINE | NEWLINES | COMMA | RBRACE =>
TypeDef(mods | Flags.DEFERRED, name, tparams, typeBounds())
case _ =>
- syntaxErrorOrIncomplete("`=', `>:', or `<:' expected", true)
+ syntaxErrorOrIncomplete("`=', `>:', or `<:' expected", skipIt = true)
EmptyTree
}
}
@@ -2658,7 +2598,7 @@ self =>
* }}}
*/
def tmplDef(pos: Int, mods: Modifiers): Tree = {
- if (mods.isLazy) syntaxError("classes cannot be lazy", false)
+ if (mods.isLazy) syntaxError("classes cannot be lazy", skipIt = false)
in.token match {
case TRAIT =>
classDef(pos, (mods | Flags.TRAIT | Flags.ABSTRACT) withPosition (Flags.TRAIT, tokenRange(in)))
@@ -2671,7 +2611,7 @@ self =>
case CASEOBJECT =>
objectDef(pos, (mods | Flags.CASE) withPosition (Flags.CASE, tokenRange(in.prev /*scanner skips on 'case' to 'object', thus take prev*/)))
case _ =>
- syntaxErrorOrIncomplete("expected start of definition", true)
+ syntaxErrorOrIncomplete("expected start of definition", skipIt = true)
EmptyTree
}
}
@@ -2683,7 +2623,7 @@ self =>
* }}}
*/
def classDef(start: Int, mods: Modifiers): ClassDef = {
- in.nextToken
+ in.nextToken()
val nameOffset = in.offset
val name = identForType()
atPos(start, if (name == tpnme.ERROR) start else nameOffset) {
@@ -2693,7 +2633,7 @@ self =>
classContextBounds = contextBoundBuf.toList
val tstart = (in.offset :: classContextBounds.map(_.pos.startOrPoint)).min
if (!classContextBounds.isEmpty && mods.isTrait) {
- syntaxError("traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'", false)
+ syntaxError("traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'", skipIt = false)
classContextBounds = List()
}
val constrAnnots = constructorAnnotations()
@@ -2704,7 +2644,7 @@ self =>
if (mods.isTrait) {
if (settings.YvirtClasses && in.token == SUBTYPE) mods1 |= Flags.DEFERRED
} else if (in.token == SUBTYPE) {
- syntaxError("classes are not allowed to be virtual", false)
+ syntaxError("classes are not allowed to be virtual", skipIt = false)
}
val template = templateOpt(mods1, name, constrMods withAnnotations constrAnnots, vparamss, tstart)
if (isInterface(mods1, template.body)) mods1 |= Flags.INTERFACE
@@ -2723,14 +2663,14 @@ self =>
* }}}
*/
def objectDef(start: Int, mods: Modifiers): ModuleDef = {
- in.nextToken
+ in.nextToken()
val nameOffset = in.offset
val name = ident()
val tstart = in.offset
atPos(start, if (name == nme.ERROR) start else nameOffset) {
val mods1 = if (in.token == SUBTYPE) mods | Flags.DEFERRED else mods
val template = templateOpt(mods1, name, NoMods, Nil, tstart)
- ModuleDef(mods1, name, template)
+ ModuleDef(mods1, name.toTermName, template)
}
}
@@ -2739,20 +2679,17 @@ self =>
* TraitParents ::= AnnotType {with AnnotType}
* }}}
*/
- def templateParents(isTrait: Boolean): (List[Tree], List[List[Tree]]) = {
- val parents = new ListBuffer[Tree] += startAnnotType()
- val argss = (
- // TODO: the insertion of ListOfNil here is where "new Foo" becomes
- // indistinguishable from "new Foo()".
- if (in.token == LPAREN && !isTrait) multipleArgumentExprs()
- else ListOfNil
- )
-
- while (in.token == WITH) {
- in.nextToken()
- parents += startAnnotType()
+ def templateParents(): List[Tree] = {
+ val parents = new ListBuffer[Tree]
+ def readAppliedParent() = {
+ val start = in.offset
+ val parent = startAnnotType()
+ val argss = if (in.token == LPAREN) multipleArgumentExprs() else Nil
+ parents += atPos(start)((parent /: argss)(Apply.apply))
}
- (parents.toList, argss)
+ readAppliedParent()
+ while (in.token == WITH) { in.nextToken(); readAppliedParent() }
+ parents.toList
}
/** {{{
@@ -2762,33 +2699,33 @@ self =>
* EarlyDef ::= Annotations Modifiers PatDef
* }}}
*/
- def template(isTrait: Boolean): (List[Tree], List[List[Tree]], ValDef, List[Tree]) = {
+ def template(): (List[Tree], ValDef, List[Tree]) = {
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) {
// @S: pre template body cannot stub like post body can!
val (self, body) = templateBody(isPre = true)
- if (in.token == WITH && self.isEmpty) {
+ if (in.token == WITH && (self eq emptyValDef)) {
val earlyDefs: List[Tree] = body flatMap {
case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
List(copyValDef(vdef)(mods = mods | Flags.PRESUPER))
case tdef @ TypeDef(mods, name, tparams, rhs) =>
List(treeCopy.TypeDef(tdef, mods | Flags.PRESUPER, name, tparams, rhs))
case stat if !stat.isEmpty =>
- syntaxError(stat.pos, "only type definitions and concrete field definitions allowed in early object initialization section", false)
+ syntaxError(stat.pos, "only type definitions and concrete field definitions allowed in early object initialization section", skipIt = false)
List()
case _ => List()
}
in.nextToken()
- val (parents, argss) = templateParents(isTrait = isTrait)
- val (self1, body1) = templateBodyOpt(traitParentSeen = isTrait)
- (parents, argss, self1, earlyDefs ::: body1)
+ val parents = templateParents()
+ val (self1, body1) = templateBodyOpt(parenMeansSyntaxError = false)
+ (parents, self1, earlyDefs ::: body1)
} else {
- (List(), ListOfNil, self, body)
+ (List(), self, body)
}
} else {
- val (parents, argss) = templateParents(isTrait = isTrait)
- val (self, body) = templateBodyOpt(traitParentSeen = isTrait)
- (parents, argss, self, body)
+ val parents = templateParents()
+ val (self, body) = templateBodyOpt(parenMeansSyntaxError = false)
+ (parents, self, body)
}
}
@@ -2802,15 +2739,15 @@ self =>
* }}}
*/
def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Int): Template = {
- val (parents0, argss, self, body) = (
+ val (parents0, self, body) = (
if (in.token == EXTENDS || in.token == SUBTYPE && mods.isTrait) {
in.nextToken()
- template(isTrait = mods.isTrait)
+ template()
}
else {
newLineOptWhenFollowedBy(LBRACE)
- val (self, body) = templateBodyOpt(traitParentSeen = false)
- (List(), ListOfNil, self, body)
+ val (self, body) = templateBodyOpt(parenMeansSyntaxError = mods.isTrait || name.isTermName)
+ (List(), self, body)
}
)
def anyrefParents() = {
@@ -2832,7 +2769,7 @@ self =>
if (inScalaRootPackage && ScalaValueClassNames.contains(name))
Template(parents0, self, anyvalConstructor :: body)
else
- Template(anyrefParents, self, constrMods, vparamss, argss, body, o2p(tstart))
+ Template(anyrefParents(), self, constrMods, vparamss, body, o2p(tstart))
}
}
@@ -2847,14 +2784,15 @@ self =>
case (self, Nil) => (self, EmptyTree.asList)
case result => result
}
- def templateBodyOpt(traitParentSeen: Boolean): (ValDef, List[Tree]) = {
+ def templateBodyOpt(parenMeansSyntaxError: Boolean): (ValDef, List[Tree]) = {
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) {
templateBody(isPre = false)
} else {
- if (in.token == LPAREN)
- syntaxError((if (traitParentSeen) "parents of traits" else "traits or objects")+
- " may not have parameters", true)
+ if (in.token == LPAREN) {
+ if (parenMeansSyntaxError) syntaxError(s"traits or objects may not have parameters", skipIt = true)
+ else abort("unexpected opening parenthesis")
+ }
(emptyValDef, List())
}
}
@@ -2903,7 +2841,6 @@ self =>
* }}}
*/
def packaging(start: Int): Tree = {
- val nameOffset = in.offset
val pkg = pkgQualId()
val stats = inBracesOrNil(topStatSeq())
makePackaging(start, pkg, stats)
@@ -2937,7 +2874,7 @@ self =>
joinComment(List(topLevelTmplDef))
case _ =>
if (!isStatSep)
- syntaxErrorOrIncomplete("expected class or object definition", true)
+ syntaxErrorOrIncomplete("expected class or object definition", skipIt = true)
Nil
})
acceptStatSepOpt()
@@ -2996,7 +2933,7 @@ self =>
} else if (isDefIntro || isModifier || in.token == AT) {
stats ++= joinComment(nonLocalDefOrDcl)
} else if (!isStatSep) {
- syntaxErrorOrIncomplete("illegal start of definition", true)
+ syntaxErrorOrIncomplete("illegal start of definition", skipIt = true)
}
acceptStatSepOpt()
}
@@ -3019,7 +2956,7 @@ self =>
syntaxErrorOrIncomplete(
"illegal start of declaration"+
(if (inFunReturnType) " (possible cause: missing `=' in front of current method body)"
- else ""), true)
+ else ""), skipIt = true)
}
if (in.token != RBRACE) acceptStatSep()
}
@@ -3089,7 +3026,7 @@ self =>
}
else {
val addendum = if (isModifier) " (no modifiers allowed here)" else ""
- syntaxErrorOrIncomplete("illegal start of statement" + addendum, true)
+ syntaxErrorOrIncomplete("illegal start of statement" + addendum, skipIt = true)
}
}
stats.toList
@@ -3113,7 +3050,6 @@ self =>
ts ++= topStatSeq()
}
} else {
- val nameOffset = in.offset
in.flushDoc
val pkg = pkgQualId()
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index c05906c740..6ad1c50075 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -9,8 +9,9 @@ import scala.tools.nsc.util.CharArrayReader
import scala.reflect.internal.util._
import scala.reflect.internal.Chars._
import Tokens._
-import scala.annotation.switch
-import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
+import scala.annotation.{ switch, tailrec }
+import scala.collection.{ mutable, immutable }
+import mutable.{ ListBuffer, ArrayBuffer }
import scala.xml.Utility.{ isNameStart }
/** See Parsers.scala / ParsersCommon for some explanation of ScannersCommon.
@@ -26,7 +27,6 @@ trait ScannersCommon {
trait ScannerCommon extends CommonTokenData {
// things to fill in, in addition to buf, decodeUni which come from CharArrayReader
- def warning(off: Int, msg: String): Unit
def error (off: Int, msg: String): Unit
def incompleteInputError(off: Int, msg: String): Unit
def deprecationWarning(off: Int, msg: String): Unit
@@ -50,9 +50,6 @@ trait Scanners extends ScannersCommon {
/** Offset into source character array */
type Offset = Int
- /** An undefined offset */
- val NoOffset: Offset = -1
-
trait TokenData extends CommonTokenData {
/** the next token */
@@ -86,9 +83,70 @@ trait Scanners extends ScannersCommon {
abstract class Scanner extends CharArrayReader with TokenData with ScannerCommon {
private def isDigit(c: Char) = java.lang.Character isDigit c
- def isAtEnd = charOffset >= buf.length
+ private var openComments = 0
+ protected def putCommentChar(): Unit = nextChar()
+
+ @tailrec private def skipLineComment(): Unit = ch match {
+ case SU | CR | LF =>
+ case _ => nextChar() ; skipLineComment()
+ }
+ private def maybeOpen() {
+ putCommentChar()
+ if (ch == '*') {
+ putCommentChar()
+ openComments += 1
+ }
+ }
+ private def maybeClose(): Boolean = {
+ putCommentChar()
+ (ch == '/') && {
+ putCommentChar()
+ openComments -= 1
+ openComments == 0
+ }
+ }
+ @tailrec final def skipNestedComments(): Unit = ch match {
+ case '/' => maybeOpen() ; skipNestedComments()
+ case '*' => if (!maybeClose()) skipNestedComments()
+ case SU => incompleteInputError("unclosed comment")
+ case _ => putCommentChar() ; skipNestedComments()
+ }
+ def skipDocComment(): Unit = skipNestedComments()
+ def skipBlockComment(): Unit = skipNestedComments()
- def flush = { charOffset = offset; nextChar(); this }
+ private def skipToCommentEnd(isLineComment: Boolean) {
+ nextChar()
+ if (isLineComment) skipLineComment()
+ else {
+ openComments = 1
+ val isDocComment = (ch == '*') && { nextChar(); true }
+ if (isDocComment) {
+ // Check for the amazing corner case of /**/
+ if (ch == '/')
+ nextChar()
+ else
+ skipDocComment()
+ }
+ else skipBlockComment()
+ }
+ }
+
+ /** @pre ch == '/'
+ * Returns true if a comment was skipped.
+ */
+ def skipComment(): Boolean = ch match {
+ case '/' | '*' => skipToCommentEnd(isLineComment = ch == '/') ; true
+ case _ => false
+ }
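
The tail-recursive skippers above track nesting with a single openComments counter: maybeOpen bumps it on each open delimiter, maybeClose decrements it on a close, and scanning stops when the count returns to zero. A self-contained sketch of the same counting over a plain String (hypothetical helper, not scanner code):

    object NestedCommentDemo extends App {
      // Returns the index just past the comment starting at `start`
      // (which must point at a "/" "*" pair), or -1 if it is never closed.
      def skipNested(src: String, start: Int): Int = {
        var i    = start + 2   // step over the opening delimiter
        var open = 1           // like `openComments` in the scanner
        while (open > 0 && i < src.length - 1) {
          if      (src.startsWith("/*", i)) { open += 1; i += 2 }
          else if (src.startsWith("*/", i)) { open -= 1; i += 2 }
          else i += 1
        }
        if (open == 0) i else -1
      }

      val code = "/* outer /* inner */ still outer */ val x = 1"
      println(skipNested(code, 0))                    // 35
      println(code.substring(skipNested(code, 0)))    // " val x = 1"
    }
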
+ def flushDoc(): DocComment = null
+
+ /** To prevent doc comments attached to expressions from leaking out of scope
+ * onto the next documentable entity, they are discarded upon passing a right
+ * brace, bracket, or parenthesis.
+ */
+ def discardDocBuffer(): Unit = ()
+
+ def isAtEnd = charOffset >= buf.length
def resume(lastCode: Int) = {
token = lastCode
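
On the discardDocBuffer note above: the point is that a doc comment appearing inside an expression should not survive past the closing brace and attach to whatever definition comes next. A small illustration in ordinary user code:

    class LeakDemo {
      def x = {
        /** A doc comment attached to an expression rather than a member. */
        42
      }            // passing this RBRACE discards the buffered comment,
                   // so it cannot be attributed to `y` below
      def y = 1
    }
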
@@ -98,10 +156,6 @@ trait Scanners extends ScannersCommon {
nextToken()
}
- /** the last error offset
- */
- var errOffset: Offset = NoOffset
-
/** A character buffer for literals
*/
val cbuf = new StringBuilder
@@ -139,22 +193,6 @@ trait Scanners extends ScannersCommon {
cbuf.clear()
}
- /** Should doc comments be built? */
- def buildDocs: Boolean = forScaladoc
-
- /** holder for the documentation comment
- */
- var docComment: DocComment = null
-
- def flushDoc: DocComment = {
- val ret = docComment
- docComment = null
- ret
- }
-
- protected def foundComment(value: String, start: Int, end: Int) = ()
- protected def foundDocComment(value: String, start: Int, end: Int) = ()
-
private class TokenData0 extends TokenData
/** we need one token lookahead and one token history
@@ -227,12 +265,15 @@ trait Scanners extends ScannersCommon {
case RBRACE =>
while (!sepRegions.isEmpty && sepRegions.head != RBRACE)
sepRegions = sepRegions.tail
- if (!sepRegions.isEmpty) sepRegions = sepRegions.tail
- docComment = null
+ if (!sepRegions.isEmpty)
+ sepRegions = sepRegions.tail
+
+ discardDocBuffer()
case RBRACKET | RPAREN =>
if (!sepRegions.isEmpty && sepRegions.head == lastToken)
sepRegions = sepRegions.tail
- docComment = null
+
+ discardDocBuffer()
case ARROW =>
if (!sepRegions.isEmpty && sepRegions.head == lastToken)
sepRegions = sepRegions.tail
@@ -375,7 +416,7 @@ trait Scanners extends ScannersCommon {
getOperatorRest()
}
}
- fetchLT
+ fetchLT()
case '~' | '!' | '@' | '#' | '%' |
'^' | '*' | '+' | '-' | /*'<' | */
'>' | '?' | ':' | '=' | '&' |
@@ -405,14 +446,14 @@ trait Scanners extends ScannersCommon {
* there a realistic situation where one would need it?
*/
if (isDigit(ch)) {
- if (opt.future) syntaxError("Non-zero numbers may not have a leading zero.")
+ if (settings.future.value) syntaxError("Non-zero numbers may not have a leading zero.")
else deprecationWarning("Treating numbers with a leading zero as octal is deprecated.")
}
base = 8
}
getNumber()
}
- fetchZero
+ fetchZero()
case '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
base = 10
getNumber()
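
The leading-zero change above means the 2.10-era scanner still accepts octal integer literals with a deprecation warning, and rejects them outright once settings.future (the usual -Xfuture spelling) is set. Behaviour as of that compiler generation, for illustration:

    object LeadingZeroDemo extends App {
      val oct = 010          // 2.10: deprecated, still parsed as octal => 8; error under -Xfuture
      println(oct == 8)      // true wherever octal literals are still accepted
    }
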
@@ -453,7 +494,7 @@ trait Scanners extends ScannersCommon {
}
}
}
- fetchDoubleQuote
+ fetchDoubleQuote()
case '\'' =>
def fetchSingleQuote() = {
nextChar()
@@ -472,7 +513,7 @@ trait Scanners extends ScannersCommon {
}
}
}
- fetchSingleQuote
+ fetchSingleQuote()
case '.' =>
nextChar()
if ('0' <= ch && ch <= '9') {
@@ -521,63 +562,7 @@ trait Scanners extends ScannersCommon {
nextChar()
}
}
- fetchOther
- }
- }
-
- private def skipComment(): Boolean = {
-
- if (ch == '/' || ch == '*') {
-
- val comment = new StringBuilder("/")
- def appendToComment() = comment.append(ch)
-
- if (ch == '/') {
- do {
- appendToComment()
- nextChar()
- } while ((ch != CR) && (ch != LF) && (ch != SU))
- } else {
- docComment = null
- var openComments = 1
- appendToComment()
- nextChar()
- appendToComment()
- var buildingDocComment = false
- if (ch == '*' && buildDocs) {
- buildingDocComment = true
- }
- while (openComments > 0) {
- do {
- do {
- if (ch == '/') {
- nextChar(); appendToComment()
- if (ch == '*') {
- nextChar(); appendToComment()
- openComments += 1
- }
- }
- if (ch != '*' && ch != SU) {
- nextChar(); appendToComment()
- }
- } while (ch != '*' && ch != SU)
- while (ch == '*') {
- nextChar(); appendToComment()
- }
- } while (ch != '/' && ch != SU)
- if (ch == '/') nextChar()
- else incompleteInputError("unclosed comment")
- openComments -= 1
- }
-
- if (buildingDocComment)
- foundDocComment(comment.toString, offset, charOffset - 2)
- }
-
- foundComment(comment.toString, offset, charOffset - 2)
- true
- } else {
- false
+ fetchOther()
}
}
@@ -924,7 +909,7 @@ trait Scanners extends ScannersCommon {
}
}
- def intVal: Long = intVal(false)
+ def intVal: Long = intVal(negated = false)
/** Convert current strVal, base to double value
*/
@@ -956,7 +941,7 @@ trait Scanners extends ScannersCommon {
}
}
- def floatVal: Double = floatVal(false)
+ def floatVal: Double = floatVal(negated = false)
def checkNoLetter() {
if (isIdentifierPart(ch) && ch >= ' ')
@@ -1003,9 +988,9 @@ trait Scanners extends ScannersCommon {
val c = lookahead.getc()
/** As of scala 2.11, it isn't a number unless c here is a digit, so
- * opt.future excludes the rest of the logic.
+ * settings.future.value excludes the rest of the logic.
*/
- if (opt.future && !isDigit(c))
+ if (settings.future.value && !isDigit(c))
return setStrVal()
val isDefinitelyNumber = (c: @switch) match {
@@ -1059,7 +1044,6 @@ trait Scanners extends ScannersCommon {
def syntaxError(off: Offset, msg: String) {
error(off, msg)
token = ERROR
- errOffset = off
}
/** generate an error at the current token offset
@@ -1072,7 +1056,6 @@ trait Scanners extends ScannersCommon {
def incompleteInputError(msg: String) {
incompleteInputError(offset, msg)
token = EOF
- errOffset = offset
}
override def toString() = token match {
@@ -1237,7 +1220,6 @@ trait Scanners extends ScannersCommon {
override val decodeUni: Boolean = !settings.nouescape.value
// suppress warnings, throw exception on errors
- def warning(off: Offset, msg: String): Unit = ()
def deprecationWarning(off: Offset, msg: String): Unit = ()
def error (off: Offset, msg: String): Unit = throw new MalformedInput(off, msg)
def incompleteInputError(off: Offset, msg: String): Unit = throw new MalformedInput(off, msg)
@@ -1245,10 +1227,9 @@ trait Scanners extends ScannersCommon {
/** A scanner over a given compilation unit
*/
- class UnitScanner(unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) {
+ class UnitScanner(val unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) {
def this(unit: CompilationUnit) = this(unit, List())
- override def warning(off: Offset, msg: String) = unit.warning(unit.position(off), msg)
override def deprecationWarning(off: Offset, msg: String) = unit.deprecationWarning(unit.position(off), msg)
override def error (off: Offset, msg: String) = unit.error(unit.position(off), msg)
override def incompleteInputError(off: Offset, msg: String) = unit.incompleteInputError(unit.position(off), msg)
@@ -1294,23 +1275,21 @@ trait Scanners extends ScannersCommon {
}
}
}
-
- override def foundComment(value: String, start: Int, end: Int) {
- val pos = new RangePosition(unit.source, start, start, end)
- unit.comment(pos, value)
- }
-
- override def foundDocComment(value: String, start: Int, end: Int) {
- val docPos = new RangePosition(unit.source, start, start, end)
- docComment = new DocComment(value, docPos)
- unit.comment(docPos, value)
- }
}
class ParensAnalyzer(unit: CompilationUnit, patches: List[BracePatch]) extends UnitScanner(unit, patches) {
- var balance = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+ val balance = mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+
+ /** The source code with braces and line starts annotated with [NN] showing the index */
+ private def markedSource = {
+ val code = unit.source.content
+ val braces = code.indices filter (idx => "{}\n" contains code(idx)) toSet;
+ val mapped = code.indices map (idx => if (braces(idx)) s"${code(idx)}[$idx]" else "" + code(idx))
+ mapped.mkString("")
+ }
init()
+ log(s"ParensAnalyzer for ${unit.source} of length ${unit.source.content.length}\n```\n$markedSource\n```")
/** The offset of the first token on this line, or next following line if blank
*/
@@ -1386,17 +1365,24 @@ trait Scanners extends ScannersCommon {
bpbuf += current
}
}
+ def bracePairString(bp: BracePair, indent: Int): String = {
+ val rangeString = {
+ import bp._
+ val lline = line(loff)
+ val rline = line(roff)
+ val tokens = List(lline, lindent, rline, rindent) map (n => if (n < 0) "??" else "" + n)
+ "%s:%s to %s:%s".format(tokens: _*)
+ }
+ val outer = (" " * indent) + rangeString
+ val inners = bp.nested map (bracePairString(_, indent + 2))
- def printBP(bp: BracePair, indent: Int) {
- println(" "*indent+line(bp.loff)+":"+bp.lindent+" to "+line(bp.roff)+":"+bp.rindent)
- if (bp.nested.nonEmpty)
- for (bp1 <- bp.nested) {
- printBP(bp1, indent + 2)
- }
+ if (inners.isEmpty) outer
+ else inners.mkString(outer + "\n", "\n", "")
}
-// println("lineStart = "+lineStart)//DEBUG
-// println("bracepairs = ")
-// for (bp <- bpbuf.toList) printBP(bp, 0)
+ def bpString = bpbuf.toList map ("\n" + bracePairString(_, 0)) mkString ""
+ def startString = lineStart.mkString("line starts: [", ", ", "]")
+
+ log(s"\n$startString\n$bpString")
bpbuf.toList
}
@@ -1430,18 +1416,6 @@ trait Scanners extends ScannersCommon {
else bp :: insertPatch(bps, patch)
}
- def leftColumn(offset: Int) =
- if (offset == -1) -1 else column(lineStart(line(offset)))
-
- def rightColumn(offset: Int, default: Int) =
- if (offset == -1) -1
- else {
- val rlin = line(offset)
- if (lineStart(rlin) == offset) column(offset)
- else if (rlin + 1 < lineStart.length) column(lineStart(rlin + 1))
- else default
- }
-
def insertRBrace(): List[BracePatch] = {
def insert(bps: List[BracePair]): List[BracePatch] = bps match {
case List() => patches
@@ -1456,7 +1430,7 @@ trait Scanners extends ScannersCommon {
while (lin < lineStart.length && column(lineStart(lin)) > lindent)
lin += 1
if (lin < lineStart.length) {
- val patches1 = insertPatch(patches, BracePatch(lineStart(lin), true))
+ val patches1 = insertPatch(patches, BracePatch(lineStart(lin), inserted = true))
//println("patch for "+bp+"/"+imbalanceMeasure+"/"+new ParensAnalyzer(unit, patches1).imbalanceMeasure)
/*if (improves(patches1))*/
patches1
@@ -1477,23 +1451,12 @@ trait Scanners extends ScannersCommon {
else {
val patches1 = delete(nested)
if (patches1 ne patches) patches1
- else insertPatch(patches, BracePatch(roff, false))
+ else insertPatch(patches, BracePatch(roff, inserted = false))
}
}
delete(bracePairs)
}
- def imbalanceMeasure: Int = {
- def measureList(bps: List[BracePair]): Int =
- (bps map measure).sum
- def measure(bp: BracePair): Int =
- (if (bp.lindent != bp.rindent) 1 else 0) + measureList(bp.nested)
- measureList(bracePairs)
- }
-
- def improves(patches1: List[BracePatch]): Boolean =
- imbalanceMeasure > new ParensAnalyzer(unit, patches1).imbalanceMeasure
-
// don't emit deprecation warnings about identifiers like `macro` or `then`
// when skimming through the source file trying to heal braces
override def emitIdentifierDeprecationWarnings = false
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index e8ef670222..cdcfd0b834 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -11,7 +11,6 @@ import scala.xml.{ EntityRef, Text }
import scala.xml.XML.{ xmlns }
import symtab.Flags.MUTABLE
import scala.reflect.internal.util.StringOps.splitWhere
-import scala.language.implicitConversions
/** This class builds instance of `Tree` that represent XML.
*
@@ -133,7 +132,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
case (Some(pre), rest) => (const(pre), const(rest))
case _ => (wild, const(n))
}
- mkXML(pos, true, prepat, labpat, null, null, false, args)
+ mkXML(pos, isPattern = true, prepat, labpat, null, null, empty = false, args)
}
protected def convertToTextPat(t: Tree): Tree = t match {
@@ -169,7 +168,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
}
/** Returns (Some(prefix) | None, rest) based on position of ':' */
- def splitPrefix(name: String): (Option[String], String) = splitWhere(name, _ == ':', true) match {
+ def splitPrefix(name: String): (Option[String], String) = splitWhere(name, _ == ':', doDropIndex = true) match {
case Some((pre, rest)) => (Some(pre), rest)
case _ => (None, name)
}
@@ -247,7 +246,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
val body = mkXML(
pos.makeTransparent,
- false,
+ isPattern = false,
const(pre),
const(newlabel),
makeSymbolicAttrs,
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
index 8a9ce8907e..7cf5a07291 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
@@ -23,10 +23,14 @@ abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParse
def apply(unit: global.CompilationUnit) {
import global._
informProgress("parsing " + unit)
- unit.body =
- if (unit.isJava) new JavaUnitParser(unit).parse()
- else if (reporter.incompleteHandled) new UnitParser(unit).parse()
- else new UnitParser(unit).smartParse()
+ // if the body is already filled in, do nothing
+ // otherwise compileLate is going to overwrite bodies of synthetic source files
+ if (unit.body == EmptyTree) {
+ unit.body =
+ if (unit.isJava) new JavaUnitParser(unit).parse()
+ else if (reporter.incompleteHandled) newUnitParser(unit).parse()
+ else newUnitParser(unit).smartParse()
+ }
if (settings.Yrangepos.value && !reporter.hasErrors)
validatePositions(unit.body)
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
index c3fd414426..5a7dc4950d 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
@@ -6,15 +6,11 @@
package scala.tools.nsc
package ast.parser
-import scala.annotation.switch
-
/** Common code between JavaTokens and Tokens. Not as much (and not as concrete)
* as one might like because JavaTokens for no clear reason chose new numbers for
* identical token sets.
*/
abstract class Tokens {
- import scala.reflect.internal.Chars._
-
/** special tokens */
final val EMPTY = -3
final val UNDEF = -2
@@ -34,14 +30,6 @@ abstract class Tokens {
def isIdentifier(code: Int): Boolean
def isLiteral(code: Int): Boolean
- def isKeyword(code: Int): Boolean
- def isSymbol(code: Int): Boolean
-
- final def isSpace(at: Char) = at == ' ' || at == '\t'
- final def isNewLine(at: Char) = at == CR || at == LF || at == FF
- final def isBrace(code: Int) = code >= LPAREN && code <= RBRACE
- final def isOpenBrace(code: Int) = isBrace(code) && (code % 2 == 0)
- final def isCloseBrace(code: Int) = isBrace(code) && (code % 2 == 1)
}
object Tokens extends Tokens {
@@ -52,20 +40,10 @@ object Tokens extends Tokens {
def isLiteral(code: Int) =
code >= CHARLIT && code <= INTERPOLATIONID
-
/** identifiers */
final val IDENTIFIER = 10
final val BACKQUOTED_IDENT = 11
- def isIdentifier(code: Int) =
- code >= IDENTIFIER && code <= BACKQUOTED_IDENT
-
- @switch def canBeginExpression(code: Int) = code match {
- case IDENTIFIER|BACKQUOTED_IDENT|USCORE => true
- case LBRACE|LPAREN|LBRACKET|COMMENT => true
- case IF|DO|WHILE|FOR|NEW|TRY|THROW => true
- case NULL|THIS|TRUE|FALSE => true
- case code => isLiteral(code)
- }
+ def isIdentifier(code: Int) = code >= IDENTIFIER && code <= BACKQUOTED_IDENT // used by ide
/** keywords */
final val IF = 20
@@ -113,17 +91,6 @@ object Tokens extends Tokens {
final val MACRO = 62 // not yet used in 2.10
final val THEN = 63 // not yet used in 2.10
- def isKeyword(code: Int) =
- code >= IF && code <= LAZY
-
- @switch def isDefinition(code: Int) = code match {
- case CLASS|TRAIT|OBJECT => true
- case CASECLASS|CASEOBJECT => true
- case DEF|VAL|VAR => true
- case TYPE => true
- case _ => false
- }
-
/** special symbols */
final val COMMA = 70
final val SEMI = 71
@@ -141,9 +108,6 @@ object Tokens extends Tokens {
final val AT = 83
final val VIEWBOUND = 84
- def isSymbol(code: Int) =
- code >= COMMA && code <= VIEWBOUND
-
/** parenthesis */
final val LPAREN = 90
final val RPAREN = 91
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index cd93221c50..d70b1f4d9c 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -26,15 +26,11 @@ abstract class TreeBuilder {
def o2p(offset: Int): Position
def r2p(start: Int, point: Int, end: Int): Position
- def rootId(name: Name) = gen.rootId(name)
def rootScalaDot(name: Name) = gen.rootScalaDot(name)
def scalaDot(name: Name) = gen.scalaDot(name)
def scalaAnyRefConstr = scalaDot(tpnme.AnyRef)
- def scalaAnyValConstr = scalaDot(tpnme.AnyVal)
- def scalaAnyConstr = scalaDot(tpnme.Any)
def scalaUnitConstr = scalaDot(tpnme.Unit)
def productConstr = scalaDot(tpnme.Product)
- def productConstrN(n: Int) = scalaDot(newTypeName("Product" + n))
def serializableConstr = scalaDot(tpnme.Serializable)
def convertToTypeName(t: Tree) = gen.convertToTypeName(t)
@@ -137,7 +133,7 @@ abstract class TreeBuilder {
def makeTupleTerm(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
case Nil => Literal(Constant())
case List(tree) if flattenUnary => tree
- case _ => makeTuple(trees, false)
+ case _ => makeTuple(trees, isType = false)
}
def makeTupleType(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
@@ -147,7 +143,7 @@ abstract class TreeBuilder {
}
def stripParens(t: Tree) = t match {
- case Parens(ts) => atPos(t.pos) { makeTupleTerm(ts, true) }
+ case Parens(ts) => atPos(t.pos) { makeTupleTerm(ts, flattenUnary = true) }
case _ => t
}
@@ -175,15 +171,10 @@ abstract class TreeBuilder {
/** Create tree representing (unencoded) binary operation expression or pattern. */
def makeBinop(isExpr: Boolean, left: Tree, op: TermName, right: Tree, opPos: Position): Tree = {
- def mkNamed(args: List[Tree]) =
- if (isExpr) args map {
- case a @ Assign(id @ Ident(name), rhs) =>
- atPos(a.pos) { AssignOrNamedArg(id, rhs) }
- case e => e
- } else args
+ def mkNamed(args: List[Tree]) = if (isExpr) args map treeInfo.assignmentToMaybeNamedArg else args
val arguments = right match {
case Parens(args) => mkNamed(args)
- case _ => List(right)
+ case _ => List(right)
}
if (isExpr) {
if (treeInfo.isLeftAssoc(op)) {
@@ -191,7 +182,7 @@ abstract class TreeBuilder {
} else {
val x = freshTermName()
Block(
- List(ValDef(Modifiers(SYNTHETIC), x, TypeTree(), stripParens(left))),
+ List(ValDef(Modifiers(SYNTHETIC | ARTIFACT), x, TypeTree(), stripParens(left))),
Apply(atPos(opPos union right.pos) { Select(stripParens(right), op.encode) }, List(Ident(x))))
}
} else {
@@ -205,20 +196,26 @@ abstract class TreeBuilder {
*/
def makeAnonymousNew(stats: List[Tree]): Tree = {
val stats1 = if (stats.isEmpty) List(Literal(Constant(()))) else stats
- makeNew(Nil, emptyValDef, stats1, ListOfNil, NoPosition, NoPosition)
+ makeNew(Nil, emptyValDef, stats1, NoPosition, NoPosition)
}
/** Create positioned tree representing an object creation <new parents { stats }
* @param npos the position of the new
* @param cpos the position of the anonymous class starting with parents
*/
- def makeNew(parents: List[Tree], self: ValDef, stats: List[Tree], argss: List[List[Tree]],
+ def makeNew(parents: List[Tree], self: ValDef, stats: List[Tree],
npos: Position, cpos: Position): Tree =
if (parents.isEmpty)
- makeNew(List(scalaAnyRefConstr), self, stats, argss, npos, cpos)
- else if (parents.tail.isEmpty && stats.isEmpty)
- atPos(npos union cpos) { New(parents.head, argss) }
- else {
+ makeNew(List(scalaAnyRefConstr), self, stats, npos, cpos)
+ else if (parents.tail.isEmpty && stats.isEmpty) {
+ // `Parsers.template` no longer differentiates tpts and their argss
+ // e.g. `C()` will be represented as a single tree Apply(Ident(C), Nil)
+ // instead of parents = Ident(C), argss = Nil as before
+ // this change works great for things that are actually templates
+ // but in this degenerate case we need to perform postprocessing
+ val app = treeInfo.dissectApplied(parents.head)
+ atPos(npos union cpos) { New(app.callee, app.argss) }
+ } else {
val x = tpnme.ANON_CLASS_NAME
atPos(npos union cpos) {
Block(
@@ -226,12 +223,12 @@ abstract class TreeBuilder {
atPos(cpos) {
ClassDef(
Modifiers(FINAL), x, Nil,
- Template(parents, self, NoMods, ListOfNil, argss, stats, cpos.focus))
+ Template(parents, self, NoMods, ListOfNil, stats, cpos.focus))
}),
atPos(npos) {
New(
Ident(x) setPos npos.focus,
- ListOfNil)
+ Nil)
}
)
}
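
The comment inside makeNew above describes the inverse of the templateParents fold: for `new C(1)(2)` the single parent arrives as nested Apply nodes, and treeInfo.dissectApplied recovers the callee and its argument lists again. A rough standalone sketch of that peeling, on the same kind of toy Tree as earlier (hypothetical types, not the compiler's):

    sealed trait Tree
    case class Ident(name: String)                extends Tree
    case class Lit(value: Int)                    extends Tree
    case class Apply(fun: Tree, args: List[Tree]) extends Tree

    object DissectDemo extends App {
      // Peel nested Apply nodes back into (callee, argss).
      def dissect(t: Tree, acc: List[List[Tree]] = Nil): (Tree, List[List[Tree]]) = t match {
        case Apply(fun, args) => dissect(fun, args :: acc)
        case callee           => (callee, acc)
      }

      val parsed = Apply(Apply(Ident("C"), List(Lit(1))), List(Lit(2)))  // parsed form of `new C(1)(2)`
      println(dissect(parsed))   // (Ident(C),List(List(Lit(1)), List(Lit(2))))
    }
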
@@ -276,7 +273,7 @@ abstract class TreeBuilder {
CaseDef(condition, EmptyTree, Literal(Constant(true))),
CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))
)
- val matchTree = makeVisitor(cases, false, scrutineeName)
+ val matchTree = makeVisitor(cases, checkExhaustive = false, scrutineeName)
atPos(tree.pos)(Apply(Select(tree, nme.withFilter), matchTree :: Nil))
}
@@ -369,7 +366,7 @@ abstract class TreeBuilder {
body) setPos splitpos
case None =>
atPos(splitpos) {
- makeVisitor(List(CaseDef(pat, EmptyTree, body)), false)
+ makeVisitor(List(CaseDef(pat, EmptyTree, body)), checkExhaustive = false)
}
}
}
@@ -379,13 +376,6 @@ abstract class TreeBuilder {
def makeCombination(pos: Position, meth: TermName, qual: Tree, pat: Tree, body: Tree): Tree =
Apply(Select(qual, meth) setPos qual.pos, List(makeClosure(pos, pat, body))) setPos pos
- /** Optionally, if pattern is a `Bind`, the bound name, otherwise None.
- */
- def patternVar(pat: Tree): Option[Name] = pat match {
- case Bind(name, _) => Some(name)
- case _ => None
- }
-
/** If `pat` is not yet a `Bind` wrap it in one with a fresh name
*/
def makeBind(pat: Tree): Tree = pat match {
@@ -421,7 +411,7 @@ abstract class TreeBuilder {
ValFrom(pos, pat, makeCombination(rhs.pos union test.pos, nme.withFilter, rhs, pat.duplicate, test)) :: rest,
body)
case ValFrom(pos, pat, rhs) :: rest =>
- val valeqs = rest.take(definitions.MaxTupleArity - 1).takeWhile(_.isInstanceOf[ValEq]);
+ val valeqs = rest.take(definitions.MaxTupleArity - 1).takeWhile(_.isInstanceOf[ValEq])
assert(!valeqs.isEmpty)
val rest1 = rest.drop(valeqs.length)
val pats = valeqs map { case ValEq(_, pat, _) => pat }
@@ -432,9 +422,9 @@ abstract class TreeBuilder {
val ids = (defpat1 :: defpats) map makeValue
val rhs1 = makeForYield(
List(ValFrom(pos, defpat1, rhs)),
- Block(pdefs, atPos(wrappingPos(ids)) { makeTupleTerm(ids, true) }) setPos wrappingPos(pdefs))
+ Block(pdefs, atPos(wrappingPos(ids)) { makeTupleTerm(ids, flattenUnary = true) }) setPos wrappingPos(pdefs))
val allpats = (pat :: pats) map (_.duplicate)
- val vfrom1 = ValFrom(r2p(pos.startOrPoint, pos.point, rhs1.pos.endOrPoint), atPos(wrappingPos(allpats)) { makeTuple(allpats, false) } , rhs1)
+ val vfrom1 = ValFrom(r2p(pos.startOrPoint, pos.point, rhs1.pos.endOrPoint), atPos(wrappingPos(allpats)) { makeTuple(allpats, isType = false) } , rhs1)
makeFor(mapName, flatMapName, vfrom1 :: rest1, body)
case _ =>
EmptyTree //may happen for erroneous input
@@ -451,18 +441,6 @@ abstract class TreeBuilder {
def makeForYield(enums: List[Enumerator], body: Tree): Tree =
makeFor(nme.map, nme.flatMap, enums, body)
- /** Create tree for a lifted expression XX-LIFTING
- */
- def makeLifted(gs: List[ValFrom], body: Tree): Tree = {
- def combine(gs: List[ValFrom]): ValFrom = (gs: @unchecked) match {
- case g :: Nil => g
- case ValFrom(pos1, pat1, rhs1) :: gs2 =>
- val ValFrom(pos2, pat2, rhs2) = combine(gs2)
- ValFrom(pos1, makeTuple(List(pat1, pat2), false), Apply(Select(rhs1, nme.zip), List(rhs2)))
- }
- makeForYield(List(combine(gs)), body)
- }
-
/** Create tree for a pattern alternative */
def makeAlternative(ts: List[Tree]): Tree = {
def alternatives(t: Tree): List[Tree] = t match {
@@ -497,7 +475,7 @@ abstract class TreeBuilder {
def makeCatchFromExpr(catchExpr: Tree): CaseDef = {
val binder = freshTermName("x")
val pat = Bind(binder, Typed(Ident(nme.WILDCARD), Ident(tpnme.Throwable)))
- val catchDef = ValDef(NoMods, freshTermName("catchExpr"), TypeTree(), catchExpr)
+ val catchDef = ValDef(Modifiers(ARTIFACT), freshTermName("catchExpr"), TypeTree(), catchExpr)
val catchFn = Ident(catchDef.name)
val body = atPos(catchExpr.pos.makeTransparent)(Block(
List(catchDef),
@@ -555,7 +533,7 @@ abstract class TreeBuilder {
rhs1,
List(
atPos(pat1.pos) {
- CaseDef(pat1, EmptyTree, makeTupleTerm(vars map (_._1) map Ident.apply, true))
+ CaseDef(pat1, EmptyTree, makeTupleTerm(vars map (_._1) map Ident.apply, flattenUnary = true))
}
))
}
@@ -568,7 +546,7 @@ abstract class TreeBuilder {
val tmp = freshTermName()
val firstDef =
atPos(matchExpr.pos) {
- ValDef(Modifiers(PrivateLocal | SYNTHETIC | (mods.flags & LAZY)),
+ ValDef(Modifiers(PrivateLocal | SYNTHETIC | ARTIFACT | (mods.flags & LAZY)),
tmp, TypeTree(), matchExpr)
}
var cnt = 0
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
index fc5d4372c5..08602f87dc 100644
--- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -8,7 +8,6 @@ package backend
import io.AbstractFile
import util.{ClassPath,JavaClassPath,MergedClassPath,DeltaClassPath}
-import util.ClassPath.{ JavaContext, DefaultJavaContext }
import scala.tools.util.PathResolver
trait JavaPlatform extends Platform {
@@ -39,18 +38,10 @@ trait JavaPlatform extends Platform {
// replaces the tighter abstract definition here. If we had DOT typing rules, the two
// types would be conjoined and everything would work out. Yet another reason to push for DOT.
- private def depAnalysisPhase =
- if (settings.make.isDefault) Nil
- else List(dependencyAnalysis)
-
- private def classEmitPhase =
- if (settings.target.value == "jvm-1.5-fjbg") genJVM
- else genASM
-
def platformPhases = List(
flatten, // get rid of inner classes
- classEmitPhase // generate .class files
- ) ++ depAnalysisPhase
+ genASM // generate .class files
+ )
lazy val externalEquals = getDecl(BoxesRunTimeClass, nme.equals_)
lazy val externalEqualsNumNum = getDecl(BoxesRunTimeClass, nme.equalsNumNum)
diff --git a/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala b/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala
deleted file mode 100644
index 4493685b52..0000000000
--- a/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala
+++ /dev/null
@@ -1,69 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package backend
-
-import ch.epfl.lamp.compiler.{ msil => msillib }
-import util.{ ClassPath, MsilClassPath }
-import msil.GenMSIL
-import io.{ AbstractFile, MsilFile }
-
-trait MSILPlatform extends Platform {
- import global._
- import definitions.{ ComparatorClass, BoxedNumberClass, getMember }
-
- type BinaryRepr = MsilFile
-
- if (settings.verbose.value)
- inform("[AssemRefs = " + settings.assemrefs.value + "]")
-
- // phaseName = "msil"
- object genMSIL extends {
- val global: MSILPlatform.this.global.type = MSILPlatform.this.global
- val runsAfter = List[String]("dce")
- val runsRightAfter = None
- } with GenMSIL
-
- lazy val classPath = MsilClassPath.fromSettings(settings)
- def rootLoader = new loaders.PackageLoader(classPath.asInstanceOf[ClassPath[platform.BinaryRepr]])
- // See discussion in JavaPlatForm for why we need a cast here.
-
- /** Update classpath with a substituted subentry */
- def updateClassPath(subst: Map[ClassPath[BinaryRepr], ClassPath[BinaryRepr]]) =
- throw new UnsupportedOperationException("classpath invalidations not supported on MSIL")
-
- def platformPhases = List(
- genMSIL // generate .msil files
- )
-
- lazy val externalEquals = getMember(ComparatorClass.companionModule, nme.equals_)
- def isMaybeBoxed(sym: Symbol) = sym isNonBottomSubClass BoxedNumberClass
-
- def newClassLoader(bin: MsilFile): loaders.SymbolLoader = new loaders.MsilFileLoader(bin)
-
- /**
- * Tells whether a class should be loaded and entered into the package
- * scope. On .NET, this method returns `false` for all synthetic classes
- * (anonymous classes, implementation classes, module classes), their
- * symtab is encoded in the pickle of another class.
- */
- def doLoad(cls: ClassPath[BinaryRepr]#ClassRep): Boolean = {
- if (cls.binary.isDefined) {
- val typ = cls.binary.get.msilType
- if (typ.IsDefined(loaders.clrTypes.SCALA_SYMTAB_ATTR, false)) {
- val attrs = typ.GetCustomAttributes(loaders.clrTypes.SCALA_SYMTAB_ATTR, false)
- assert(attrs.length == 1, attrs.length)
- val a = attrs(0).asInstanceOf[msillib.Attribute]
- // symtab_constr takes a byte array argument (the pickle), i.e. typ has a pickle.
- // otherwise, symtab_default_constr was used, which marks typ as scala-synthetic.
- a.getConstructor() == loaders.clrTypes.SYMTAB_CONSTR
- } else true // always load non-scala types
- } else true // always load source
- }
-
- def needCompile(bin: MsilFile, src: AbstractFile) =
- false // always use compiled file on .net
-}
diff --git a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
index 8cbb5bc980..1f9862596c 100644
--- a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc
package backend
-import scala.tools.nsc.backend.icode._
import scala.collection.{ mutable, immutable }
/** Scala primitive operations are represented as methods in `Any` and
@@ -495,8 +494,8 @@ abstract class ScalaPrimitives {
def isArraySet(code: Int): Boolean = code match {
case ZARRAY_SET | BARRAY_SET | SARRAY_SET | CARRAY_SET |
IARRAY_SET | LARRAY_SET | FARRAY_SET | DARRAY_SET |
- OARRAY_SET | UPDATE => true;
- case _ => false;
+ OARRAY_SET | UPDATE => true
+ case _ => false
}
/** Check whether the given code is a comparison operator */
@@ -515,7 +514,7 @@ abstract class ScalaPrimitives {
DIV | MOD => true; // binary
case OR | XOR | AND |
LSL | LSR | ASR => true; // bitwise
- case _ => false;
+ case _ => false
}
def isLogicalOp(code: Int): Boolean = code match {
@@ -565,7 +564,7 @@ abstract class ScalaPrimitives {
import definitions._
val code = getPrimitive(fun)
- def elementType = beforeTyper {
+ def elementType = enteringTyper {
val arrayParent = tpe :: tpe.parents collectFirst {
case TypeRef(_, ArrayClass, elem :: Nil) => elem
}
diff --git a/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala b/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala
index 798a80ea37..45ca39fee4 100644
--- a/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala
+++ b/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala
@@ -6,8 +6,7 @@
package scala.tools.nsc
package backend
-import scala.tools.nsc.ast._
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
/**
* Simple implementation of a worklist algorithm. A processing
@@ -32,8 +31,6 @@ trait WorklistAlgorithm {
* Run the iterative algorithm until the worklist remains empty.
* The initializer is run once before the loop starts and should
* initialize the worklist.
- *
- * @param initWorklist ...
*/
def run(initWorklist: => Unit) = {
initWorklist
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index d50d4cd125..d772dcb6c4 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -17,7 +17,7 @@ trait BasicBlocks {
self: ICodes =>
import opcodes._
- import global.{ ifDebug, settings, log, nme }
+ import global.{ settings, debuglog, log, nme }
import nme.isExceptionResultName
/** Override Array creation for efficiency (to not go through reflection). */
@@ -122,7 +122,7 @@ trait BasicBlocks {
def closed: Boolean = hasFlag(CLOSED)
def closed_=(b: Boolean) = if (b) setFlag(CLOSED) else resetFlag(CLOSED)
- /** When set, the <code>emit</code> methods will be ignored. */
+ /** When set, the `emit` methods will be ignored. */
def ignore: Boolean = hasFlag(IGNORING)
def ignore_=(b: Boolean) = if (b) setFlag(IGNORING) else resetFlag(IGNORING)
@@ -260,13 +260,9 @@ trait BasicBlocks {
}
}
- /** Replaces <code>oldInstr</code> with <code>is</code>. It does not update
+ /** Replaces `oldInstr` with `is`. It does not update
* the position field in the newly inserted instructions, so it behaves
* differently than the one-instruction versions of this function.
- *
- * @param iold ..
- * @param is ..
- * @return ..
*/
def replaceInstruction(oldInstr: Instruction, is: List[Instruction]): Boolean = {
assert(closed, "Instructions can be replaced only after the basic block is closed")
@@ -280,17 +276,7 @@ trait BasicBlocks {
}
}
- /** Insert instructions in 'is' immediately after index 'idx'. */
- def insertAfter(idx: Int, is: List[Instruction]) {
- assert(closed, "Instructions can be replaced only after the basic block is closed")
-
- instrs = instrs.patch(idx + 1, is, 0)
- code.touched = true
- }
-
/** Removes instructions found at the given positions.
- *
- * @param positions ...
*/
def removeInstructionsAt(positions: Int*) {
assert(closed, this)
@@ -311,8 +297,6 @@ trait BasicBlocks {
}
/** Replaces all instructions found in the map.
- *
- * @param map ...
*/
def subst(map: Map[Instruction, Instruction]): Unit =
if (!closed)
@@ -344,10 +328,6 @@ trait BasicBlocks {
* is closed, which sets the DIRTYSUCCS flag.
*/
def emit(instr: Instruction, pos: Position) {
-/* if (closed) {
- print()
- Console.println("trying to emit: " + instr)
- } */
assert(!closed || ignore, this)
if (ignore) {
@@ -403,7 +383,6 @@ trait BasicBlocks {
/** Close the block */
def close() {
assert(!closed || ignore, this)
- assert(instructionList.nonEmpty, "Empty block: " + this)
if (ignore && closed) { // redundant `ignore &&` for clarity -- we should never be in state `!ignore && closed`
// not doing anything to this block is important...
// because the else branch reverses innocent blocks, which is wrong when they're in ignore mode (and closed)
@@ -413,9 +392,38 @@ trait BasicBlocks {
setFlag(DIRTYSUCCS)
instructionList = instructionList.reverse
instrs = instructionList.toArray
+ if (instructionList.isEmpty) {
+ debuglog(s"Removing empty block $this")
+ code removeBlock this
+ }
}
}
+ /**
+ * if cond is true, closes this block, entersIgnoreMode, and removes the block from
+ * its list of blocks. Used to allow a block to be started and then cancelled when it
+ * is discovered to be unreachable.
+ */
+ def killIf(cond: Boolean) {
+ if (!settings.YdisableUnreachablePrevention.value && cond) {
+ debuglog(s"Killing block $this")
+ assert(instructionList.isEmpty, s"Killing a non empty block $this")
+ // only checked under debug because fetching predecessor list is moderately expensive
+ if (settings.debug.value)
+ assert(predecessors.isEmpty, s"Killing block $this which is referred to from ${predecessors.mkString}")
+
+ close()
+ enterIgnoreMode()
+ }
+ }
+
+ /**
+ * Same as killIf but with the logic of the condition reversed
+ */
+ def killUnless(cond: Boolean) {
+ this killIf !cond
+ }
+
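
killIf/killUnless above let code generation open a basic block speculatively and then cancel it (close it, enter ignore mode, unlink it) once it turns out to be unreachable, e.g. a continuation block when both branches of an if end in a throw. A toy model of that behaviour, with made-up Block plumbing standing in for the ICode types:

    import scala.collection.mutable.ListBuffer

    final class Block(val label: Int) {
      var closed  = false
      var ignored = false
      val instrs  = ListBuffer[String]()

      def close()           = closed  = true
      def enterIgnoreMode() = ignored = true

      def killIf(cond: Boolean): Unit = if (cond) {
        require(instrs.isEmpty, s"killing non-empty block $label")
        close(); enterIgnoreMode()
      }
      def killUnless(cond: Boolean): Unit = killIf(!cond)

      override def toString = s"B$label(closed=$closed, ignored=$ignored)"
    }

    object KillDemo extends App {
      val cont = new Block(3)
      val thenFallsThrough, elseFallsThrough = false   // both branches end in a throw
      cont killUnless (thenFallsThrough || elseFallsThrough)
      println(cont)                                    // B3(closed=true, ignored=true)
    }
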
def open() {
assert(closed, this)
closed = false
@@ -441,11 +449,6 @@ trait BasicBlocks {
ignore = true
}
- def exitIgnoreMode() {
- assert(ignore, "Exit ignore mode when not in ignore mode: " + this)
- ignore = false
- }
-
/** Return the last instruction of this basic block. */
def lastInstruction =
if (closed) instrs(instrs.length - 1)
@@ -502,17 +505,6 @@ trait BasicBlocks {
override def hashCode = label * 41 + code.hashCode
- // Instead of it, rather use a printer
- def print() { print(java.lang.System.out) }
-
- def print(out: java.io.PrintStream) {
- out.println("block #"+label+" :")
- foreach(i => out.println(" " + i))
- out.print("Successors: ")
- successors.foreach((x: BasicBlock) => out.print(" "+x.label.toString()))
- out.println()
- }
-
private def succString = if (successors.isEmpty) "[S: N/A]" else successors.distinct.mkString("[S: ", ", ", "]")
private def predString = if (predecessors.isEmpty) "[P: N/A]" else predecessors.distinct.mkString("[P: ", ", ", "]")
@@ -532,18 +524,6 @@ trait BasicBlocks {
}
object BBFlags {
- val flagMap = Map[Int, String](
- LOOP_HEADER -> "loopheader",
- IGNORING -> "ignore",
- EX_HEADER -> "exheader",
- CLOSED -> "closed",
- DIRTYSUCCS -> "dirtysuccs",
- DIRTYPREDS -> "dirtypreds"
- )
- def flagsToString(flags: Int) = {
- flagMap collect { case (bit, name) if (bit & flags) != 0 => "<" + name + ">" } mkString " "
- }
-
/** This block is a loop header (was translated from a while). */
final val LOOP_HEADER = (1 << 0)
@@ -561,4 +541,16 @@ object BBFlags {
/** Code has been changed, recompute predecessors. */
final val DIRTYPREDS = (1 << 5)
+
+ val flagMap = Map[Int, String](
+ LOOP_HEADER -> "loopheader",
+ IGNORING -> "ignore",
+ EX_HEADER -> "exheader",
+ CLOSED -> "closed",
+ DIRTYSUCCS -> "dirtysuccs",
+ DIRTYPREDS -> "dirtypreds"
+ )
+ def flagsToString(flags: Int) = {
+ flagMap collect { case (bit, name) if (bit & flags) != 0 => "<" + name + ">" } mkString " "
+ }
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
index 2cebf7ad99..7243264773 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
@@ -7,7 +7,7 @@ package scala.tools.nsc
package backend
package icode
-import scala.collection.{ mutable, immutable }
+import scala.collection.immutable
/**
* Exception handlers are pieces of code that `handle` exceptions on
@@ -24,14 +24,11 @@ trait ExceptionHandlers {
class ExceptionHandler(val method: IMethod, val label: TermName, val cls: Symbol, val pos: Position) {
def loadExceptionClass = if (cls == NoSymbol) ThrowableClass else cls
- private var _startBlock: BasicBlock = _;
- var finalizer: Finalizer = _;
-
- /** Needed for the MSIL backend. */
- var resultKind: TypeKind = _;
+ private var _startBlock: BasicBlock = _
+ var finalizer: Finalizer = _
def setStartBlock(b: BasicBlock) = {
- _startBlock = b;
+ _startBlock = b
b.exceptionHandlerStart = true
}
def startBlock = _startBlock
@@ -49,11 +46,11 @@ trait ExceptionHandlers {
/** The body of this exception handler. May contain 'dead' blocks (which will not
* make it into generated code because linearizers may not include them) */
- var blocks: List[BasicBlock] = Nil;
+ var blocks: List[BasicBlock] = Nil
- def addBlock(b: BasicBlock): Unit = blocks = b :: blocks;
+ def addBlock(b: BasicBlock): Unit = blocks = b :: blocks
- override def toString() = "exh_" + label + "(" + cls.simpleName + ")";
+ override def toString() = "exh_" + label + "(" + cls.simpleName + ")"
/** A standard copy constructor */
def this(other: ExceptionHandler) = {
@@ -71,10 +68,4 @@ trait ExceptionHandlers {
override def toString() = "finalizer_" + label
override def dup: Finalizer = new Finalizer(method, label, pos)
}
-
- object NoFinalizer extends Finalizer(null, newTermNameCached("<no finalizer>"), NoPosition) {
- override def startBlock: BasicBlock = sys.error("NoFinalizer cannot have a start block.");
- override def setStartBlock(b: BasicBlock): Unit = sys.error("NoFinalizer cannot have a start block.");
- override def dup = this
- }
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 44d7a1929b..94116d6783 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -13,10 +13,8 @@ import scala.collection.mutable.{ ListBuffer, Buffer }
import scala.tools.nsc.symtab._
import scala.annotation.switch
import PartialFunction._
-import scala.language.postfixOps
-/** This class ...
- *
+/**
* @author Iulian Dragos
* @version 1.0
*/
@@ -52,14 +50,14 @@ abstract class GenICode extends SubComponent {
var unit: CompilationUnit = NoCompilationUnit
override def run() {
- scalaPrimitives.init
+ scalaPrimitives.init()
classes.clear()
super.run()
}
override def apply(unit: CompilationUnit): Unit = {
this.unit = unit
- unit.icode.clear
+ unit.icode.clear()
informProgress("Generating icode for " + unit)
gen(unit.body)
this.unit = NoCompilationUnit
@@ -93,7 +91,7 @@ abstract class GenICode extends SubComponent {
debuglog("Generating class: " + tree.symbol.fullName)
val outerClass = ctx.clazz
ctx setClass (new IClass(tree.symbol) setCompilationUnit unit)
- addClassFields(ctx, tree.symbol);
+ addClassFields(ctx, tree.symbol)
classes += (tree.symbol -> ctx.clazz)
unit.icode += ctx.clazz
gen(impl, ctx)
@@ -121,7 +119,7 @@ abstract class GenICode extends SubComponent {
m.native = m.symbol.hasAnnotation(definitions.NativeAttr)
if (!m.isAbstractMethod && !m.native) {
- ctx1 = genLoad(rhs, ctx1, m.returnType);
+ ctx1 = genLoad(rhs, ctx1, m.returnType)
// reverse the order of the local variables, to match the source-order
m.locals = m.locals.reverse
@@ -139,7 +137,7 @@ abstract class GenICode extends SubComponent {
else
ctx1.bb.closeWith(RETURN(m.returnType))
}
- if (!ctx1.bb.closed) ctx1.bb.close
+ if (!ctx1.bb.closed) ctx1.bb.close()
prune(ctx1.method)
} else
ctx1.method.setCode(NoCode)
@@ -160,18 +158,13 @@ abstract class GenICode extends SubComponent {
* and not produce any value. Use genLoad for expressions which leave
* a value on top of the stack.
*
- * @param tree ...
- * @param ctx ...
* @return a new context. This is necessary for control flow instructions
* which may change the current basic block.
*/
private def genStat(tree: Tree, ctx: Context): Context = tree match {
case Assign(lhs @ Select(_, _), rhs) =>
val isStatic = lhs.symbol.isStaticMember
- var ctx1 = if (isStatic) ctx
- else if (forMSIL && msil_IsValuetypeInstField(lhs.symbol))
- msil_genLoadQualifierAddress(lhs, ctx)
- else genLoadQualifier(lhs, ctx)
+ var ctx1 = if (isStatic) ctx else genLoadQualifier(lhs, ctx)
ctx1 = genLoad(rhs, ctx1, toTypeKind(lhs.symbol.info))
ctx1.bb.emit(STORE_FIELD(lhs.symbol, isStatic), tree.pos)
@@ -193,7 +186,7 @@ abstract class GenICode extends SubComponent {
val thrownKind = toTypeKind(expr.tpe)
val ctx1 = genLoad(expr, ctx, thrownKind)
ctx1.bb.emit(THROW(expr.tpe.typeSymbol), expr.pos)
- ctx1.bb.enterIgnoreMode
+ ctx1.bb.enterIgnoreMode()
(ctx1, NothingReference)
}
@@ -231,10 +224,10 @@ abstract class GenICode extends SubComponent {
// binary operation
case rarg :: Nil =>
- resKind = getMaxType(larg.tpe :: rarg.tpe :: Nil);
+ resKind = getMaxType(larg.tpe :: rarg.tpe :: Nil)
if (scalaPrimitives.isShiftOp(code) || scalaPrimitives.isBitwiseOp(code))
assert(resKind.isIntegralType | resKind == BOOL,
- resKind.toString() + " incompatible with arithmetic modulo operation: " + ctx1);
+ resKind.toString() + " incompatible with arithmetic modulo operation: " + ctx1)
ctx1 = genLoad(larg, ctx1, resKind)
ctx1 = genLoad(rarg,
@@ -264,11 +257,6 @@ abstract class GenICode extends SubComponent {
}
/** Generate primitive array operations.
- *
- * @param tree ...
- * @param ctx ...
- * @param code ...
- * @return ...
*/
private def genArrayOp(tree: Tree, ctx: Context, code: Int, expectedType: TypeKind): (Context, TypeKind) = {
import scalaPrimitives._
@@ -283,14 +271,19 @@ abstract class GenICode extends SubComponent {
if (scalaPrimitives.isArrayGet(code)) {
// load argument on stack
debugassert(args.length == 1,
- "Too many arguments for array get operation: " + tree);
+ "Too many arguments for array get operation: " + tree)
ctx1 = genLoad(args.head, ctx1, INT)
generatedType = elem
ctx1.bb.emit(LOAD_ARRAY_ITEM(elementType), tree.pos)
+ // it's tempting to just drop array loads of type Null instead
+ // of adapting them but array accesses can cause
+ // ArrayIndexOutOfBounds so we can't. Besides, Array[Null]
+ // probably isn't common enough to figure out an optimization
+ adaptNullRef(generatedType, expectedType, ctx1, tree.pos)
}
else if (scalaPrimitives.isArraySet(code)) {
debugassert(args.length == 2,
- "Too many arguments for array set operation: " + tree);
+ "Too many arguments for array set operation: " + tree)
ctx1 = genLoad(args.head, ctx1, INT)
ctx1 = genLoad(args.tail.head, ctx1, toTypeKind(args.tail.head.tpe))
// the following line should really be here, but because of bugs in erasure
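
The adaptNullRef comment above is about why an array read cannot simply be dropped even when its value is uninteresting: the bounds check is an observable effect. A small illustration in plain user code, not compiler internals:

    object ArrayLoadEffectDemo extends App {
      val xs = Array(1, 2, 3)
      def readAndIgnore(i: Int): Unit = { xs(i); () }   // value discarded, load (and bounds check) kept

      readAndIgnore(0)                                  // fine
      try readAndIgnore(5)
      catch { case _: ArrayIndexOutOfBoundsException => println("bounds check still fired") }
    }
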
@@ -310,9 +303,6 @@ abstract class GenICode extends SubComponent {
val Apply(fun, args) = tree
val monitor = ctx.makeLocal(tree.pos, ObjectClass.tpe, "monitor")
var monitorResult: Local = null
-
- // if the synchronized block returns a result, store it in a local variable. just leaving
- // it on the stack is not valid in MSIL (stack is cleaned when leaving try-blocks)
val argTpe = args.head.tpe
val hasResult = expectedType != UNIT
if (hasResult)
@@ -345,7 +335,7 @@ abstract class GenICode extends SubComponent {
MONITOR_EXIT() setPos tree.pos,
THROW(ThrowableClass)
))
- exhCtx.bb.enterIgnoreMode
+ exhCtx.bb.enterIgnoreMode()
exhCtx
})), EmptyTree, tree)
@@ -359,9 +349,9 @@ abstract class GenICode extends SubComponent {
private def genLoadIf(tree: If, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
val If(cond, thenp, elsep) = tree
- var thenCtx = ctx.newBlock
- var elseCtx = ctx.newBlock
- val contCtx = ctx.newBlock
+ var thenCtx = ctx.newBlock()
+ var elseCtx = ctx.newBlock()
+ val contCtx = ctx.newBlock()
genCond(cond, ctx, thenCtx, elseCtx)
@@ -386,12 +376,14 @@ abstract class GenICode extends SubComponent {
"I produce UNIT in a context where " + expectedType + " is expected!")
// alternatives may be already closed by a tail-recursive jump
+ val contReachable = !(thenCtx.bb.ignore && elseCtx.bb.ignore)
thenCtx.bb.closeWith(JUMP(contCtx.bb))
elseCtx.bb.closeWith(
if (elsep == EmptyTree) JUMP(contCtx.bb)
else JUMP(contCtx.bb) setPos tree.pos
)
+ contCtx.bb killUnless contReachable
(contCtx, resKind)
}
private def genLoadTry(tree: Try, ctx: Context, setGeneratedType: TypeKind => Unit): Context = {
@@ -414,8 +406,8 @@ abstract class GenICode extends SubComponent {
(pat.symbol.tpe.typeSymbol, kind, {
ctx: Context =>
- ctx.bb.emit(STORE_LOCAL(exception), pat.pos);
- genLoad(body, ctx, kind);
+ ctx.bb.emit(STORE_LOCAL(exception), pat.pos)
+ genLoad(body, ctx, kind)
})
}
}
@@ -432,7 +424,7 @@ abstract class GenICode extends SubComponent {
private def genPrimitiveOp(tree: Apply, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
val sym = tree.symbol
- val Apply(fun @ Select(receiver, _), args) = tree
+ val Apply(fun @ Select(receiver, _), _) = tree
val code = scalaPrimitives.getPrimitive(sym, receiver.tpe)
if (scalaPrimitives.isArithmeticOp(code))
@@ -444,7 +436,7 @@ abstract class GenICode extends SubComponent {
else if (isArrayOp(code))
genArrayOp(tree, ctx, code, expectedType)
else if (isLogicalOp(code) || isComparisonOp(code)) {
- val trueCtx, falseCtx, afterCtx = ctx.newBlock
+ val trueCtx, falseCtx, afterCtx = ctx.newBlock()
genCond(tree, ctx, trueCtx, falseCtx)
trueCtx.bb.emitOnly(
@@ -471,132 +463,6 @@ abstract class GenICode extends SubComponent {
}
/**
- * forMSIL
- */
- private def msil_IsValuetypeInstMethod(msym: Symbol) = (
- loaders.clrTypes.methods get msym exists (mMSIL =>
- mMSIL.IsInstance && mMSIL.DeclaringType.IsValueType
- )
- )
- private def msil_IsValuetypeInstField(fsym: Symbol) = (
- loaders.clrTypes.fields get fsym exists (fMSIL =>
- !fMSIL.IsStatic && fMSIL.DeclaringType.IsValueType
- )
- )
-
- /**
- * forMSIL: Adds a local var, the emitted code requires one more slot on the stack as on entry
- */
- private def msil_genLoadZeroOfNonEnumValuetype(ctx: Context, kind: TypeKind, pos: Position, leaveAddressOnStackInstead: Boolean) {
- val REFERENCE(clssym) = kind
- assert(loaders.clrTypes.isNonEnumValuetype(clssym), clssym)
- val local = ctx.makeLocal(pos, clssym.tpe, "tmp")
- ctx.method.addLocal(local)
- ctx.bb.emit(CIL_LOAD_LOCAL_ADDRESS(local), pos)
- ctx.bb.emit(CIL_INITOBJ(kind), pos)
- val instr = if (leaveAddressOnStackInstead)
- CIL_LOAD_LOCAL_ADDRESS(local)
- else
- LOAD_LOCAL(local)
- ctx.bb.emit(instr, pos)
- }
-
- /**
- * forMSIL
- */
- private def msil_genLoadAddressOf(tree: Tree, ctx: Context, expectedType: TypeKind, butRawValueIsAlsoGoodEnough: Boolean): Context = {
- var generatedType = expectedType
- var addressTaken = false
- debuglog("at line: " + (if (tree.pos.isDefined) tree.pos.line else tree.pos))
-
- var resCtx: Context = tree match {
-
- // emits CIL_LOAD_FIELD_ADDRESS
- case Select(qualifier, selector) if (!tree.symbol.isModule) =>
- addressTaken = true
- val sym = tree.symbol
- generatedType = toTypeKind(sym.info)
-
- if (sym.isStaticMember) {
- ctx.bb.emit(CIL_LOAD_FIELD_ADDRESS(sym, true), tree.pos)
- ctx
- } else {
- val ctx1 = genLoadQualifier(tree, ctx)
- ctx1.bb.emit(CIL_LOAD_FIELD_ADDRESS(sym, false), tree.pos)
- ctx1
- }
-
- // emits CIL_LOAD_LOCAL_ADDRESS
- case Ident(name) if (!tree.symbol.isPackage && !tree.symbol.isModule)=>
- addressTaken = true
- val sym = tree.symbol
- try {
- val Some(l) = ctx.method.lookupLocal(sym)
- ctx.bb.emit(CIL_LOAD_LOCAL_ADDRESS(l), tree.pos)
- generatedType = l.kind // actually, should be "V&" but the callsite is aware of this
- } catch {
- case ex: MatchError =>
- abort("symbol " + sym + " does not exist in " + ctx.method)
- }
- ctx
-
- // emits CIL_LOAD_ARRAY_ITEM_ADDRESS
- case Apply(fun, args) =>
- if (isPrimitive(fun.symbol)) {
-
- val sym = tree.symbol
- val Apply(fun @ Select(receiver, _), args) = tree
- val code = scalaPrimitives.getPrimitive(sym, receiver.tpe)
-
- if (isArrayOp(code)) {
- val arrayObj = receiver
- val k = toTypeKind(arrayObj.tpe)
- val ARRAY(elementType) = k
- if (scalaPrimitives.isArrayGet(code)) {
- var ctx1 = genLoad(arrayObj, ctx, k)
- // load argument on stack
- debugassert(args.length == 1, "Too many arguments for array get operation: " + tree)
- ctx1 = genLoad(args.head, ctx1, INT)
- generatedType = elementType // actually "managed pointer to element type" but the callsite is aware of this
- ctx1.bb.emit(CIL_LOAD_ARRAY_ITEM_ADDRESS(elementType), tree.pos)
- addressTaken = true
- ctx1
- } else null
- } else null
- } else null
-
- case This(qual) =>
- /* TODO: this case handler is a placeholder for the time when Level 2 support for valuetypes is in place,
- in particular when invoking other methods on this where this is a valuetype value (boxed or not).
- As receiver, a managed pointer is expected, and a plain ldarg.0 achieves just that. */
- addressTaken = true
- genLoad(tree, ctx, expectedType)
-
- case _ =>
- null /* A method returning ByRef won't pass peverify, so I guess this case handler is dead code.
- Even if it's not, the code below to handler !addressTaken below. */
- }
-
- if (!addressTaken) {
- resCtx = genLoad(tree, ctx, expectedType)
- if (!butRawValueIsAlsoGoodEnough) {
- // raw value on stack (must be an intermediate result, e.g. returned by method call), take address
- addressTaken = true
- val boxType = expectedType // toTypeKind(expectedType /* TODO FIXME */)
- resCtx.bb.emit(BOX(boxType), tree.pos)
- resCtx.bb.emit(CIL_UNBOX(boxType), tree.pos)
- }
- }
-
- // emit conversion
- if (generatedType != expectedType)
- abort("Unexpected tree in msil_genLoadAddressOf: " + tree + " at: " + tree.pos)
-
- resCtx
- }
-
-
- /**
* Generate code for trees that produce values on the stack
*
* @param tree The tree to be translated
@@ -613,7 +479,11 @@ abstract class GenICode extends SubComponent {
val resCtx: Context = tree match {
case LabelDef(name, params, rhs) =>
def genLoadLabelDef = {
- val ctx1 = ctx.newBlock
+ val ctx1 = ctx.newBlock() // note: we cannot kill ctx1 if ctx is in ignore mode because
+ // label defs can be the target of jumps from other locations.
+ // that means label defs can lead to unreachable code without
+ // proper reachability analysis
+
if (nme.isLoopHeaderLabel(name))
ctx1.bb.loopHeader = true
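
The comment above notes that label defs can be jump targets from elsewhere, and the line below marks loop-header labels. A minimal standalone sketch (not compiler code; the object name LoopAsLabel is illustrative) of the kind of source that produces such a loop-header LabelDef:

    object LoopAsLabel {
      def countDown(start: Int): Int = {
        var n = start
        while (n > 0) n -= 1   // the loop header becomes a label that the loop body jumps back to
        n
      }
      def main(args: Array[String]): Unit = println(countDown(3))   // prints: 0
    }
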
@@ -627,7 +497,7 @@ abstract class GenICode extends SubComponent {
val pair = (tree.symbol -> (new Label(tree.symbol) anchor ctx1.bb setParams (params map (_.symbol))))
debuglog("Adding label " + tree.symbol.fullLocationString + " in genLoad.")
ctx1.labels += pair
- ctx.method.addLocals(params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false)));
+ ctx.method.addLocals(params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false)))
}
ctx.bb.closeWith(JUMP(ctx1.bb), tree.pos)
@@ -645,13 +515,13 @@ abstract class GenICode extends SubComponent {
val local = ctx.method.addLocal(new Local(sym, toTypeKind(sym.info), false))
if (rhs == EmptyTree) {
- debuglog("Uninitialized variable " + tree + " at: " + (tree.pos));
+ debuglog("Uninitialized variable " + tree + " at: " + (tree.pos))
ctx.bb.emit(getZeroOf(local.kind))
}
var ctx1 = ctx
if (rhs != EmptyTree)
- ctx1 = genLoad(rhs, ctx, local.kind);
+ ctx1 = genLoad(rhs, ctx, local.kind)
ctx1.bb.emit(STORE_LOCAL(local), tree.pos)
ctx1.scope.add(local)
@@ -695,7 +565,8 @@ abstract class GenICode extends SubComponent {
// we have to run this without the same finalizer in
// the list, otherwise infinite recursion happens for
// finalizers that contain 'return'
- val fctx = finalizerCtx.newBlock
+ val fctx = finalizerCtx.newBlock()
+ fctx.bb killIf ctx1.bb.ignore
ctx1.bb.closeWith(JUMP(fctx.bb))
ctx1 = genLoad(f1, fctx, UNIT)
}
@@ -708,7 +579,7 @@ abstract class GenICode extends SubComponent {
}
adapt(returnedKind, ctx1.method.returnType, ctx1, tree.pos)
ctx1.bb.emit(RETURN(ctx.method.returnType), tree.pos)
- ctx1.bb.enterIgnoreMode
+ ctx1.bb.enterIgnoreMode()
generatedType = expectedType
ctx1
}
@@ -760,7 +631,7 @@ abstract class GenICode extends SubComponent {
} else {
genCast(l, r, ctx1, cast)
}
- generatedType = if (cast) r else BOOL;
+ generatedType = if (cast) r else BOOL
ctx1
}
genLoadApply1
@@ -773,7 +644,7 @@ abstract class GenICode extends SubComponent {
// on the stack (contrary to what the type in the AST says).
case Apply(fun @ Select(Super(_, mix), _), args) =>
def genLoadApply2 = {
- debuglog("Call to super: " + tree);
+ debuglog("Call to super: " + tree)
val invokeStyle = SuperCall(mix)
// if (fun.symbol.isConstructor) Static(true) else SuperCall(mix);
@@ -819,31 +690,15 @@ abstract class GenICode extends SubComponent {
debugassert(ctor.owner == cls,
"Symbol " + ctor.owner.fullName + " is different than " + tpt)
- val ctx2 = if (forMSIL && loaders.clrTypes.isNonEnumValuetype(cls)) {
- /* parameterful constructors are the only possible custom constructors,
- a default constructor can't be defined for valuetypes, CLR dixit */
- val isDefaultConstructor = args.isEmpty
- if (isDefaultConstructor) {
- msil_genLoadZeroOfNonEnumValuetype(ctx, rt, tree.pos, leaveAddressOnStackInstead = false)
- ctx
- } else {
- val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
- ctx1.bb.emit(CIL_NEWOBJ(ctor), tree.pos)
- ctx1
- }
- } else {
- val nw = NEW(rt)
- ctx.bb.emit(nw, tree.pos)
- ctx.bb.emit(DUP(generatedType))
- val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
-
- val init = CALL_METHOD(ctor, Static(true))
- nw.init = init
- ctx1.bb.emit(init, tree.pos)
- ctx1
- }
- ctx2
+ val nw = NEW(rt)
+ ctx.bb.emit(nw, tree.pos)
+ ctx.bb.emit(DUP(generatedType))
+ val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
+ val init = CALL_METHOD(ctor, Static(onInstance = true))
+ nw.init = init
+ ctx1.bb.emit(init, tree.pos)
+ ctx1
case _ =>
abort("Cannot instantiate " + tpt + " of kind: " + generatedType)
}
@@ -852,14 +707,14 @@ abstract class GenICode extends SubComponent {
case Apply(fun @ _, List(expr)) if (definitions.isBox(fun.symbol)) =>
def genLoadApply4 = {
- debuglog("BOX : " + fun.symbol.fullName);
+ debuglog("BOX : " + fun.symbol.fullName)
val ctx1 = genLoad(expr, ctx, toTypeKind(expr.tpe))
val nativeKind = toTypeKind(expr.tpe)
if (settings.Xdce.value) {
// we store this boxed value to a local, even if not really needed.
// boxing optimization might use it, and dead code elimination will
// take care of unnecessary stores
- var loc1 = ctx.makeLocal(tree.pos, expr.tpe, "boxed")
+ val loc1 = ctx.makeLocal(tree.pos, expr.tpe, "boxed")
ctx1.bb.emit(STORE_LOCAL(loc1))
ctx1.bb.emit(LOAD_LOCAL(loc1))
}
@@ -877,12 +732,6 @@ abstract class GenICode extends SubComponent {
ctx1.bb.emit(UNBOX(boxType), expr.pos)
ctx1
- case Apply(fun @ _, List(expr)) if (forMSIL && loaders.clrTypes.isAddressOf(fun.symbol)) =>
- debuglog("ADDRESSOF : " + fun.symbol.fullName);
- val ctx1 = msil_genLoadAddressOf(expr, ctx, toTypeKind(expr.tpe), butRawValueIsAlsoGoodEnough = false)
- generatedType = toTypeKind(fun.symbol.tpe.resultType)
- ctx1
-
case app @ Apply(fun, args) =>
def genLoadApply6 = {
val sym = fun.symbol
@@ -908,35 +757,28 @@ abstract class GenICode extends SubComponent {
// (if it's not in ignore mode, double-closing is an error)
val ctx1 = genLoadLabelArguments(args, label, ctx)
ctx1.bb.emitOnly(if (label.anchored) JUMP(label.block) else PJUMP(label))
- ctx1.bb.enterIgnoreMode
+ ctx1.bb.enterIgnoreMode()
ctx1
} else if (isPrimitive(sym)) { // primitive method call
val (newCtx, resKind) = genPrimitiveOp(app, ctx, expectedType)
generatedType = resKind
newCtx
} else { // normal method call
- debuglog("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember);
+ debuglog("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember)
val invokeStyle =
if (sym.isStaticMember)
- Static(false)
+ Static(onInstance = false)
else if (sym.isPrivate || sym.isClassConstructor)
- Static(true)
+ Static(onInstance = true)
else
Dynamic
- var ctx1 =
- if (invokeStyle.hasInstance) {
- if (forMSIL && !(invokeStyle.isInstanceOf[SuperCall]) && msil_IsValuetypeInstMethod(sym))
- msil_genLoadQualifierAddress(fun, ctx)
- else
- genLoadQualifier(fun, ctx)
- } else ctx
-
+ var ctx1 = if (invokeStyle.hasInstance) genLoadQualifier(fun, ctx) else ctx
ctx1 = genLoadArguments(args, sym.info.paramTypes, ctx1)
val cm = CALL_METHOD(sym, invokeStyle)
/** In a couple cases, squirrel away a little extra information in the
- * CALL_METHOD for use by GenJVM.
+ * CALL_METHOD for use by GenASM.
*/
fun match {
case Select(qual, _) =>
@@ -960,14 +802,15 @@ abstract class GenICode extends SubComponent {
}
generatedType =
if (sym.isClassConstructor) UNIT
- else toTypeKind(sym.info.resultType);
+ else toTypeKind(sym.info.resultType)
+ // deal with methods that return Null
+ adaptNullRef(generatedType, expectedType, ctx1, tree.pos)
ctx1
}
}
genLoadApply6
case ApplyDynamic(qual, args) =>
- assert(!forMSIL, tree)
// TODO - this is where we'd catch dynamic applies for invokedynamic.
sys.error("No invokedynamic support yet.")
// val ctx1 = genLoad(qual, ctx, ObjectReference)
@@ -1006,16 +849,22 @@ abstract class GenICode extends SubComponent {
generatedType = toTypeKind(sym.info)
val hostClass = findHostClass(qualifier.tpe, sym)
log(s"Host class of $sym with qual $qualifier (${qualifier.tpe}) is $hostClass")
+ val qualSafeToElide = treeInfo isQualifierSafeToElide qualifier
+
+ def genLoadQualUnlessElidable: Context =
+ if (qualSafeToElide) ctx else genLoadQualifier(tree, ctx)
if (sym.isModule) {
- genLoadModule(ctx, tree)
- }
- else if (sym.isStaticMember) {
- ctx.bb.emit(LOAD_FIELD(sym, true) setHostClass hostClass, tree.pos)
- ctx
+ genLoadModule(genLoadQualUnlessElidable, tree)
} else {
- val ctx1 = genLoadQualifier(tree, ctx)
- ctx1.bb.emit(LOAD_FIELD(sym, false) setHostClass hostClass, tree.pos)
+ val isStatic = sym.isStaticMember
+ val ctx1 = if (isStatic) genLoadQualUnlessElidable
+ else genLoadQualifier(tree, ctx)
+ ctx1.bb.emit(LOAD_FIELD(sym, isStatic) setHostClass hostClass, tree.pos)
+ // it's tempting to drop field accesses of type Null instead of adapting them,
+ // but field access can cause static class init so we can't. Besides, fields
+ // of type Null probably aren't common enough to figure out an optimization
+ adaptNullRef(generatedType, expectedType, ctx1, tree.pos)
ctx1
}
}
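
The new comment explains that a Null-typed field load cannot simply be dropped because the access may trigger static/class initialization. A small user-level sketch of that side effect (standalone, assuming nothing from the compiler; InitOnFieldAccess and Holder are illustrative names):

    object InitOnFieldAccess {
      object Holder {
        println("Holder initialized")   // side effect of the module's (static-like) initializer
        val field: String = null
      }
      def main(args: Array[String]): Unit = {
        println("before access")
        println(Holder.field)           // forces Holder's initializer to run first, then prints null
      }
    }
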
@@ -1047,16 +896,16 @@ abstract class GenICode extends SubComponent {
def genLoadLiteral = {
if (value.tag != UnitTag) (value.tag, expectedType) match {
case (IntTag, LONG) =>
- ctx.bb.emit(CONSTANT(Constant(value.longValue)), tree.pos);
+ ctx.bb.emit(CONSTANT(Constant(value.longValue)), tree.pos)
generatedType = LONG
case (FloatTag, DOUBLE) =>
- ctx.bb.emit(CONSTANT(Constant(value.doubleValue)), tree.pos);
+ ctx.bb.emit(CONSTANT(Constant(value.doubleValue)), tree.pos)
generatedType = DOUBLE
case (NullTag, _) =>
- ctx.bb.emit(CONSTANT(value), tree.pos);
+ ctx.bb.emit(CONSTANT(value), tree.pos)
generatedType = NullReference
case _ =>
- ctx.bb.emit(CONSTANT(value), tree.pos);
+ ctx.bb.emit(CONSTANT(value), tree.pos)
generatedType = toTypeKind(tree.tpe)
}
ctx
@@ -1064,10 +913,10 @@ abstract class GenICode extends SubComponent {
genLoadLiteral
case Block(stats, expr) =>
- ctx.enterScope
+ ctx.enterScope()
var ctx1 = genStat(stats, ctx)
ctx1 = genLoad(expr, ctx1, expectedType)
- ctx1.exitScope
+ ctx1.exitScope()
ctx1
case Typed(Super(_, _), _) =>
@@ -1104,9 +953,11 @@ abstract class GenICode extends SubComponent {
case Match(selector, cases) =>
def genLoadMatch = {
- debuglog("Generating SWITCH statement.");
- var ctx1 = genLoad(selector, ctx, INT) // TODO: Java 7 allows strings in switches (so, don't assume INT and don't convert the literals using intValue)
- val afterCtx = ctx1.newBlock
+ debuglog("Generating SWITCH statement.")
+ val ctx1 = genLoad(selector, ctx, INT) // TODO: Java 7 allows strings in switches (so, don't assume INT and don't convert the literals using intValue)
+ val afterCtx = ctx1.newBlock()
+ afterCtx.bb killIf ctx1.bb.ignore
+ var afterCtxReachable = false
var caseCtx: Context = null
generatedType = toTypeKind(tree.tpe)
@@ -1116,7 +967,8 @@ abstract class GenICode extends SubComponent {
for (caze @ CaseDef(pat, guard, body) <- cases) {
assert(guard == EmptyTree, guard)
- val tmpCtx = ctx1.newBlock
+ val tmpCtx = ctx1.newBlock()
+ tmpCtx.bb killIf ctx1.bb.ignore
pat match {
case Literal(value) =>
tags = value.intValue :: tags
@@ -1138,12 +990,15 @@ abstract class GenICode extends SubComponent {
}
caseCtx = genLoad(body, tmpCtx, generatedType)
+ afterCtxReachable ||= !caseCtx.bb.ignore
// close the block unless it's already been closed by the body, which closes the block if it ends in a jump (which is emitted to have alternatives share their body)
caseCtx.bb.closeWith(JUMP(afterCtx.bb) setPos caze.pos)
}
+ afterCtxReachable ||= (default == afterCtx)
ctx1.bb.emitOnly(
SWITCH(tags.reverse map (x => List(x)), (default :: targets).reverse) setPos tree.pos
)
+ afterCtx.bb killUnless afterCtxReachable
afterCtx
}
genLoadMatch
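
For reference, a standalone sketch of the source shape that genLoadMatch lowers to a SWITCH: a match on an Int selector whose patterns are integer literals plus a default (the object name IntSwitch is illustrative, not part of the compiler):

    object IntSwitch {
      def describe(n: Int): String = n match {
        case 0 => "zero"
        case 1 => "one"
        case _ => "many"   // becomes the default target of the emitted SWITCH
      }
      def main(args: Array[String]): Unit =
        List(0, 1, 7) map describe foreach println   // prints: zero, one, many
    }
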
@@ -1163,29 +1018,61 @@ abstract class GenICode extends SubComponent {
resCtx
}
+
+ /**
+ * If we have a method call, field load, or array element load of type Null then
+ * we need to convince the JVM that we have a null value because in Scala
+ * land Null is a subtype of all ref types, but in JVM land scala.runtime.Null$
+ * is not. Note we don't have to adapt loads of locals because the JVM type
+ * system for locals does have a null type which it tracks internally. As
+ * long as we adapt these other things, the JVM will know that a Scala local of
+ * type Null is holding a null.
+ */
+ private def adaptNullRef(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) {
+ log(s"GenICode#adaptNullRef($from, $to, $ctx, $pos)")
+
+ // Don't need to adapt null to unit because we'll just drop it anyway. Don't
+ // need to adapt to Object or AnyRef because the JVM is happy with
+ // upcasting Null to them.
+ // We do have to adapt from NullReference to NullReference because we could be storing
+ // this value into a local of type Null and we want the JVM to see that it's
+ // a null value so we don't have to also adapt local loads.
+ if (from == NullReference && to != UNIT && to != ObjectReference && to != AnyRefReference) {
+ assert(to.isRefOrArrayType, s"Attempt to adapt a null to a non reference type $to.")
+ // adapt by dropping what we've got and pushing a null which
+ // will convince the JVM we really do have null
+ ctx.bb.emit(DROP(from), pos)
+ ctx.bb.emit(CONSTANT(Constant(null)), pos)
+ }
+ }
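
The doc comment above describes why Null-typed results need adaptation: in Scala, Null is a subtype of every reference type, while on the JVM scala.runtime.Null$ is not. A minimal standalone sketch of the source-level situation (NullWidening is an illustrative name, not compiler code):

    object NullWidening {
      def alwaysNull: Null = null    // a call site of this method is what adaptNullRef has to fix up
      def main(args: Array[String]): Unit = {
        val s: String = alwaysNull   // legal in Scala because Null <: String
        println(s == null)           // prints: true
      }
    }
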
private def adapt(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) {
// An awful lot of bugs explode here - let's leave ourselves more clues.
// A typical example is an overloaded type assigned after typer.
log(s"GenICode#adapt($from, $to, $ctx, $pos)")
- val conforms = (from <:< to) || (from == NullReference && to == NothingReference)
def coerce(from: TypeKind, to: TypeKind) = ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos)
- def checkAssertions() {
- def msg = s"Can't convert from $from to $to in unit ${unit.source} at $pos"
- debugassert(from != UNIT, msg)
- assert(!from.isReferenceType && !to.isReferenceType, msg)
- }
- if (conforms) from match {
- case NothingReference => ctx.bb.emit(THROW(ThrowableClass)) ; ctx.bb.enterIgnoreMode
- case NullReference => ctx.bb.emit(Seq(DROP(from), CONSTANT(Constant(null))))
- case ThrowableReference if !(ThrowableClass.tpe <:< to.toType) => ctx.bb.emit(CHECK_CAST(to)) // downcast throwables
- case BYTE | SHORT | CHAR | INT if to == LONG => coerce(INT, LONG) // widen subrange types
- case _ => ()
- }
- else to match {
- case UNIT => ctx.bb.emit(DROP(from), pos) // value discarding
- case _ => checkAssertions() ; coerce(from, to) // other primitive coercions
+
+ (from, to) match {
+ // The JVM doesn't have a Nothing equivalent, so it doesn't know that a method whose result type is Nothing can't actually return. So for instance, with
+ // def f: String = ???
+ // we need
+ // 0: getstatic #25; //Field scala/Predef$.MODULE$:Lscala/Predef$;
+ // 3: invokevirtual #29; //Method scala/Predef$.$qmark$qmark$qmark:()Lscala/runtime/Nothing$;
+ // 6: athrow
+ // So this case tacks on the athrow, which makes the JVM happy because class Nothing is declared as a subclass of Throwable
+ case (NothingReference, _) =>
+ ctx.bb.emit(THROW(ThrowableClass))
+ ctx.bb.enterIgnoreMode()
+ case _ if from isAssignabledTo to =>
+ ()
+ case (_, UNIT) =>
+ ctx.bb.emit(DROP(from), pos)
+ // otherwise we'd better be doing a primitive -> primitive coercion or there's a problem
+ case _ if !from.isRefOrArrayType && !to.isRefOrArrayType =>
+ coerce(from, to)
+ case _ =>
+ assert(false, s"Can't convert from $from to $to in unit ${unit.source} at $pos")
}
}
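
The (NothingReference, _) case above handles expressions of type Nothing, which can appear wherever any type is expected but never return normally. A hedged, standalone sketch of that source-level situation, reusing the comment's own example (NothingAdaptation is an illustrative name):

    object NothingAdaptation {
      def f: String = ???            // ??? has type Nothing, yet f: String typechecks
      def main(args: Array[String]): Unit =
        try println(f)
        catch { case _: NotImplementedError => println("f threw; a Nothing-typed call never returns normally") }
    }
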
@@ -1198,15 +1085,6 @@ abstract class GenICode extends SubComponent {
abort("Unknown qualifier " + tree)
}
- /** forMSIL */
- private def msil_genLoadQualifierAddress(tree: Tree, ctx: Context): Context =
- tree match {
- case Select(qualifier, _) =>
- msil_genLoadAddressOf(qualifier, ctx, toTypeKind(qualifier.tpe), butRawValueIsAlsoGoodEnough = false)
- case _ =>
- abort("Unknown qualifier " + tree)
- }
-
/**
* Generate code that loads args into label parameters.
*/
@@ -1253,7 +1131,9 @@ abstract class GenICode extends SubComponent {
if (!tree.symbol.isPackageClass) tree.symbol
else tree.symbol.info.member(nme.PACKAGE) match {
case NoSymbol => abort("Cannot use package as value: " + tree)
- case s => debugwarn("Bug: found package class where package object expected. Converting.") ; s.moduleClass
+ case s =>
+ devWarning(s"Found ${tree.symbol} where a package object is required. Converting to ${s.moduleClass}")
+ s.moduleClass
}
)
debuglog("LOAD_MODULE from %s: %s".format(tree.shortClass, sym))
@@ -1387,18 +1267,14 @@ abstract class GenICode extends SubComponent {
// }
/** Generate string concatenation.
- *
- * @param tree ...
- * @param ctx ...
- * @return ...
*/
def genStringConcat(tree: Tree, ctx: Context): Context = {
liftStringConcat(tree) match {
// Optimization for expressions of the form "" + x. We can avoid the StringBuilder.
- case List(Literal(Constant("")), arg) if !forMSIL =>
+ case List(Literal(Constant("")), arg) =>
debuglog("Rewriting \"\" + x as String.valueOf(x) for: " + arg)
val ctx1 = genLoad(arg, ctx, ObjectReference)
- ctx1.bb.emit(CALL_METHOD(String_valueOf, Static(false)), arg.pos)
+ ctx1.bb.emit(CALL_METHOD(String_valueOf, Static(onInstance = false)), arg.pos)
ctx1
case concatenations =>
debuglog("Lifted string concatenations for " + tree + "\n to: " + concatenations)
@@ -1423,7 +1299,7 @@ abstract class GenICode extends SubComponent {
}
val ctx1 = genLoad(tree, ctx, ObjectReference)
- ctx1.bb.emit(CALL_METHOD(hashMethod, Static(false)))
+ ctx1.bb.emit(CALL_METHOD(hashMethod, Static(onInstance = false)))
ctx1
}
@@ -1479,9 +1355,17 @@ abstract class GenICode extends SubComponent {
private def genCond(tree: Tree,
ctx: Context,
thenCtx: Context,
- elseCtx: Context): Unit =
- {
- def genComparisonOp(l: Tree, r: Tree, code: Int) {
+ elseCtx: Context): Boolean =
+ {
+ /**
+ * Generate the de-sugared comparison mechanism that will underlie an '=='
+ *
+ * @param l left-hand side of the '=='
+ * @param r right-hand side of the '=='
+ * @param code the comparison operator to use
+ * @return true if either branch can continue normally to a follow-on block, false otherwise
+ */
+ def genComparisonOp(l: Tree, r: Tree, code: Int): Boolean = {
val op: TestOp = code match {
case scalaPrimitives.LT => LT
case scalaPrimitives.LE => LE
@@ -1497,27 +1381,33 @@ abstract class GenICode extends SubComponent {
lazy val nonNullSide = ifOneIsNull(l, r)
if (isReferenceEqualityOp(code) && nonNullSide != null) {
val ctx1 = genLoad(nonNullSide, ctx, ObjectReference)
+ val branchesReachable = !ctx1.bb.ignore
ctx1.bb.emitOnly(
CZJUMP(thenCtx.bb, elseCtx.bb, op, ObjectReference)
)
+ branchesReachable
}
else {
val kind = getMaxType(l.tpe :: r.tpe :: Nil)
var ctx1 = genLoad(l, ctx, kind)
ctx1 = genLoad(r, ctx1, kind)
+ val branchesReachable = !ctx1.bb.ignore
ctx1.bb.emitOnly(
CJUMP(thenCtx.bb, elseCtx.bb, op, kind) setPos r.pos
)
+ branchesReachable
}
}
- debuglog("Entering genCond with tree: " + tree);
+ debuglog("Entering genCond with tree: " + tree)
// the default emission
- def default() = {
+ def default(): Boolean = {
val ctx1 = genLoad(tree, ctx, BOOL)
+ val branchesReachable = !ctx1.bb.ignore
ctx1.bb.closeWith(CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL) setPos tree.pos)
+ branchesReachable
}
tree match {
@@ -1529,11 +1419,12 @@ abstract class GenICode extends SubComponent {
lazy val Select(lhs, _) = fun
lazy val rhs = args.head
- def genZandOrZor(and: Boolean) = {
- val ctxInterm = ctx.newBlock
+ def genZandOrZor(and: Boolean): Boolean = {
+ val ctxInterm = ctx.newBlock()
- if (and) genCond(lhs, ctx, ctxInterm, elseCtx)
+ val branchesReachable = if (and) genCond(lhs, ctx, ctxInterm, elseCtx)
else genCond(lhs, ctx, thenCtx, ctxInterm)
+ ctxInterm.bb killUnless branchesReachable
genCond(rhs, ctxInterm, thenCtx, elseCtx)
}
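
genZandOrZor wires && and || as branches rather than method calls, so the right-hand side is only evaluated when the left-hand side makes it necessary. A standalone sketch of that user-visible behavior (ShortCircuit is an illustrative name, not compiler code):

    object ShortCircuit {
      var rhsEvaluations = 0
      def rhs(): Boolean = { rhsEvaluations += 1; true }
      def main(args: Array[String]): Unit = {
        val l = args.nonEmpty                      // false for a no-argument run
        if (l && rhs()) println("both")            // rhs() is skipped when l is false
        if (!l || rhs()) println("at least one")   // rhs() is skipped when !l is true
        println(rhsEvaluations)                    // prints: 0 for a no-argument run
      }
    }
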
@@ -1556,10 +1447,10 @@ abstract class GenICode extends SubComponent {
else if (isComparisonOp(code))
genComparisonOp(lhs, rhs, code)
else
- default
+ default()
}
- case _ => default
+ case _ => default()
}
}
@@ -1572,10 +1463,11 @@ abstract class GenICode extends SubComponent {
* @param ctx current context
* @param thenCtx target context if the comparison yields true
* @param elseCtx target context if the comparison yields false
+ * @return true if either branch can continue normally to a follow-on block, false otherwise
*/
- def genEqEqPrimitive(l: Tree, r: Tree, ctx: Context)(thenCtx: Context, elseCtx: Context): Unit = {
+ def genEqEqPrimitive(l: Tree, r: Tree, ctx: Context)(thenCtx: Context, elseCtx: Context): Boolean = {
def getTempLocal = ctx.method.lookupLocal(nme.EQEQ_LOCAL_VAR) getOrElse {
- ctx.makeLocal(l.pos, AnyRefClass.tpe, nme.EQEQ_LOCAL_VAR)
+ ctx.makeLocal(l.pos, AnyRefClass.tpe, nme.EQEQ_LOCAL_VAR.toString)
}
/** True if the equality comparison is between values that require the use of the rich equality
@@ -1613,26 +1505,40 @@ abstract class GenICode extends SubComponent {
val ctx1 = genLoad(l, ctx, ObjectReference)
val ctx2 = genLoad(r, ctx1, ObjectReference)
+ val branchesReachable = !ctx2.bb.ignore
ctx2.bb.emitOnly(
- CALL_METHOD(equalsMethod, if (settings.optimise.value) Dynamic else Static(false)),
+ CALL_METHOD(equalsMethod, if (settings.optimise.value) Dynamic else Static(onInstance = false)),
CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
)
+ branchesReachable
}
else {
- if (isNull(l))
+ if (isNull(l)) {
// null == expr -> expr eq null
- genLoad(r, ctx, ObjectReference).bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
- else if (isNull(r)) {
+ val ctx1 = genLoad(r, ctx, ObjectReference)
+ val branchesReachable = !ctx1.bb.ignore
+ ctx1.bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
+ branchesReachable
+ } else if (isNull(r)) {
// expr == null -> expr eq null
- genLoad(l, ctx, ObjectReference).bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
+ val ctx1 = genLoad(l, ctx, ObjectReference)
+ val branchesReachable = !ctx1.bb.ignore
+ ctx1.bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
+ branchesReachable
} else {
val eqEqTempLocal = getTempLocal
var ctx1 = genLoad(l, ctx, ObjectReference)
- lazy val nonNullCtx = ctx1.newBlock
+ val branchesReachable = !ctx1.bb.ignore
+ lazy val nonNullCtx = {
+ val block = ctx1.newBlock()
+ block.bb killUnless branchesReachable
+ block
+ }
// l == r -> if (l eq null) r eq null else l.equals(r)
ctx1 = genLoad(r, ctx1, ObjectReference)
- val nullCtx = ctx1.newBlock
+ val nullCtx = ctx1.newBlock()
+ nullCtx.bb killUnless branchesReachable
ctx1.bb.emitOnly(
STORE_LOCAL(eqEqTempLocal) setPos l.pos,
@@ -1649,6 +1555,7 @@ abstract class GenICode extends SubComponent {
CALL_METHOD(Object_equals, Dynamic),
CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
)
+ branchesReachable
}
}
}
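
The comment "l == r -> if (l eq null) r eq null else l.equals(r)" above states the null-safe desugaring that genEqEqPrimitive emits. A standalone sketch checking that the two forms agree (EqEqDesugar is an illustrative name; this is user-level code, not the backend's):

    object EqEqDesugar {
      def viaEqEq(l: AnyRef, r: AnyRef): Boolean = l == r
      def desugared(l: AnyRef, r: AnyRef): Boolean =
        if (l eq null) r eq null else l.equals(r)
      def main(args: Array[String]): Unit = {
        val samples = List[(AnyRef, AnyRef)]((null, null), (null, "a"), ("a", null), ("a", "a"), ("a", "b"))
        samples foreach { case (l, r) => assert(viaEqEq(l, r) == desugared(l, r)) }
        println("== agrees with its null-safe desugaring on all samples")
      }
    }
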
@@ -1704,8 +1611,6 @@ abstract class GenICode extends SubComponent {
* If the block consists of a single unconditional jump, prune
* it by replacing the instructions in the predecessor to jump
* directly to the JUMP target of the block.
- *
- * @param method ...
*/
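
As a rough, standalone analogy for the pruning described above (not the ICode implementation; JumpThreading and the block names are illustrative), predecessors of a block that only jumps can be redirected straight to that jump's target:

    object JumpThreading {
      // block -> target of its single unconditional jump; None means the block does real work
      val jumpOnly: Map[String, Option[String]] =
        Map("A" -> Some("B"), "B" -> Some("C"), "C" -> None)
      def resolve(block: String): String = jumpOnly(block) match {
        case Some(next) => resolve(next)   // thread the jump through the empty block
        case None       => block
      }
      def main(args: Array[String]): Unit =
        println(resolve("A"))   // prints: C, so jumps to A or B can be rewritten to jump to C
    }
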
def prune(method: IMethod) = {
var changed = false
@@ -1717,14 +1622,14 @@ abstract class GenICode extends SubComponent {
case _ => None
}
if (block.size == 1 && optCont.isDefined) {
- val Some(cont) = optCont;
- val pred = block.predecessors;
- debuglog("Preds: " + pred + " of " + block + " (" + optCont + ")");
+ val Some(cont) = optCont
+ val pred = block.predecessors
+ debuglog("Preds: " + pred + " of " + block + " (" + optCont + ")")
pred foreach { p =>
changed = true
p.lastInstruction match {
case CJUMP(succ, fail, cond, kind) if (succ == block || fail == block) =>
- debuglog("Pruning empty if branch.");
+ debuglog("Pruning empty if branch.")
p.replaceInstruction(p.lastInstruction,
if (block == succ)
if (block == fail)
@@ -1737,7 +1642,7 @@ abstract class GenICode extends SubComponent {
abort("Could not find block in preds: " + method + " " + block + " " + pred + " " + p))
case CZJUMP(succ, fail, cond, kind) if (succ == block || fail == block) =>
- debuglog("Pruning empty ifz branch.");
+ debuglog("Pruning empty ifz branch.")
p.replaceInstruction(p.lastInstruction,
if (block == succ)
if (block == fail)
@@ -1750,12 +1655,12 @@ abstract class GenICode extends SubComponent {
abort("Could not find block in preds"))
case JUMP(b) if (b == block) =>
- debuglog("Pruning empty JMP branch.");
+ debuglog("Pruning empty JMP branch.")
val replaced = p.replaceInstruction(p.lastInstruction, JUMP(cont))
debugassert(replaced, "Didn't find p.lastInstruction")
case SWITCH(tags, labels) if (labels contains block) =>
- debuglog("Pruning empty SWITCH branch.");
+ debuglog("Pruning empty SWITCH branch.")
p.replaceInstruction(p.lastInstruction,
SWITCH(tags, labels map (l => if (l == block) cont else l)))
@@ -1771,7 +1676,7 @@ abstract class GenICode extends SubComponent {
e.covered = e.covered filter (_ != block)
e.blocks = e.blocks filter (_ != block)
if (e.startBlock eq block)
- e setStartBlock cont;
+ e setStartBlock cont
}
}
}
@@ -1783,7 +1688,7 @@ abstract class GenICode extends SubComponent {
method.blocks foreach prune0
} while (changed)
- debuglog("Prune fixpoint reached in " + n + " iterations.");
+ debuglog("Prune fixpoint reached in " + n + " iterations.")
}
def getMaxType(ts: List[Type]): TypeKind =
@@ -1825,9 +1730,7 @@ abstract class GenICode extends SubComponent {
t match {
case t @ Apply(_, args) if sym.isLabel && !boundLabels(sym) =>
val newSym = getLabel(sym.pos, sym.name)
- val tree = Apply(global.gen.mkAttributedRef(newSym), transformTrees(args)) setPos t.pos
- tree.tpe = t.tpe
- tree
+ Apply(global.gen.mkAttributedRef(newSym), transformTrees(args)) setPos t.pos setType t.tpe
case t @ LabelDef(name, params, rhs) =>
val newSym = getLabel(t.pos, name)
@@ -1957,22 +1860,11 @@ abstract class GenICode extends SubComponent {
}
def addFinalizer(f: Tree, ctx: Context): this.type = {
- cleanups = Finalizer(f, ctx) :: cleanups;
- this
- }
-
- def removeFinalizer(f: Tree): this.type = {
- assert(cleanups.head contains f,
- "Illegal nesting of cleanup operations: " + cleanups + " while exiting finalizer " + f);
- cleanups = cleanups.tail
+ cleanups = Finalizer(f, ctx) :: cleanups
this
}
/** Prepare a new context upon entry into a method.
- *
- * @param m ...
- * @param d ...
- * @return ...
*/
def enterMethod(m: IMethod, d: DefDef): Context = {
val ctx1 = new Context(this) setMethod(m)
@@ -1981,13 +1873,13 @@ abstract class GenICode extends SubComponent {
ctx1.bb = ctx1.method.startBlock
ctx1.defdef = d
ctx1.scope = EmptyScope
- ctx1.enterScope
+ ctx1.enterScope()
ctx1
}
/** Return a new context for a new basic block. */
def newBlock(): Context = {
- val block = method.code.newBlock
+ val block = method.code.newBlock()
handlers foreach (_ addCoveredBlock block)
currentExceptionHandlers foreach (_ addBlock block)
block.varsInScope.clear()
@@ -2011,13 +1903,12 @@ abstract class GenICode extends SubComponent {
* 'covered' by this exception handler (in addition to the
* previously active handlers).
*/
- private def newExceptionHandler(cls: Symbol, resultKind: TypeKind, pos: Position): ExceptionHandler = {
+ private def newExceptionHandler(cls: Symbol, pos: Position): ExceptionHandler = {
handlerCount += 1
val exh = new ExceptionHandler(method, newTermNameCached("" + handlerCount), cls, pos)
- exh.resultKind = resultKind
method.addHandler(exh)
handlers = exh :: handlers
- debuglog("added handler: " + exh);
+ debuglog("added handler: " + exh)
exh
}
@@ -2027,7 +1918,7 @@ abstract class GenICode extends SubComponent {
private def addActiveHandler(exh: ExceptionHandler) {
handlerCount += 1
handlers = exh :: handlers
- debuglog("added handler: " + exh);
+ debuglog("added handler: " + exh)
}
/** Return a new context for generating code for the given
@@ -2035,7 +1926,7 @@ abstract class GenICode extends SubComponent {
*/
private def enterExceptionHandler(exh: ExceptionHandler): Context = {
currentExceptionHandlers ::= exh
- val ctx = newBlock
+ val ctx = newBlock()
exh.setStartBlock(ctx.bb)
ctx
}
@@ -2044,16 +1935,6 @@ abstract class GenICode extends SubComponent {
currentExceptionHandlers = currentExceptionHandlers.tail
}
- /** Remove the given handler from the list of active exception handlers. */
- def removeActiveHandler(exh: ExceptionHandler): Unit = {
- assert(handlerCount > 0 && handlers.head == exh,
- "Wrong nesting of exception handlers." + this + " for " + exh)
- handlerCount -= 1
- handlers = handlers.tail
- debuglog("removed handler: " + exh);
-
- }
-
/** Clone the current context */
def dup: Context = new Context(this)
@@ -2072,23 +1953,55 @@ abstract class GenICode extends SubComponent {
* It returns the resulting context, with the same active handlers as
* before the call. Use it like:
*
- * <code> ctx.Try( ctx => {
+ * ` ctx.Try( ctx => {
* ctx.bb.emit(...) // protected block
* }, (ThrowableClass,
* ctx => {
* ctx.bb.emit(...); // exception handler
* }), (AnotherExceptionClass,
* ctx => {...
- * } ))</code>
+ * } ))`
+ *
+ * The resulting structure will look something like
+ *
+ * outer:
+ * // this 'useless' jump will be removed later,
+ * // for now it separates the try body's blocks from previous
+ * // code since the try body needs its own exception handlers
+ * JUMP body
+ *
+ * body:
+ * [ try body ]
+ * JUMP normalExit
+ *
+ * catch[i]:
+ * [ handler[i] body ]
+ * JUMP normalExit
+ *
+ * catchAll:
+ * STORE exception
+ * [ finally body ]
+ * THROW exception
+ *
+ * normalExit:
+ * [ finally body ]
+ *
+ * each catch[i] will cover body. catchAll will cover both body and each catch[i]
+ * Additional finally copies are created on the emission of every RETURN in the try body and exception handlers.
+ *
+ * This could result in unreachable code which has to be cleaned up later, e.g. if the try and all the exception
+ * handlers always end in RETURN then there will be no "normal" flow out of the try/catch/finally.
+ * Later reachability analysis will remove unreachable code.
*/
def Try(body: Context => Context,
handlers: List[(Symbol, TypeKind, Context => Context)],
finalizer: Tree,
- tree: Tree) = if (forMSIL) TryMsil(body, handlers, finalizer, tree) else {
+ tree: Tree) = {
- val outerCtx = this.dup // context for generating exception handlers, covered by finalizer
+ val outerCtx = this.dup // context for generating exception handlers, covered by the catch-all finalizer
val finalizerCtx = this.dup // context for generating finalizer handler
- val afterCtx = outerCtx.newBlock
+ val normalExitCtx = outerCtx.newBlock() // context where flow will go on a "normal" (non-return, non-throw) exit from a try or catch handler
+ var normalExitReachable = false
var tmp: Local = null
val kind = toTypeKind(tree.tpe)
val guardResult = kind != UNIT && mayCleanStack(finalizer)
@@ -2102,7 +2015,8 @@ abstract class GenICode extends SubComponent {
}
def emitFinalizer(ctx: Context): Context = if (!finalizer.isEmpty) {
- val ctx1 = finalizerCtx.dup.newBlock
+ val ctx1 = finalizerCtx.dup.newBlock()
+ ctx1.bb killIf ctx.bb.ignore
ctx.bb.closeWith(JUMP(ctx1.bb))
if (guardResult) {
@@ -2115,107 +2029,53 @@ abstract class GenICode extends SubComponent {
} else ctx
- val finalizerExh = if (finalizer != EmptyTree) Some({
- val exh = outerCtx.newExceptionHandler(NoSymbol, toTypeKind(finalizer.tpe), finalizer.pos) // finalizer covers exception handlers
- this.addActiveHandler(exh) // .. and body aswell
- val ctx = finalizerCtx.enterExceptionHandler(exh)
- val exception = ctx.makeLocal(finalizer.pos, ThrowableClass.tpe, "exc")
- loadException(ctx, exh, finalizer.pos)
- ctx.bb.emit(STORE_LOCAL(exception));
- val ctx1 = genLoad(finalizer, ctx, UNIT);
- ctx1.bb.emit(LOAD_LOCAL(exception));
- ctx1.bb.emit(THROW(ThrowableClass));
- ctx1.bb.enterIgnoreMode;
- ctx1.bb.close
- finalizerCtx.endHandler()
- exh
- }) else None
-
- val exhs = handlers.map { case (sym, kind, handler) => // def genWildcardHandler(sym: Symbol): (Symbol, TypeKind, Context => Context) =
- val exh = this.newExceptionHandler(sym, kind, tree.pos)
- var ctx1 = outerCtx.enterExceptionHandler(exh)
- ctx1.addFinalizer(finalizer, finalizerCtx)
- loadException(ctx1, exh, tree.pos)
- ctx1 = handler(ctx1)
- // emit finalizer
- val ctx2 = emitFinalizer(ctx1)
- ctx2.bb.closeWith(JUMP(afterCtx.bb))
+ // Generate the catch-all exception handler that deals with uncaught exceptions coming
+ // from the try or exception handlers. It catches the exception, runs the finally code, then rethrows
+ // the exception
+ if (settings.YdisableUnreachablePrevention.value || !outerCtx.bb.ignore) {
+ if (finalizer != EmptyTree) {
+ val exh = outerCtx.newExceptionHandler(NoSymbol, finalizer.pos) // finalizer covers exception handlers
+ this.addActiveHandler(exh) // .. and the body as well
+ val exhStartCtx = finalizerCtx.enterExceptionHandler(exh)
+ exhStartCtx.bb killIf outerCtx.bb.ignore
+ val exception = exhStartCtx.makeLocal(finalizer.pos, ThrowableClass.tpe, "exc")
+ loadException(exhStartCtx, exh, finalizer.pos)
+ exhStartCtx.bb.emit(STORE_LOCAL(exception))
+ val exhEndCtx = genLoad(finalizer, exhStartCtx, UNIT)
+ exhEndCtx.bb.emit(LOAD_LOCAL(exception))
+ exhEndCtx.bb.closeWith(THROW(ThrowableClass))
+ exhEndCtx.bb.enterIgnoreMode()
+ finalizerCtx.endHandler()
+ }
+
+ // Generate each exception handler
+ for ((sym, kind, handler) <- handlers) {
+ val exh = this.newExceptionHandler(sym, tree.pos)
+ val exhStartCtx = outerCtx.enterExceptionHandler(exh)
+ exhStartCtx.bb killIf outerCtx.bb.ignore
+ exhStartCtx.addFinalizer(finalizer, finalizerCtx)
+ loadException(exhStartCtx, exh, tree.pos)
+ val exhEndCtx = handler(exhStartCtx)
+ normalExitReachable ||= !exhEndCtx.bb.ignore
+ exhEndCtx.bb.closeWith(JUMP(normalExitCtx.bb))
outerCtx.endHandler()
- exh
}
- val bodyCtx = this.newBlock
- if (finalizer != EmptyTree)
- bodyCtx.addFinalizer(finalizer, finalizerCtx)
-
- var finalCtx = body(bodyCtx)
- finalCtx = emitFinalizer(finalCtx)
-
- outerCtx.bb.closeWith(JUMP(bodyCtx.bb))
-
- finalCtx.bb.closeWith(JUMP(afterCtx.bb))
-
- afterCtx
- }
-
-
- /** try-catch-finally blocks are actually simpler to emit in MSIL, because there
- * is support for `finally` in bytecode.
- *
- * A
- * try { .. } catch { .. } finally { .. }
- * block is de-sugared into
- * try { try { ..} catch { .. } } finally { .. }
- *
- * In ICode `finally` block is represented exactly the same as an exception handler,
- * but with `NoSymbol` as the exception class. The covered blocks are all blocks of
- * the `try { .. } catch { .. }`.
- *
- * Also, TryMsil does not enter any Finalizers into the `cleanups`, because the
- * CLI takes care of running the finalizer when seeing a `leave` statement inside
- * a try / catch.
- */
- def TryMsil(body: Context => Context,
- handlers: List[(Symbol, TypeKind, (Context => Context))],
- finalizer: Tree,
- tree: Tree) = {
-
- val outerCtx = this.dup // context for generating exception handlers, covered by finalizer
- val finalizerCtx = this.dup // context for generating finalizer handler
- val afterCtx = outerCtx.newBlock
-
- if (finalizer != EmptyTree) {
- // finalizer is covers try and all catch blocks, i.e.
- // try { try { .. } catch { ..} } finally { .. }
- val exh = outerCtx.newExceptionHandler(NoSymbol, UNIT, tree.pos)
- this.addActiveHandler(exh)
- val ctx = finalizerCtx.enterExceptionHandler(exh)
- loadException(ctx, exh, tree.pos)
- val ctx1 = genLoad(finalizer, ctx, UNIT)
- // need jump for the ICode to be valid. MSIL backend will emit `Endfinally` instead.
- ctx1.bb.closeWith(JUMP(afterCtx.bb))
- finalizerCtx.endHandler()
- }
-
- for (handler <- handlers) {
- val exh = this.newExceptionHandler(handler._1, handler._2, tree.pos)
- var ctx1 = outerCtx.enterExceptionHandler(exh)
- loadException(ctx1, exh, tree.pos)
- ctx1 = handler._3(ctx1)
- // msil backend will emit `Leave` to jump out of a handler
- ctx1.bb.closeWith(JUMP(afterCtx.bb))
- outerCtx.endHandler()
}
- val bodyCtx = this.newBlock
+ val bodyCtx = this.newBlock()
+ bodyCtx.bb killIf outerCtx.bb.ignore
+ if (finalizer != EmptyTree)
+ bodyCtx.addFinalizer(finalizer, finalizerCtx)
- val finalCtx = body(bodyCtx)
+ val bodyEndCtx = body(bodyCtx)
outerCtx.bb.closeWith(JUMP(bodyCtx.bb))
- // msil backend will emit `Leave` to jump out of a try-block
- finalCtx.bb.closeWith(JUMP(afterCtx.bb))
+ normalExitReachable ||= !bodyEndCtx.bb.ignore
+ normalExitCtx.bb killUnless normalExitReachable
+ bodyEndCtx.bb.closeWith(JUMP(normalExitCtx.bb))
- afterCtx
+ emitFinalizer(normalExitCtx)
}
}
}
@@ -2249,7 +2109,7 @@ abstract class GenICode extends SubComponent {
/** Add an instruction that refers to this label. */
def addCallingInstruction(i: Instruction) =
- toPatch = i :: toPatch;
+ toPatch = i :: toPatch
/**
* Patch the code by replacing pseudo call instructions with
@@ -2311,7 +2171,7 @@ abstract class GenICode extends SubComponent {
// register with the given label
if (!label.anchored)
- label.addCallingInstruction(this);
+ label.addCallingInstruction(this)
}
case class PJUMP(whereto: Label) extends PseudoJUMP(whereto)
@@ -2341,7 +2201,6 @@ abstract class GenICode extends SubComponent {
val locals: ListBuffer[Local] = new ListBuffer
def add(l: Local) = locals += l
- def remove(l: Local) = locals -= l
/** Return all locals that are in scope. */
def varsInScope: Buffer[Local] = outer.varsInScope.clone() ++= locals
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
index f05def3123..82fdcbbc04 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
@@ -9,7 +9,6 @@ package icode
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
-import scala.tools.nsc.symtab._
abstract class ICodeCheckers {
val global: Global
@@ -49,7 +48,7 @@ abstract class ICodeCheckers {
* @author Iulian Dragos
* @version 1.0, 06/09/2005
*
- * @todo Better checks for <code>MONITOR_ENTER/EXIT</code>
+ * @todo Better checks for `MONITOR_ENTER/EXIT`
* Better checks for local var initializations
*
* @todo Iulian says: I think there's some outdated logic in the checker.
@@ -103,7 +102,6 @@ abstract class ICodeCheckers {
private def posStr(p: Position) =
if (p.isDefined) p.line.toString else "<??>"
- private def indent(s: String, spaces: Int): String = indent(s, " " * spaces)
private def indent(s: String, prefix: String): String = {
val lines = s split "\\n"
lines map (prefix + _) mkString "\n"
@@ -121,11 +119,11 @@ abstract class ICodeCheckers {
clasz = cls
for (f1 <- cls.fields ; f2 <- cls.fields ; if f1 < f2)
- if (isConfict(f1, f2, false))
+ if (isConfict(f1, f2, canOverload = false))
icodeError("Repetitive field name: " + f1.symbol.fullName)
for (m1 <- cls.methods ; m2 <- cls.methods ; if m1 < m2)
- if (isConfict(m1, m2, true))
+ if (isConfict(m1, m2, canOverload = true))
icodeError("Repetitive method: " + m1.symbol.fullName)
clasz.methods foreach check
@@ -170,7 +168,6 @@ abstract class ICodeCheckers {
val preds = bl.predecessors
def hasNothingType(s: TypeStack) = s.nonEmpty && (s.head == NothingReference)
- def hasNullType(s: TypeStack) = s.nonEmpty && (s.head == NullReference)
/** XXX workaround #1: one stack empty, the other has BoxedUnit.
* One example where this arises is:
@@ -211,7 +208,7 @@ abstract class ICodeCheckers {
if (s1.length != s2.length) {
if (allUnits(s1) && allUnits(s2))
workaround("Ignoring mismatched boxed units")
- else if (isHandlerBlock)
+ else if (isHandlerBlock())
workaround("Ignoring mismatched stacks entering exception handler")
else
throw new CheckerException(incompatibleString)
@@ -236,8 +233,8 @@ abstract class ICodeCheckers {
}
if (preds.nonEmpty) {
- in(bl) = (preds map out.apply) reduceLeft meet2;
- log("Input changed for block: " + bl +" to: " + in(bl));
+ in(bl) = (preds map out.apply) reduceLeft meet2
+ log("Input changed for block: " + bl +" to: " + in(bl))
}
}
@@ -296,7 +293,7 @@ abstract class ICodeCheckers {
else prefix + " with initial stack " + initial.types.mkString("[", ", ", "]")
})
- var stack = new TypeStack(initial)
+ val stack = new TypeStack(initial)
def checkStack(len: Int) {
if (stack.length < len)
ICodeChecker.this.icodeError("Expected at least " + len + " elements on the stack", stack)
@@ -324,14 +321,14 @@ abstract class ICodeCheckers {
def popStackN(num: Int, instrFn: () => String = defaultInstrPrinter) = {
List.range(0, num) map { _ =>
val res = _popStack
- printStackString(false, res, instrFn())
+ printStackString(isPush = false, res, instrFn())
res
}
}
def pushStackN(xs: Seq[TypeKind], instrFn: () => String) = {
xs foreach { x =>
stack push x
- printStackString(true, x, instrFn())
+ printStackString(isPush = true, x, instrFn())
}
}
@@ -354,7 +351,7 @@ abstract class ICodeCheckers {
def typeError(k1: TypeKind, k2: TypeKind) {
icodeError("\n expected: " + k1 + "\n found: " + k2)
}
- def isSubtype(k1: TypeKind, k2: TypeKind) = (k1 <:< k2) || {
+ def isSubtype(k1: TypeKind, k2: TypeKind) = (k1 isAssignabledTo k2) || {
import platform.isMaybeBoxed
(k1, k2) match {
@@ -369,11 +366,6 @@ abstract class ICodeCheckers {
}
}
- /** Return true if k1 is a subtype of any of the following types,
- * according to the somewhat relaxed subtyping standards in effect here.
- */
- def isOneOf(k1: TypeKind, kinds: TypeKind*) = kinds exists (k => isSubtype(k1, k))
-
def subtypeTest(k1: TypeKind, k2: TypeKind): Unit =
if (isSubtype(k1, k2)) ()
else typeError(k2, k1)
@@ -381,17 +373,16 @@ abstract class ICodeCheckers {
for (instr <- b) {
this.instruction = instr
- def checkLocal(local: Local): Unit = {
- method lookupLocal local.sym.name getOrElse {
- icodeError(" " + local + " is not defined in method " + method)
- }
+ def checkLocal(local: Local) {
+ if ((method lookupLocal local.sym.name).isEmpty)
+ icodeError(s" $local is not defined in method $method")
}
def checkField(obj: TypeKind, field: Symbol): Unit = obj match {
case REFERENCE(sym) =>
if (sym.info.member(field.name) == NoSymbol)
- icodeError(" " + field + " is not defined in class " + clasz);
+ icodeError(" " + field + " is not defined in class " + clasz)
case _ =>
- icodeError(" expected reference type, but " + obj + " found");
+ icodeError(" expected reference type, but " + obj + " found")
}
/** Checks that tpe is a subtype of one of the allowed types */
@@ -422,20 +413,17 @@ abstract class ICodeCheckers {
}
/** Checks that the object passed as receiver has a method
- * <code>method</code> and that it is callable from the current method.
- *
- * @param receiver ...
- * @param method ...
+ * `method` and that it is callable from the current method.
*/
def checkMethod(receiver: TypeKind, method: Symbol) =
receiver match {
case REFERENCE(sym) =>
checkBool(sym.info.member(method.name) != NoSymbol,
- "Method " + method + " does not exist in " + sym.fullName);
+ "Method " + method + " does not exist in " + sym.fullName)
if (method.isPrivate)
checkBool(method.owner == clasz.symbol,
"Cannot call private method of " + method.owner.fullName
- + " from " + clasz.symbol.fullName);
+ + " from " + clasz.symbol.fullName)
else if (method.isProtected) {
val isProtectedOK = (
(clasz.symbol isSubClass method.owner) ||
@@ -444,7 +432,7 @@ abstract class ICodeCheckers {
checkBool(isProtectedOK,
"Cannot call protected method of " + method.owner.fullName
- + " from " + clasz.symbol.fullName);
+ + " from " + clasz.symbol.fullName)
}
case ARRAY(_) =>
@@ -476,8 +464,8 @@ abstract class ICodeCheckers {
subtypeTest(elem, kind)
pushStack(elem)
case (a, b) =>
- icodeError(" expected and INT and a array reference, but " +
- a + ", " + b + " found");
+ icodeError(" expected an INT and an array reference, but " +
+ a + ", " + b + " found")
}
case LOAD_LOCAL(local) =>
@@ -495,10 +483,10 @@ abstract class ICodeCheckers {
case LOAD_MODULE(module) =>
checkBool((module.isModule || module.isModuleClass),
- "Expected module: " + module + " flags: " + Flags.flagsToString(module.flags));
- pushStack(toTypeKind(module.tpe));
+ "Expected module: " + module + " flags: " + module.flagString)
+ pushStack(toTypeKind(module.tpe))
- case STORE_THIS(kind) =>
+ case STORE_THIS(kind) =>
val actualType = popStack
if (actualType.isReferenceType) subtypeTest(actualType, kind)
else icodeError("Expected this reference but found: " + actualType)
@@ -510,7 +498,7 @@ abstract class ICodeCheckers {
subtypeTest(k, elem)
case (a, b, c) =>
icodeError(" expected and array reference, and int and " + kind +
- " but " + a + ", " + b + ", " + c + " found");
+ " but " + a + ", " + b + ", " + c + " found")
}
case STORE_LOCAL(local) =>
@@ -606,7 +594,7 @@ abstract class ICodeCheckers {
case x if style.hasInstance => x + 1
case x => x
}
- if (style == Static(true))
+ if (style == Static(onInstance = true))
checkBool(method.isPrivate || method.isConstructor, "Static call to non-private method.")
checkStack(paramCount)
@@ -665,7 +653,7 @@ abstract class ICodeCheckers {
case RETURN(kind) =>
val top = popStack
if (kind.isValueType) checkType(top, kind)
- else checkBool(!top.isValueType, "" + kind + " is a reference type, but " + top + " is not");
+ else checkBool(!top.isValueType, "" + kind + " is a reference type, but " + top + " is not")
case THROW(clasz) =>
checkType(popStack, toTypeKind(clasz.tpe))
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
index 93201089e4..e2d387c65d 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
@@ -8,8 +8,6 @@ package backend
package icode
import java.io.PrintWriter
-import scala.collection.mutable
-import scala.tools.nsc.symtab._
import analysis.{ Liveness, ReachingDefinitions }
import scala.tools.nsc.symtab.classfile.ICodeReader
@@ -30,14 +28,14 @@ abstract class ICodes extends AnyRef
with Repository
{
val global: Global
- import global.{ log, definitions, settings, perRunCaches }
+ import global.{ log, definitions, settings, perRunCaches, devWarning }
/** The ICode representation of classes */
val classes = perRunCaches.newMap[global.Symbol, IClass]()
/** Debugging flag */
def shouldCheckIcode = settings.check contains global.genicode.phaseName
- def checkerDebug(msg: String) = if (shouldCheckIcode && global.opt.debug) println(msg)
+ def checkerDebug(msg: String) = if (shouldCheckIcode && global.settings.debug.value) println(msg)
/** The ICode linearizer. */
val linearizer: Linearizer = settings.Xlinearizer.value match {
@@ -84,7 +82,7 @@ abstract class ICodes extends AnyRef
// Something is leaving open/empty blocks around (see SI-4840) so
// let's not kill the deal unless it's nonempty.
if (b.isEmpty) {
- log("!!! Found open but empty block while inlining " + m + ": removing from block list.")
+ devWarning(s"Found open but empty block while inlining $m: removing from block list.")
m.code removeBlock b
}
else dumpMethodAndAbort(m, b)
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
index a38eab4515..c5fe3228a3 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
@@ -8,7 +8,6 @@ package scala.tools.nsc
package backend
package icode
-import scala.tools.nsc.ast._
import scala.collection.{ mutable, immutable }
import mutable.ListBuffer
@@ -36,15 +35,15 @@ trait Linearizers {
var blocks: List[BasicBlock] = Nil
def linearize(m: IMethod): List[BasicBlock] = {
- val b = m.startBlock;
- blocks = Nil;
+ val b = m.startBlock
+ blocks = Nil
run {
- worklist pushAll (m.exh map (_.startBlock));
- worklist.push(b);
+ worklist pushAll (m.exh map (_.startBlock))
+ worklist.push(b)
}
- blocks.reverse;
+ blocks.reverse
}
def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = {
@@ -56,30 +55,30 @@ trait Linearizers {
/** Linearize another subtree and append it to the existing blocks. */
def linearize(startBlock: BasicBlock): List[BasicBlock] = {
//blocks = startBlock :: Nil;
- run( { worklist.push(startBlock); } );
- blocks.reverse;
+ run( { worklist.push(startBlock); } )
+ blocks.reverse
}
def processElement(b: BasicBlock) =
if (b.nonEmpty) {
- add(b);
+ add(b)
b.lastInstruction match {
case JUMP(whereto) =>
- add(whereto);
+ add(whereto)
case CJUMP(success, failure, _, _) =>
- add(success);
- add(failure);
+ add(success)
+ add(failure)
case CZJUMP(success, failure, _, _) =>
- add(success);
- add(failure);
+ add(success)
+ add(failure)
case SWITCH(_, labels) =>
- add(labels);
- case RETURN(_) => ();
- case THROW(clasz) => ();
+ add(labels)
+ case RETURN(_) => ()
+ case THROW(clasz) => ()
}
}
- def dequeue: Elem = worklist.pop;
+ def dequeue: Elem = worklist.pop()
/**
* Prepend b to the list, if not already scheduled.
@@ -89,25 +88,25 @@ trait Linearizers {
if (blocks.contains(b))
()
else {
- blocks = b :: blocks;
- worklist push b;
+ blocks = b :: blocks
+ worklist push b
}
}
- def add(bs: List[BasicBlock]): Unit = bs foreach add;
+ def add(bs: List[BasicBlock]): Unit = bs foreach add
}
/**
* Linearize code using a depth first traversal.
*/
class DepthFirstLinerizer extends Linearizer {
- var blocks: List[BasicBlock] = Nil;
+ var blocks: List[BasicBlock] = Nil
def linearize(m: IMethod): List[BasicBlock] = {
- blocks = Nil;
+ blocks = Nil
- dfs(m.startBlock);
- m.exh foreach (b => dfs(b.startBlock));
+ dfs(m.startBlock)
+ m.exh foreach (b => dfs(b.startBlock))
blocks.reverse
}
@@ -120,7 +119,7 @@ trait Linearizers {
def dfs(b: BasicBlock): Unit =
if (b.nonEmpty && add(b))
- b.successors foreach dfs;
+ b.successors foreach dfs
/**
* Prepend b to the list, if not already scheduled.
@@ -129,7 +128,7 @@ trait Linearizers {
*/
def add(b: BasicBlock): Boolean =
!(blocks contains b) && {
- blocks = b :: blocks;
+ blocks = b :: blocks
true
}
}
@@ -145,12 +144,12 @@ trait Linearizers {
val added = new mutable.BitSet
def linearize(m: IMethod): List[BasicBlock] = {
- blocks = Nil;
+ blocks = Nil
visited.clear()
- added.clear;
+ added.clear()
- m.exh foreach (b => rpo(b.startBlock));
- rpo(m.startBlock);
+ m.exh foreach (b => rpo(b.startBlock))
+ rpo(m.startBlock)
// if the start block has predecessors, it won't be the first one
// in the linearization, so we need to enforce it here
@@ -171,7 +170,7 @@ trait Linearizers {
def rpo(b: BasicBlock): Unit =
if (b.nonEmpty && !visited(b)) {
- visited += b;
+ visited += b
b.successors foreach rpo
add(b)
}
@@ -185,7 +184,7 @@ trait Linearizers {
if (!added(b.label)) {
added += b.label
- blocks = b :: blocks;
+ blocks = b :: blocks
}
}
}
@@ -198,142 +197,4 @@ trait Linearizers {
def linearize(m: IMethod): List[BasicBlock] = m.blocks
def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = sys.error("not implemented")
}
-
- /** The MSIL linearizer is used only for methods with at least one exception handler.
- * It makes sure that all the blocks belonging to a `try`, `catch` or `finally` block
- * are emitted in an order that allows the lexical nesting of try-catch-finally, just
- * like in the source code.
- */
- class MSILLinearizer extends Linearizer {
- /** The MSIL linearizer first calls a NormalLInearizer. This is because the ILGenerator checks
- * the stack size before emitting instructions. For instance, to emit a `store`, there needs
- * to be some value on the stack. This can blow up in situations like this:
- * ...
- * jump 3
- * 4: store_local 0
- * jump 5
- * 3: load_value
- * jump 4
- * 5: ...
- * here, 3 must be scheduled first.
- *
- * The NormalLinearizer also removes dead blocks (blocks without predecessor). This is important
- * in the following example:
- * try { throw new Exception }
- * catch { case e => throw e }
- * which adds a dead block containing just a "throw" (which, again, would blow up code generation
- * because of the stack size; there's no value on the stack when emitting that `throw`)
- */
- val normalLinearizer = new NormalLinearizer()
-
- def linearize(m: IMethod): List[BasicBlock] = {
-
- val handlersByCovered = m.exh.groupBy(_.covered)
-
- // number of basic blocks covered by the entire try-catch expression
- def size(covered: scala.collection.immutable.Set[BasicBlock]) = {
- val hs = handlersByCovered(covered)
- covered.size + (hs :\ 0)((h, s) => h.blocks.length + s)
- }
-
- val tryBlocks = handlersByCovered.keys.toList sortBy size
- var result = normalLinearizer.linearize(m)
- val frozen = mutable.HashSet[BasicBlock](result.head)
-
- for (tryBlock <- tryBlocks) {
- result = groupBlocks(m, result, handlersByCovered(tryBlock), frozen)
- }
- result
- }
-
- /** @param handlers a list of handlers covering the same blocks (same try, multiple catches)
- * @param frozen blocks can't be moved (first block of a method, blocks directly following a try-catch)
- */
- def groupBlocks(method: IMethod, blocks: List[BasicBlock], handlers: List[ExceptionHandler], frozen: mutable.HashSet[BasicBlock]) = {
- assert(blocks.head == method.startBlock, method)
-
- // blocks before the try, and blocks for the try
- val beforeAndTry = new ListBuffer[BasicBlock]()
- // blocks for the handlers
- val catches = handlers map (_ => new ListBuffer[BasicBlock]())
- // blocks to be put at the end
- val after = new ListBuffer[BasicBlock]()
-
- var beforeTry = true
- val head = handlers.head
-
- for (b <- blocks) {
- if (head covers b) {
- beforeTry = false
- beforeAndTry += b
- } else {
- val handlerIndex = handlers.indexWhere(_.blocks.contains(b))
- if (handlerIndex >= 0) {
- catches(handlerIndex) += b
- } else if (beforeTry) {
- beforeAndTry += b
- } else {
- after += b
- }
- }
- }
-
- // reorder the blocks in "catches" so that the "firstBlock" is actually first
- (catches, handlers).zipped foreach { (lb, handler) =>
- lb -= handler.startBlock
- handler.startBlock +=: lb
- }
-
- // The first block emitted after a try-catch must be the one that the try / catch
- // blocks jump to (because in msil, these jumps cannot be emitted manually)
- var firstAfter: Option[BasicBlock] = None
-
- // Find the (hopefully) unique successor, look at the try and all catch blocks
- var blks = head.covered.toList :: handlers.map(_.blocks)
- while (firstAfter.isEmpty && !blks.isEmpty) {
- val b = blks.head
- blks = blks.tail
-
- val leaving = leavingBlocks(b)
- // no leaving blocks when the try or catch ends with THROW or RET
- if (!leaving.isEmpty) {
- assert(leaving.size <= 1, leaving)
- firstAfter = Some(leaving.head)
- }
- }
- if (firstAfter.isDefined) {
- val b = firstAfter.get
- if (frozen(b)) {
- assert(after contains b, b +", "+ method)
- } else {
- frozen += b
- if (beforeAndTry contains b) {
- beforeAndTry -= b
- } else {
- assert(after contains b, after)
- after -= b
- }
- b +=: after
- }
- }
-
- for (lb <- catches) { beforeAndTry ++= lb }
- beforeAndTry ++= after
- beforeAndTry.toList
- }
-
- /** Returns all direct successors of `blocks` which are not part
- * of that list, i.e. successors outside the `blocks` list.
- */
- private def leavingBlocks(blocks: List[BasicBlock]) = {
- val res = new mutable.HashSet[BasicBlock]()
- for (b <- blocks; s <- b.directSuccessors; if (!blocks.contains(s)))
- res += s
- res
- }
-
- def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = {
- sys.error("not implemented")
- }
- }
}
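
The MSILLinearizer removed above groups blocks so that each try/catch/finally region stays lexically contiguous; its small helper `leavingBlocks` (successors that point out of a given block set) is the piece that decides which block must come right after the region. Below is a stand-alone sketch of that helper over a toy CFG, with invented names rather than the removed code itself. The removed linearizer additionally asserts that at most one such leaving block exists, since MSIL cannot emit those region-exit jumps explicitly.

    object LeavingBlocksSketch {
      // Toy CFG: block label -> direct successor labels.
      type CFG = Map[Int, List[Int]]

      // All successors of `region` that are not themselves in `region`,
      // i.e. the places control flow can leave a try/catch region through.
      def leavingBlocks(region: Set[Int], cfg: CFG): Set[Int] =
        for {
          b <- region
          s <- cfg.getOrElse(b, Nil)
          if !region(s)
        } yield s

      def main(args: Array[String]): Unit = {
        val cfg: CFG = Map(1 -> List(2), 2 -> List(3, 4), 3 -> List(5), 4 -> List(5), 5 -> Nil)
        println(leavingBlocks(Set(2, 3, 4), cfg))   // Set(5): the single block reached after the region
      }
    }
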
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index 7ba212f42e..e471f4256b 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -7,10 +7,8 @@ package scala.tools.nsc
package backend
package icode
-import java.io.PrintWriter
import scala.collection.{ mutable, immutable }
import scala.reflect.internal.util.{ SourceFile, NoSourceFile }
-import symtab.Flags.{ DEFERRED }
trait ReferenceEquality {
override def hashCode = System.identityHashCode(this)
@@ -48,28 +46,34 @@ trait Members {
def touched = _touched
def touched_=(b: Boolean): Unit = {
- if (b)
- blocks foreach (_.touched = true)
+ @annotation.tailrec def loop(xs: List[BasicBlock]) {
+ xs match {
+ case Nil =>
+ case x :: xs => x.touched = true ; loop(xs)
+ }
+ }
+ if (b) loop(blocks.toList)
_touched = b
}
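
The hunk above trades `blocks foreach (_.touched = true)` for an explicit tail-recursive loop. The general pattern, shown here on a plain list with an invented helper name, is a sketch rather than the compiler's code.

    import scala.annotation.tailrec

    object TailrecLoopSketch {
      // Replaces `xs foreach f` with a hand-written loop; because the recursive
      // call is in tail position, @tailrec guarantees it compiles to a plain loop.
      @tailrec def loop[A](xs: List[A])(f: A => Unit): Unit = xs match {
        case Nil       => ()
        case x :: rest => f(x); loop(rest)(f)
      }

      def main(args: Array[String]): Unit =
        loop(List(1, 2, 3))(println)   // prints 1, 2, 3 without growing the call stack
    }
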
// Constructor code
- startBlock = newBlock
+ startBlock = newBlock()
def removeBlock(b: BasicBlock) {
if (settings.debug.value) {
- assert(blocks forall (p => !(p.successors contains b)),
- "Removing block that is still referenced in method code " + b + "preds: " + b.predecessors
- )
- assert(b != startBlock || b.successors.length == 1,
- "Removing start block with more than one successor."
- )
+ // only do this sanity check when debug is turned on because it's moderately expensive
+ val referers = blocks filter (_.successors contains b)
+ assert(referers.isEmpty, s"Trying to remove block $b (with preds ${b.predecessors.mkString}) but it is still referred to from block(s) ${referers.mkString}")
}
- if (b == startBlock)
+ if (b == startBlock) {
+ assert(b.successors.length == 1,
+ s"Removing start block ${b} with ${b.successors.length} successors (${b.successors.mkString})."
+ )
startBlock = b.successors.head
-
+ }
+
blocks -= b
assert(!blocks.contains(b))
method.exh filter (_ covers b) foreach (_.covered -= b)
@@ -77,7 +81,7 @@ trait Members {
}
/** This methods returns a string representation of the ICode */
- override def toString = "ICode '" + name + "'";
+ override def toString = "ICode '" + name + "'"
/* Compute a unique new label */
def nextLabel: Int = {
@@ -89,8 +93,8 @@ trait Members {
*/
def newBlock(): BasicBlock = {
touched = true
- val block = new BasicBlock(nextLabel, method);
- blocks += block;
+ val block = new BasicBlock(nextLabel, method)
+ blocks += block
block
}
}
@@ -112,25 +116,23 @@ trait Members {
var cunit: CompilationUnit = _
def addField(f: IField): this.type = {
- fields = f :: fields;
+ fields = f :: fields
this
}
def addMethod(m: IMethod): this.type = {
- methods = m :: methods;
+ methods = m :: methods
this
}
def setCompilationUnit(unit: CompilationUnit): this.type = {
- this.cunit = unit;
+ this.cunit = unit
this
}
override def toString() = symbol.fullName
- def lookupField(s: Symbol) = fields find (_.symbol == s)
def lookupMethod(s: Symbol) = methods find (_.symbol == s)
- def lookupMethod(s: Name) = methods find (_.symbol.name == s)
/* returns this methods static ctor if it has one. */
def lookupStaticCtor: Option[IMethod] = methods find (_.symbol.isStaticConstructor)
@@ -154,14 +156,13 @@ trait Members {
class IMethod(val symbol: Symbol) extends IMember {
var code: Code = NoCode
- def newBlock() = code.newBlock
+ def newBlock() = code.newBlock()
def startBlock = code.startBlock
def lastBlock = { assert(blocks.nonEmpty, symbol); blocks.last }
def blocks = code.blocksList
def linearizedBlocks(lin: Linearizer = self.linearizer): List[BasicBlock] = lin linearize this
def foreachBlock[U](f: BasicBlock => U): Unit = blocks foreach f
- def foreachInstr[U](f: Instruction => U): Unit = foreachBlock(_.toList foreach f)
var native = false
@@ -180,7 +181,7 @@ trait Members {
def hasCode = code ne NoCode
def setCode(code: Code): IMethod = {
- this.code = code;
+ this.code = code
this
}
@@ -194,7 +195,6 @@ trait Members {
}
def addLocals(ls: List[Local]) = ls foreach addLocal
- def addParams(as: List[Local]) = as foreach addParam
def lookupLocal(n: Name): Option[Local] = locals find (_.sym.name == n)
def lookupLocal(sym: Symbol): Option[Local] = locals find (_.sym == sym)
@@ -209,28 +209,7 @@ trait Members {
override def toString() = symbol.fullName
- def matchesSignature(other: IMethod) = {
- (symbol.name == other.symbol.name) &&
- (params corresponds other.params)(_.kind == _.kind) &&
- (returnType == other.returnType)
- }
-
import opcodes._
- def checkLocals(): Unit = {
- def localsSet = (code.blocks flatMap { bb =>
- bb.iterator collect {
- case LOAD_LOCAL(l) => l
- case STORE_LOCAL(l) => l
- }
- }).toSet
-
- if (hasCode) {
- log("[checking locals of " + this + "]")
- locals filterNot localsSet foreach { l =>
- log("Local " + l + " is not declared in " + this)
- }
- }
- }
/** Merge together blocks that have a single successor which has a
* single predecessor. Exception handlers are taken into account (they
@@ -242,10 +221,10 @@ trait Members {
val nextBlock: mutable.Map[BasicBlock, BasicBlock] = mutable.HashMap.empty
for (b <- code.blocks.toList
if b.successors.length == 1;
- succ = b.successors.head;
- if succ ne b;
- if succ.predecessors.length == 1;
- if succ.predecessors.head eq b;
+ succ = b.successors.head
+ if succ ne b
+ if succ.predecessors.length == 1
+ if succ.predecessors.head eq b
if !(exh.exists { (e: ExceptionHandler) =>
(e.covers(succ) && !e.covers(b)) || (e.covers(b) && !e.covers(succ)) })) {
nextBlock(b) = succ
@@ -254,10 +233,10 @@ trait Members {
var bb = code.startBlock
while (!nextBlock.isEmpty) {
if (nextBlock.isDefinedAt(bb)) {
- bb.open
+ bb.open()
var succ = bb
do {
- succ = nextBlock(succ);
+ succ = nextBlock(succ)
val lastInstr = bb.lastInstruction
/* Ticket SI-5672
* Besides removing the control-flow instruction at the end of `bb` (usually a JUMP), we have to pop any values it pushes.
@@ -268,7 +247,7 @@ trait Members {
val oldTKs = lastInstr.consumedTypes
assert(lastInstr.consumed == oldTKs.size, "Someone forgot to override consumedTypes() in " + lastInstr)
- bb.removeLastInstruction
+ bb.removeLastInstruction()
for(tk <- oldTKs.reverse) { bb.emit(DROP(tk), lastInstr.pos) }
succ.toList foreach { i => bb.emit(i, i.pos) }
code.removeBlock(succ)
@@ -276,9 +255,9 @@ trait Members {
nextBlock -= bb
} while (nextBlock.isDefinedAt(succ))
- bb.close
+ bb.close()
} else
- bb = nextBlock.keysIterator.next
+ bb = nextBlock.keysIterator.next()
}
checkValid(this)
}
@@ -296,9 +275,6 @@ trait Members {
/** Starting PC for this local's visibility range. */
var start: Int = _
- /** Ending PC for this local's visibility range. */
- var end: Int = _
-
/** PC-based ranges for this local variable's visibility */
var ranges: List[(Int, Int)] = Nil
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
index 8c9a72638d..d8aac8e9db 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
@@ -3,13 +3,10 @@
* @author Martin Odersky
*/
-
-
package scala.tools.nsc
package backend
package icode
-import scala.tools.nsc.ast._
import scala.reflect.internal.util.{Position,NoPosition}
/*
@@ -67,7 +64,7 @@ import scala.reflect.internal.util.{Position,NoPosition}
* in the source files.
*/
trait Opcodes { self: ICodes =>
- import global.{Symbol, NoSymbol, Type, Name, Constant};
+ import global.{Symbol, NoSymbol, Name, Constant}
// categories of ICode instructions
final val localsCat = 1
@@ -111,17 +108,11 @@ trait Opcodes { self: ICodes =>
// Vlad: I wonder why we keep producedTypes around -- it looks like a useless thing to have
def producedTypes: List[TypeKind] = Nil
- /** This method returns the difference of size of the stack when the instruction is used */
- def difference = produced-consumed
-
/** The corresponding position in the source file */
private var _pos: Position = NoPosition
def pos: Position = _pos
- /** Used by dead code elimination. */
- var useful: Boolean = false
-
def setPos(p: Position): this.type = {
_pos = p
this
@@ -133,13 +124,6 @@ trait Opcodes { self: ICodes =>
}
object opcodes {
-
- def mayThrow(i: Instruction): Boolean = i match {
- case LOAD_LOCAL(_) | STORE_LOCAL(_) | CONSTANT(_) | THIS(_) | CZJUMP(_, _, _, _)
- | DROP(_) | DUP(_) | RETURN(_) | LOAD_EXCEPTION(_) | JUMP(_) | CJUMP(_, _, _, _) => false
- case _ => true
- }
-
/** Loads "this" on top of the stack.
* Stack: ...
* ->: ...:ref
@@ -211,7 +195,7 @@ trait Opcodes { self: ICodes =>
case class LOAD_FIELD(field: Symbol, isStatic: Boolean) extends Instruction {
/** Returns a string representation of this instruction */
override def toString(): String =
- "LOAD_FIELD " + (if (isStatic) field.fullName else field.toString());
+ "LOAD_FIELD " + (if (isStatic) field.fullName else field.toString())
override def consumed = if (isStatic) 0 else 1
override def produced = 1
@@ -273,16 +257,17 @@ trait Opcodes { self: ICodes =>
case class STORE_FIELD(field: Symbol, isStatic: Boolean) extends Instruction {
/** Returns a string representation of this instruction */
override def toString(): String =
- "STORE_FIELD "+field + (if (isStatic) " (static)" else " (dynamic)");
+ "STORE_FIELD "+field + (if (isStatic) " (static)" else " (dynamic)")
- override def consumed = if(isStatic) 1 else 2;
- override def produced = 0;
+ override def consumed = if(isStatic) 1 else 2
+
+ override def produced = 0
override def consumedTypes =
if (isStatic)
toTypeKind(field.tpe) :: Nil
else
- REFERENCE(field.owner) :: toTypeKind(field.tpe) :: Nil;
+ REFERENCE(field.owner) :: toTypeKind(field.tpe) :: Nil
override def category = fldsCat
}
@@ -436,10 +421,12 @@ trait Opcodes { self: ICodes =>
*/
case class NEW(kind: REFERENCE) extends Instruction {
/** Returns a string representation of this instruction */
- override def toString(): String = "NEW "+ kind;
+ override def toString(): String = "NEW "+ kind
+
+ override def consumed = 0
+
+ override def produced = 1
- override def consumed = 0;
- override def produced = 1;
override def producedTypes = kind :: Nil
/** The corresponding constructor call. */
@@ -455,11 +442,13 @@ trait Opcodes { self: ICodes =>
*/
case class CREATE_ARRAY(elem: TypeKind, dims: Int) extends Instruction {
/** Returns a string representation of this instruction */
- override def toString(): String ="CREATE_ARRAY "+elem + " x " + dims;
+ override def toString(): String ="CREATE_ARRAY "+elem + " x " + dims
+
+ override def consumed = dims
- override def consumed = dims;
override def consumedTypes = List.fill(dims)(INT)
- override def produced = 1;
+ override def produced = 1
+
override def producedTypes = ARRAY(elem) :: Nil
override def category = arraysCat
@@ -548,7 +537,7 @@ trait Opcodes { self: ICodes =>
override def toString(): String = (
"CJUMP (" + kind + ")" +
cond + " ? "+successBlock.label+" : "+failureBlock.label
- );
+ )
override def consumed = 2
override def produced = 0
@@ -571,7 +560,7 @@ trait Opcodes { self: ICodes =>
override def toString(): String = (
"CZJUMP (" + kind + ")" +
cond + " ? "+successBlock.label+" : "+failureBlock.label
- );
+ )
override def consumed = 1
override def produced = 0
@@ -663,10 +652,11 @@ trait Opcodes { self: ICodes =>
*/
case class MONITOR_EXIT() extends Instruction {
/** Returns a string representation of this instruction */
- override def toString(): String ="MONITOR_EXIT";
+ override def toString(): String ="MONITOR_EXIT"
- override def consumed = 1;
- override def produced = 0;
+ override def consumed = 1
+
+ override def produced = 0
override def consumedTypes = ObjectReference :: Nil
@@ -715,8 +705,6 @@ trait Opcodes { self: ICodes =>
/** Is this a static method call? */
def isStatic: Boolean = false
- def isSuper: Boolean = false
-
/** Is this an instance method call? */
def hasInstance: Boolean = true
@@ -750,77 +738,7 @@ trait Opcodes { self: ICodes =>
* On JVM, translated to `invokespecial`.
*/
case class SuperCall(mix: Name) extends InvokeStyle {
- override def isSuper = true
override def toString(): String = { "super(" + mix + ")" }
}
-
-
- // CLR backend
-
- case class CIL_LOAD_LOCAL_ADDRESS(local: Local) extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String = "CIL_LOAD_LOCAL_ADDRESS "+local //+isArgument?" (argument)":"";
-
- override def consumed = 0
- override def produced = 1
-
- override def producedTypes = msil_mgdptr(local.kind) :: Nil
-
- override def category = localsCat
- }
-
- case class CIL_LOAD_FIELD_ADDRESS(field: Symbol, isStatic: Boolean) extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String =
- "CIL_LOAD_FIELD_ADDRESS " + (if (isStatic) field.fullName else field.toString)
-
- override def consumed = if (isStatic) 0 else 1
- override def produced = 1
-
- override def consumedTypes = if (isStatic) Nil else REFERENCE(field.owner) :: Nil;
- override def producedTypes = msil_mgdptr(REFERENCE(field.owner)) :: Nil;
-
- override def category = fldsCat
- }
-
- case class CIL_LOAD_ARRAY_ITEM_ADDRESS(kind: TypeKind) extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String = "CIL_LOAD_ARRAY_ITEM_ADDRESS (" + kind + ")"
-
- override def consumed = 2
- override def produced = 1
-
- override def consumedTypes = ARRAY(kind) :: INT :: Nil
- override def producedTypes = msil_mgdptr(kind) :: Nil
-
- override def category = arraysCat
- }
-
- case class CIL_UNBOX(valueType: TypeKind) extends Instruction {
- override def toString(): String = "CIL_UNBOX " + valueType
- override def consumed = 1
- override def consumedTypes = ObjectReferenceList // actually consumes a 'boxed valueType'
- override def produced = 1
- override def producedTypes = msil_mgdptr(valueType) :: Nil
- override def category = objsCat
- }
-
- case class CIL_INITOBJ(valueType: TypeKind) extends Instruction {
- override def toString(): String = "CIL_INITOBJ " + valueType
- override def consumed = 1
- override def consumedTypes = ObjectReferenceList // actually consumes a managed pointer
- override def produced = 0
- override def category = objsCat
- }
-
- case class CIL_NEWOBJ(method: Symbol) extends Instruction {
- override def toString(): String = "CIL_NEWOBJ " + hostClass.fullName + method.fullName
- var hostClass: Symbol = method.owner;
- override def consumed = method.tpe.paramTypes.length
- override def consumedTypes = method.tpe.paramTypes map toTypeKind
- override def produced = 1
- override def producedTypes = toTypeKind(method.tpe.resultType) :: Nil
- override def category = objsCat
- }
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
index c8579041ba..4fa717309e 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
@@ -6,9 +6,9 @@
package scala.tools.nsc
package backend
-package icode;
+package icode
-import java.io.PrintWriter;
+import java.io.PrintWriter
trait Primitives { self: ICodes =>
@@ -51,12 +51,12 @@ trait Primitives { self: ICodes =>
// type : (src) => dst
// range: src,dst <- { Ix, Ux, Rx }
// jvm : i2{l, f, d}, l2{i, f, d}, f2{i, l, d}, d2{i, l, f}, i2{b, c, s}
- case class Conversion(src: TypeKind, dst: TypeKind) extends Primitive;
+ case class Conversion(src: TypeKind, dst: TypeKind) extends Primitive
// type : (Array[REF]) => I4
// range: type <- { BOOL, Ix, Ux, Rx, REF }
// jvm : arraylength
- case class ArrayLength(kind: TypeKind) extends Primitive;
+ case class ArrayLength(kind: TypeKind) extends Primitive
// type : (buf,el) => buf
// range: lf,rg <- { BOOL, Ix, Ux, Rx, REF, STR }
@@ -76,25 +76,12 @@ trait Primitives { self: ICodes =>
/** Pretty printer for primitives */
class PrimitivePrinter(out: PrintWriter) {
-
def print(s: String): PrimitivePrinter = {
out.print(s)
this
}
def print(o: AnyRef): PrimitivePrinter = print(o.toString())
-
- def printPrimitive(prim: Primitive) = prim match {
- case Negation(kind) =>
- print("!")
-
- case Test(op, kind, zero) =>
- print(op).print(kind)
-
- case Comparison(op, kind) =>
- print(op).print("(").print(kind)
-
- }
}
/** This class represents a comparison operation. */
@@ -243,9 +230,9 @@ trait Primitives { self: ICodes =>
/** Returns a string representation of this operation. */
override def toString(): String = this match {
- case AND => return "AND"
- case OR => return "OR"
- case XOR => return "XOR"
+ case AND => "AND"
+ case OR => "OR"
+ case XOR => "XOR"
case _ => throw new RuntimeException("LogicalOp unknown case")
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
index 6cac641e3e..5b47e3cfff 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
@@ -8,13 +8,9 @@ package backend
package icode
import java.io.PrintWriter
-import scala.tools.nsc.symtab.Flags
-import scala.reflect.internal.util.Position
trait Printers { self: ICodes =>
import global._
- import global.icodes.opcodes._
- import global.icodes._
class TextPrinter(writer: PrintWriter, lin: Linearizer) {
private var margin = 0
@@ -31,15 +27,15 @@ trait Printers { self: ICodes =>
def print(o: Any) { print(o.toString()) }
def println(s: String) {
- print(s);
- println
+ print(s)
+ println()
}
def println() {
out.println()
var i = 0
while (i < margin) {
- print(" ");
+ print(" ")
i += 1
}
}
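
The printer's `println()` pads the freshly started line to the current margin, so `indent()`/`undent()` calls affect whatever is printed next. A tiny stand-alone version (class name invented, assuming only java.io.PrintWriter):

    import java.io.PrintWriter

    class IndentingPrinter(out: PrintWriter, step: Int = 2) {
      private var margin = 0
      def indent(): Unit = margin += step
      def undent(): Unit = margin = math.max(0, margin - step)

      def print(s: String): Unit = out.print(s)
      def println(s: String): Unit = { print(s); println() }

      // As in the TextPrinter above: terminate the line, then pad the *next* line
      // to the current margin. (flush added only so the demo prints immediately)
      def println(): Unit = {
        out.println()
        var i = 0
        while (i < margin) { out.print(" "); i += 1 }
        out.flush()
      }
    }

    object IndentingPrinterDemo {
      def main(args: Array[String]): Unit = {
        // Mirrors the indent()/println()/undent() call pattern used by printClass above.
        val p = new IndentingPrinter(new PrintWriter(System.out))
        p.print("class Foo extends AnyRef")
        p.indent(); p.println(" {")
        p.println("// fields:")
        p.undent(); p.println()
        p.println("}")
      }
    }
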
@@ -57,26 +53,26 @@ trait Printers { self: ICodes =>
}
def printClass(cls: IClass) {
- print(cls.symbol.toString()); print(" extends ");
- printList(cls.symbol.info.parents, ", ");
- indent; println(" {");
- println("// fields:");
- cls.fields.foreach(printField); println;
- println("// methods");
- cls.methods.foreach(printMethod);
- undent; println;
+ print(cls.symbol.toString()); print(" extends ")
+ printList(cls.symbol.info.parents, ", ")
+ indent(); println(" {")
+ println("// fields:")
+ cls.fields.foreach(printField); println()
+ println("// methods")
+ cls.methods.foreach(printMethod)
+ undent(); println()
println("}")
}
def printField(f: IField) {
- print(f.symbol.keyString); print(" ");
- print(f.symbol.nameString); print(": ");
- println(f.symbol.info.toString());
+ print(f.symbol.keyString); print(" ")
+ print(f.symbol.nameString); print(": ")
+ println(f.symbol.info.toString())
}
def printMethod(m: IMethod) {
- print("def "); print(m.symbol.name);
- print("("); printList(printParam)(m.params, ", "); print(")");
+ print("def "); print(m.symbol.name)
+ print("("); printList(printParam)(m.params, ", "); print(")")
print(": "); print(m.symbol.info.resultType)
if (!m.isAbstractMethod) {
@@ -84,40 +80,40 @@ trait Printers { self: ICodes =>
println("locals: " + m.locals.mkString("", ", ", ""))
println("startBlock: " + m.startBlock)
println("blocks: " + m.code.blocks.mkString("[", ",", "]"))
- println
+ println()
lin.linearize(m) foreach printBlock
println("}")
- indent; println("Exception handlers: ")
+ indent(); println("Exception handlers: ")
m.exh foreach printExceptionHandler
- undent; println
+ undent(); println()
} else
- println
+ println()
}
def printParam(p: Local) {
- print(p.sym.name); print(": "); print(p.sym.info);
+ print(p.sym.name); print(": "); print(p.sym.info)
print(" ("); print(p.kind); print(")")
}
def printExceptionHandler(e: ExceptionHandler) {
- indent;
- println("catch (" + e.cls.simpleName + ") in " + e.covered.toSeq.sortBy(_.label) + " starting at: " + e.startBlock);
- println("consisting of blocks: " + e.blocks);
- undent;
- println("with finalizer: " + e.finalizer);
-// linearizer.linearize(e.startBlock) foreach printBlock;
+ indent()
+ println("catch (" + e.cls.simpleName + ") in " + e.covered.toSeq.sortBy(_.label) + " starting at: " + e.startBlock)
+ println("consisting of blocks: " + e.blocks)
+ undent()
+ println("with finalizer: " + e.finalizer)
+ // linearizer.linearize(e.startBlock) foreach printBlock;
}
def printBlock(bb: BasicBlock) {
print(bb.label)
if (bb.loopHeader) print("[loop header]")
- print(": ");
+ print(": ")
if (settings.debug.value) print("pred: " + bb.predecessors + " succs: " + bb.successors + " flags: " + bb.flagsString)
- indent; println
+ indent(); println()
bb.toList foreach printInstruction
- undent; println
+ undent(); println()
}
def printInstruction(i: Instruction) {
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
index e73015c4da..e92e61c957 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
@@ -26,17 +26,6 @@ trait Repository {
/** The icode of the given class, if available */
def icode(sym: Symbol): Option[IClass] = (classes get sym) orElse (loaded get sym)
- /** The icode of the given class. If not available, it loads
- * its bytecode.
- */
- def icode(sym: Symbol, force: Boolean): IClass =
- icode(sym) getOrElse {
- log("loading " + sym)
- load(sym)
- assert(available(sym))
- loaded(sym)
- }
-
/** Load bytecode for given symbol. */
def load(sym: Symbol): Boolean = {
try {
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
index 4f8fda8024..1875c8c914 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
@@ -55,7 +55,7 @@ trait TypeKinds { self: ICodes =>
def toType: Type = reversePrimitiveMap get this map (_.tpe) getOrElse {
this match {
- case REFERENCE(cls) => cls.tpe
+ case REFERENCE(cls) => cls.tpe_*
case ARRAY(elem) => arrayType(elem.toType)
case _ => abort("Unknown type kind.")
}
@@ -66,7 +66,6 @@ trait TypeKinds { self: ICodes =>
def isValueType = false
def isBoxedType = false
final def isRefOrArrayType = isReferenceType || isArrayType
- final def isRefArrayOrBoxType = isRefOrArrayType || isBoxedType
final def isNothingType = this == NothingReference
final def isNullType = this == NullReference
final def isInterfaceType = this match {
@@ -89,11 +88,20 @@ trait TypeKinds { self: ICodes =>
final def isNumericType: Boolean = isIntegralType | isRealType
/** Simple subtyping check */
- def <:<(other: TypeKind): Boolean = (this eq other) || (this match {
- case BOOL | BYTE | SHORT | CHAR => other == INT || other == LONG
- case _ => this eq other
- })
+ def <:<(other: TypeKind): Boolean
+ /**
+ * this is directly assignable to other if no coercion or
+ * casting is needed to convert this to other. It's a distinct
+ * relationship from <:< because on the JVM, BOOL, BYTE, CHAR and
+ * SHORT need no coercion to INT; yet, although JVM arrays
+ * are covariant, ARRAY[SHORT] is not a subtype of ARRAY[INT]
+ */
+ final def isAssignabledTo(other: TypeKind): Boolean = other match {
+ case INT => this.isIntSizedType
+ case _ => this <:< other
+ }
+
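
To make the new `<:<` / `isAssignabledTo` split concrete, here is a toy model of the distinction; the Kind hierarchy below is invented for illustration and is far smaller than the real TypeKind lattice.

    object AssignabilitySketch {
      sealed trait Kind {
        def isIntSized: Boolean = this match {
          case BOOL | BYTE | CHAR | SHORT | INT => true
          case _                                => false
        }
        // Strict subtyping: a value kind is only a subtype of itself;
        // arrays are compared element-wise (covariantly).
        def <:<(other: Kind): Boolean = (this, other) match {
          case (a, b) if a == b       => true
          case (ARRAY(e1), ARRAY(e2)) => e1 <:< e2
          case _                      => false
        }
        // Assignability (the compiler's method is spelled `isAssignabledTo`):
        // any int-sized kind can be stored where an INT is expected,
        // but that does not lift to arrays.
        def isAssignableTo(other: Kind): Boolean = other match {
          case INT => this.isIntSized
          case _   => this <:< other
        }
      }
      case object BOOL  extends Kind
      case object BYTE  extends Kind
      case object CHAR  extends Kind
      case object SHORT extends Kind
      case object INT   extends Kind
      final case class ARRAY(elem: Kind) extends Kind

      def main(args: Array[String]): Unit = {
        println(SHORT isAssignableTo INT)               // true
        println(SHORT <:< INT)                          // false
        println(ARRAY(SHORT) isAssignableTo ARRAY(INT)) // false
      }
    }
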
/** Is this type a category 2 type in JVM terms? (ie, is it LONG or DOUBLE?) */
def isWideType: Boolean = false
@@ -112,10 +120,9 @@ trait TypeKinds { self: ICodes =>
override def toString = {
this.getClass.getName stripSuffix "$" dropWhile (_ != '$') drop 1
}
+ def <:<(other: TypeKind): Boolean = this eq other
}
- var lubs0 = 0
-
/**
* The least upper bound of two typekinds. They have to be either
* REFERENCE or ARRAY kinds.
@@ -139,8 +146,7 @@ trait TypeKinds { self: ICodes =>
* Here we make the adjustment by rewinding to a pre-erasure state and
* sifting through the parents for a class type.
*/
- def lub0(tk1: TypeKind, tk2: TypeKind): Type = beforeUncurry {
- import definitions._
+ def lub0(tk1: TypeKind, tk2: TypeKind): Type = enteringUncurry {
val tp = global.lub(List(tk1.toType, tk2.toType))
val (front, rest) = tp.parents span (_.typeSymbol.isTrait)
@@ -284,7 +290,7 @@ trait TypeKinds { self: ICodes =>
}
/** Checks subtyping relationship. */
- override def <:<(other: TypeKind) = isNothingType || (other match {
+ def <:<(other: TypeKind) = isNothingType || (other match {
case REFERENCE(cls2) => cls.tpe <:< cls2.tpe
case ARRAY(_) => cls == NullClass
case _ => false
@@ -298,7 +304,7 @@ trait TypeKinds { self: ICodes =>
else ARRAY(ArrayN(elem, dims - 1))
}
- final case class ARRAY(val elem: TypeKind) extends TypeKind {
+ final case class ARRAY(elem: TypeKind) extends TypeKind {
override def toString = "ARRAY[" + elem + "]"
override def isArrayType = true
override def dimensions = 1 + elem.dimensions
@@ -322,7 +328,7 @@ trait TypeKinds { self: ICodes =>
/** Array subtyping is covariant, as in Java. Necessary for checking
* code that interacts with Java. */
- override def <:<(other: TypeKind) = other match {
+ def <:<(other: TypeKind) = other match {
case ARRAY(elem2) => elem <:< elem2
case REFERENCE(AnyRefClass | ObjectClass) => true // TODO: platform dependent!
case _ => false
@@ -340,7 +346,7 @@ trait TypeKinds { self: ICodes =>
}
/** Checks subtyping relationship. */
- override def <:<(other: TypeKind) = other match {
+ def <:<(other: TypeKind) = other match {
case BOXED(`kind`) => true
case REFERENCE(AnyRefClass | ObjectClass) => true // TODO: platform dependent!
case _ => false
@@ -353,6 +359,7 @@ trait TypeKinds { self: ICodes =>
*/
case object ConcatClass extends TypeKind {
override def toString = "ConcatClass"
+ def <:<(other: TypeKind): Boolean = this eq other
/**
* Approximate `lub`. The common type of two references is
@@ -363,19 +370,16 @@ trait TypeKinds { self: ICodes =>
case REFERENCE(_) => AnyRefReference
case _ => uncomparable(other)
}
-
- /** Checks subtyping relationship. */
- override def <:<(other: TypeKind) = this eq other
}
////////////////// Conversions //////////////////////////////
/** Return the TypeKind of the given type
*
- * Call to .normalize fixes #3003 (follow type aliases). Otherwise,
+ * Call to dealiasWiden fixes #3003 (follow type aliases). Otherwise,
* arrayOrClassType below would return ObjectReference.
*/
- def toTypeKind(t: Type): TypeKind = t.normalize match {
+ def toTypeKind(t: Type): TypeKind = t.dealiasWiden match {
case ThisType(ArrayClass) => ObjectReference
case ThisType(sym) => REFERENCE(sym)
case SingleType(_, sym) => primitiveOrRefType(sym)
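
The comment above notes that classification must follow type aliases before deciding on a kind. A toy model of that ordering, with an invented Ty ADT standing in for compiler types:

    object DealiasSketch {
      sealed trait Ty
      final case class ClassTy(name: String)          extends Ty
      final case class ArrayTy(elem: Ty)              extends Ty
      final case class AliasTy(name: String, ref: Ty) extends Ty   // e.g. `type Bytes = Array[Byte]`

      // Follow aliases all the way down, in the spirit of dealiasWiden.
      @annotation.tailrec
      def dealias(t: Ty): Ty = t match {
        case AliasTy(_, ref) => dealias(ref)
        case other           => other
      }

      // Classify a type only after dealiasing; without that step, an alias of an
      // array type would fall through to the generic reference case.
      def kindOf(t: Ty): String = dealias(t) match {
        case ArrayTy(elem) => s"ARRAY[${kindOf(elem)}]"
        case ClassTy(name) => s"REFERENCE($name)"
        case a: AliasTy    => sys.error(s"unreachable after dealias: $a")
      }

      def main(args: Array[String]): Unit = {
        val bytes = AliasTy("Bytes", ArrayTy(ClassTy("Byte")))
        println(kindOf(bytes))   // ARRAY[REFERENCE(Byte)], not a bare object reference
      }
    }
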
@@ -431,11 +435,4 @@ trait TypeKinds { self: ICodes =>
primitiveTypeMap.getOrElse(sym, newReference(sym))
private def primitiveOrClassType(sym: Symbol, targs: List[Type]) =
primitiveTypeMap.getOrElse(sym, arrayOrClassType(sym, targs))
-
- def msil_mgdptr(tk: TypeKind): TypeKind = (tk: @unchecked) match {
- case REFERENCE(cls) => REFERENCE(loaders.clrTypes.mdgptrcls4clssym(cls))
- // TODO have ready class-symbols for the by-ref versions of built-in valuetypes
- case _ => abort("cannot obtain a managed pointer for " + tk)
- }
-
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
index 23d3d05c64..57d51dad49 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
@@ -15,15 +15,11 @@ package icode
trait TypeStacks {
self: ICodes =>
- import opcodes._
-
/* This class simulates the type of the operand
* stack of the ICode.
*/
type Rep = List[TypeKind]
- object NoTypeStack extends TypeStack(Nil) { }
-
class TypeStack(var types: Rep) {
if (types.nonEmpty)
checkerDebug("Created " + this)
@@ -71,14 +67,6 @@ trait TypeStacks {
def apply(n: Int): TypeKind = types(n)
- /**
- * A TypeStack agrees with another one if they have the same
- * length and each type kind agrees position-wise. Two
- * types agree if one is a subtype of the other.
- */
- def agreesWith(other: TypeStack): Boolean =
- (types corresponds other.types)((t1, t2) => t1 <:< t2 || t2 <:< t1)
-
/* This method returns a String representation of the stack */
override def toString() =
if (types.isEmpty) "[]"
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
index 53111d0ade..152a11ab1a 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
@@ -26,12 +26,8 @@ abstract class CopyPropagation {
case object This extends Location
/** Values that can be on the stack. */
- abstract class Value {
- def isRecord = false
- }
- case class Record(cls: Symbol, bindings: mutable.Map[Symbol, Value]) extends Value {
- override def isRecord = true
- }
+ abstract class Value { }
+ case class Record(cls: Symbol, bindings: mutable.Map[Symbol, Value]) extends Value { }
/** The value of some location in memory. */
case class Deref(l: Location) extends Value
@@ -91,16 +87,6 @@ abstract class CopyPropagation {
loop(l) getOrElse Deref(LocalVar(l))
}
- /* Return the binding for the given field of the given record */
- def getBinding(r: Record, f: Symbol): Value = {
- assert(r.bindings contains f, "Record " + r + " does not contain a field " + f)
-
- r.bindings(f) match {
- case Deref(LocalVar(l)) => getBinding(l)
- case target => target
- }
- }
-
/** Return a local which contains the same value as this field, if any.
* If the field holds a reference to a local, the returned value is the
* binding of that local.
@@ -137,7 +123,7 @@ abstract class CopyPropagation {
}
override def toString(): String =
- "\nBindings: " + bindings + "\nStack: " + stack;
+ "\nBindings: " + bindings + "\nStack: " + stack
def dup: State = {
val b: Bindings = mutable.HashMap()
@@ -178,7 +164,7 @@ abstract class CopyPropagation {
val resBindings = mutable.HashMap[Location, Value]()
for ((k, v) <- a.bindings if b.bindings.isDefinedAt(k) && v == b.bindings(k))
- resBindings += (k -> v);
+ resBindings += (k -> v)
new State(resBindings, resStack)
}
}
@@ -203,11 +189,11 @@ abstract class CopyPropagation {
debuglog("CopyAnalysis added point: " + b)
}
m.exh foreach { e =>
- in(e.startBlock) = new copyLattice.State(copyLattice.emptyBinding, copyLattice.exceptionHandlerStack);
+ in(e.startBlock) = new copyLattice.State(copyLattice.emptyBinding, copyLattice.exceptionHandlerStack)
}
// first block is special: it's not bottom, but a precisely defined state with no bindings
- in(m.startBlock) = new lattice.State(lattice.emptyBinding, Nil);
+ in(m.startBlock) = new lattice.State(lattice.emptyBinding, Nil)
}
}
@@ -216,7 +202,7 @@ abstract class CopyPropagation {
if (settings.debug.value) {
linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(in(b) != lattice.bottom,
- "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"));
+ "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"))
}
}
@@ -241,7 +227,7 @@ abstract class CopyPropagation {
case CONSTANT(k) =>
if (k.tag != UnitTag)
- out.stack = Const(k) :: out.stack;
+ out.stack = Const(k) :: out.stack
case LOAD_ARRAY_ITEM(_) =>
out.stack = (Unknown :: out.stack.drop(2))
@@ -290,14 +276,14 @@ abstract class CopyPropagation {
v match {
case Deref(LocalVar(other)) =>
if (other != local)
- out.bindings += (LocalVar(local) -> v);
+ out.bindings += (LocalVar(local) -> v)
case _ =>
out.bindings += (LocalVar(local) -> v)
}
case Nil =>
sys.error("Incorrect icode in " + method + ". Expecting something on the stack.")
}
- out.stack = out.stack drop 1;
+ out.stack = out.stack drop 1
case STORE_THIS(_) =>
cleanReferencesTo(out, This)
@@ -305,14 +291,14 @@ abstract class CopyPropagation {
case STORE_FIELD(field, isStatic) =>
if (isStatic)
- out.stack = out.stack.drop(1);
+ out.stack = out.stack.drop(1)
else {
- out.stack = out.stack.drop(2);
- cleanReferencesTo(out, Field(AllRecords, field));
+ out.stack = out.stack.drop(2)
+ cleanReferencesTo(out, Field(AllRecords, field))
in.stack match {
case v :: Record(_, bindings) :: vs =>
bindings += (field -> v)
- case _ => ();
+ case _ => ()
}
}
@@ -322,7 +308,7 @@ abstract class CopyPropagation {
case CALL_METHOD(method, style) => style match {
case Dynamic =>
- out = simulateCall(in, method, false)
+ out = simulateCall(in, method, static = false)
case Static(onInstance) =>
if (onInstance) {
@@ -333,19 +319,19 @@ abstract class CopyPropagation {
case Record(_, bindings) =>
for (v <- out.stack.take(method.info.paramTypes.length + 1)
if v ne obj) {
- bindings ++= getBindingsForPrimaryCtor(in, method);
+ bindings ++= getBindingsForPrimaryCtor(in, method)
}
case _ => ()
}
// put the Record back on the stack and remove the 'returned' value
out.stack = out.stack.drop(1 + method.info.paramTypes.length)
} else
- out = simulateCall(in, method, false)
+ out = simulateCall(in, method, static = false)
} else
- out = simulateCall(in, method, true)
+ out = simulateCall(in, method, static = true)
case SuperCall(_) =>
- out = simulateCall(in, method, false)
+ out = simulateCall(in, method, static = false)
}
case BOX(tpe) =>
@@ -404,7 +390,7 @@ abstract class CopyPropagation {
out.stack = out.stack.head :: out.stack
case MONITOR_ENTER() =>
- out.stack = out.stack.drop(1);
+ out.stack = out.stack.drop(1)
case MONITOR_EXIT() =>
out.stack = out.stack.drop(1)
@@ -452,7 +438,7 @@ abstract class CopyPropagation {
case Deref(loc1) if (loc1 == target) => false
case Boxed(loc1) if (loc1 == target) => false
case rec @ Record(_, _) =>
- cleanRecord(rec);
+ cleanRecord(rec)
true
case _ => true
}) &&
@@ -463,22 +449,17 @@ abstract class CopyPropagation {
}
}
- /** Update the state <code>s</code> after the call to <code>method</code>.
+ /** Update the state `s` after the call to `method`.
* The stack elements are dropped and replaced by the result of the call.
* If the method is impure, all bindings to record fields are cleared.
- *
- * @param state ...
- * @param method ...
- * @param static ...
- * @return ...
*/
final def simulateCall(state: copyLattice.State, method: Symbol, static: Boolean): copyLattice.State = {
- val out = new copyLattice.State(state.bindings, state.stack);
- out.stack = out.stack.drop(method.info.paramTypes.length + (if (static) 0 else 1));
+ val out = new copyLattice.State(state.bindings, state.stack)
+ out.stack = out.stack.drop(method.info.paramTypes.length + (if (static) 0 else 1))
if (method.info.resultType != definitions.UnitClass.tpe && !method.isConstructor)
- out.stack = Unknown :: out.stack;
+ out.stack = Unknown :: out.stack
if (!isPureMethod(method))
- invalidateRecords(out);
+ invalidateRecords(out)
out
}
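
As the doc comment says, `simulateCall` pops the arguments (plus the receiver for non-static calls) off the abstract stack and pushes an unknown result; invalidating record bindings for impure callees is the only part left out of this toy sketch, which uses an invented AbsValue type.

    object SimulateCallSketch {
      sealed trait AbsValue
      case object Unknown            extends AbsValue
      final case class Const(i: Int) extends AbsValue

      /** Drop `paramCount` arguments (plus the receiver unless the call is static),
       *  then push Unknown for the result unless the callee returns Unit.
       */
      def simulateCall(stack: List[AbsValue],
                       paramCount: Int,
                       static: Boolean,
                       returnsUnit: Boolean): List[AbsValue] = {
        val afterArgs = stack.drop(paramCount + (if (static) 0 else 1))
        if (returnsUnit) afterArgs else Unknown :: afterArgs
      }

      def main(args: Array[String]): Unit = {
        val before = List(Const(1), Const(2), Const(3))   // top of stack is the head
        println(simulateCall(before, paramCount = 1, static = false, returnsUnit = false))
        // List(Unknown, Const(3)): arg and receiver consumed, result pushed
      }
    }
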
@@ -519,8 +500,8 @@ abstract class CopyPropagation {
* they are passed on the stack. It works for primary constructors.
*/
private def getBindingsForPrimaryCtor(in: copyLattice.State, ctor: Symbol): mutable.Map[Symbol, Value] = {
- val paramAccessors = ctor.owner.constrParamAccessors;
- var values = in.stack.take(1 + ctor.info.paramTypes.length).reverse.drop(1);
+ val paramAccessors = ctor.owner.constrParamAccessors
+ var values = in.stack.take(1 + ctor.info.paramTypes.length).reverse.drop(1)
val bindings = mutable.HashMap[Symbol, Value]()
debuglog("getBindings for: " + ctor + " acc: " + paramAccessors)
@@ -546,18 +527,15 @@ abstract class CopyPropagation {
// + " having acc: " + (paramAccessors map (_.tpe))+ " vs. params" + paramTypes
// + "\n\t failed at pos " + i + " with " + p.tpe + " == " + paramTypes(i))
if (p.tpe == paramTypes(i))
- bindings += (p -> values.head);
- values = values.tail;
+ bindings += (p -> values.head)
+ values = values.tail
}
debuglog("\t" + bindings)
bindings
}
- /** Is symbol <code>m</code> a pure method?
- *
- * @param m ...
- * @return ...
+ /** Is symbol `m` a pure method?
*/
final def isPureMethod(m: Symbol): Boolean =
m.isGetter // abstract getters are still pure, as we 'know'
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
index 04c3eedbad..ebc2d33a62 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
@@ -30,16 +30,7 @@ trait DataFlowAnalysis[L <: SemiLattice] {
/* Implement this function to initialize the worklist. */
def init(f: => Unit): Unit = {
iterations = 0
- in.clear; out.clear; worklist.clear; visited.clear;
- f
- }
-
- /** Reinitialize, but keep the old solutions. Should be used when reanalyzing the
- * same method, after some code transformation.
- */
- def reinit(f: => Unit): Unit = {
- iterations = 0
- worklist.clear; visited.clear;
+ in.clear(); out.clear(); worklist.clear(); visited.clear()
f
}
@@ -55,7 +46,7 @@ trait DataFlowAnalysis[L <: SemiLattice] {
while (!worklist.isEmpty) {
if (stat) iterations += 1
//Console.println("worklist in: " + worklist);
- val point = worklist.iterator.next; worklist -= point; visited += point;
+ val point = worklist.iterator.next(); worklist -= point; visited += point
//Console.println("taking out point: " + point + " worklist out: " + worklist);
val output = f(point, in(point))
@@ -82,17 +73,13 @@ trait DataFlowAnalysis[L <: SemiLattice] {
sys.error("Could not find element " + e.getMessage)
}
- /** ...
- *
- * @param f ...
- */
def backwardAnalysis(f: (P, lattice.Elem) => lattice.Elem): Unit =
while (worklist.nonEmpty) {
if (stat) iterations += 1
val point = worklist.head
worklist -= point
- out(point) = lattice.lub(point.successors map in.apply, false) // TODO check for exception handlers
+ out(point) = lattice.lub(point.successors map in.apply, exceptional = false) // TODO check for exception handlers
val input = f(point, out(point))
if ((lattice.bottom == in(point)) || input != in(point)) {
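
Both analyses above are instances of the classic worklist fixpoint: take a program point off the worklist, apply the transfer function, and re-enqueue neighbours whose facts changed. A minimal forward version over an invented set lattice (lub is set union), not the compiler's SemiLattice machinery:

    import scala.collection.mutable

    object WorklistFixpointSketch {
      type Node = Int
      type Fact = Set[String]    // toy lattice: sets ordered by inclusion

      /** Forward dataflow: propagate facts along edges until nothing changes. */
      def forward(nodes: Seq[Node],
                  succs: Map[Node, List[Node]],
                  entryFact: Fact,
                  transfer: (Node, Fact) => Fact): Map[Node, Fact] = {
        val in       = mutable.Map(nodes.map(_ -> (Set.empty: Fact)): _*)
        val worklist = mutable.Queue(nodes: _*)
        in(nodes.head) = entryFact
        while (worklist.nonEmpty) {
          val n   = worklist.dequeue()
          val out = transfer(n, in(n))
          for (s <- succs.getOrElse(n, Nil)) {
            val joined = in(s) union out                            // lub of old input and new contribution
            if (joined != in(s)) { in(s) = joined; worklist += s }  // only requeue if the input grew
          }
        }
        in.toMap
      }

      def main(args: Array[String]): Unit = {
        val result = forward(
          nodes     = Seq(1, 2, 3),
          succs     = Map(1 -> List(2, 3), 2 -> List(3)),
          entryFact = Set("start"),
          transfer  = (n, fact) => fact + s"def@$n")
        println(result)   // node 3 accumulates the facts flowing in from both 1 and 2
      }
    }
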
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
index abda639dec..14b57f287f 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
@@ -77,7 +77,7 @@ abstract class Liveness {
if (settings.debug.value) {
linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(lattice.bottom != in(b),
- "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"));
+ "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"))
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
index 2717c432e8..2d29e6b14f 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
@@ -52,7 +52,7 @@ abstract class ReachingDefinitions {
// it makes it harder to spot the real problems.
val result = (a.stack, b.stack).zipped map (_ ++ _)
if (settings.debug.value && (a.stack.length != b.stack.length))
- debugwarn("Mismatched stacks in ReachingDefinitions#lub2: " + a.stack + ", " + b.stack + ", returning " + result)
+ devWarning(s"Mismatched stacks in ReachingDefinitions#lub2: ${a.stack}, ${b.stack}, returning $result")
result
}
)
@@ -147,7 +147,7 @@ abstract class ReachingDefinitions {
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited? " + in(b)
+ ": bot: " + lattice.bottom
+ "\nin(b) == bottom: " + (in(b) == lattice.bottom)
- + "\nbottom == in(b): " + (lattice.bottom == in(b))));
+ + "\nbottom == in(b): " + (lattice.bottom == in(b))))
}
}
@@ -155,7 +155,7 @@ abstract class ReachingDefinitions {
import lattice.IState
def updateReachingDefinition(b: BasicBlock, idx: Int, rd: ListSet[Definition]): ListSet[Definition] = {
val STORE_LOCAL(local) = b(idx)
- var tmp = local
+ val tmp = local
(rd filter { case (l, _, _) => l != tmp }) + ((tmp, b, idx))
}
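
The `updateReachingDefinition` shown above is a textbook gen/kill step: a store kills older definitions of the same local and generates exactly one new definition. A stand-alone sketch with an invented Definition type:

    object ReachingDefsSketch {
      // A definition site: which local was written, and where (block label, instruction index).
      final case class Definition(local: String, block: Int, idx: Int)

      /** A store to `local` at (block, idx) kills every older definition of that
       *  local and generates one new definition.
       */
      def updateReachingDefinition(rd: Set[Definition], local: String, block: Int, idx: Int): Set[Definition] =
        rd.filterNot(_.local == local) + Definition(local, block, idx)

      def main(args: Array[String]): Unit = {
        val before = Set(Definition("x", 1, 0), Definition("y", 1, 1))
        println(updateReachingDefinition(before, "x", 2, 3))
        // Set(Definition(y,1,1), Definition(x,2,3)): the old definition of x is gone
      }
    }
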
@@ -197,7 +197,7 @@ abstract class ReachingDefinitions {
def findDefs(bb: BasicBlock, idx: Int, m: Int, depth: Int): List[(BasicBlock, Int)] = if (idx > 0) {
assert(bb.closed, bb)
- var instrs = bb.getArray
+ val instrs = bb.getArray
var res: List[(BasicBlock, Int)] = Nil
var i = idx
var n = m
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index b2ecb431ee..227c1064ea 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -68,7 +68,6 @@ abstract class TypeFlowAnalysis {
* names to types and a type stack.
*/
object typeFlowLattice extends SemiLattice {
- import icodes._
type Elem = IState[VarBinding, icodes.TypeStack]
val top = new Elem(new VarBinding, typeStackLattice.top)
@@ -132,15 +131,15 @@ abstract class TypeFlowAnalysis {
init(m)
}
- def run = {
- timer.start
+ def run() = {
+ timer.start()
// icodes.lubs0 = 0
forwardAnalysis(blockTransfer)
- val t = timer.stop
+ timer.stop
if (settings.debug.value) {
linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(visited.contains(b),
- "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited));
+ "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited))
}
// log("" + method.symbol.fullName + " [" + method.code.blocks.size + " blocks] "
// + "\n\t" + iterations + " iterations: " + t + " ms."
@@ -208,7 +207,7 @@ abstract class TypeFlowAnalysis {
case Test(_, kind, zero) =>
stack.pop
if (!zero) { stack.pop }
- stack push BOOL;
+ stack push BOOL
case Comparison(_, _) => stack.pop2; stack push INT
@@ -269,36 +268,6 @@ abstract class TypeFlowAnalysis {
out
} // interpret
-
- class SimulatedStack {
- private var types: List[InferredType] = Nil
- private var depth = 0
-
- /** Remove and return the topmost element on the stack. If the
- * stack is empty, return a reference to a negative index on the
- * stack, meaning it refers to elements pushed by a predecessor block.
- */
- def pop: InferredType = types match {
- case head :: rest =>
- types = rest
- head
- case _ =>
- depth -= 1
- TypeOfStackPos(depth)
- }
-
- def pop2: (InferredType, InferredType) = {
- (pop, pop)
- }
-
- def push(t: InferredType) {
- depth += 1
- types = types ::: List(t)
- }
-
- def push(k: TypeKind) { push(Const(k)) }
- }
-
abstract class InferredType {
/** Return the type kind pointed by this inferred type. */
def getKind(in: lattice.Elem): icodes.TypeKind = this match {
@@ -326,7 +295,6 @@ abstract class TypeFlowAnalysis {
class TransferFunction(consumed: Int, gens: List[Gen]) extends (lattice.Elem => lattice.Elem) {
def apply(in: lattice.Elem): lattice.Elem = {
val out = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
- val bindings = out.vars
val stack = out.stack
out.stack.pop(consumed)
@@ -387,9 +355,9 @@ abstract class TypeFlowAnalysis {
override def run {
- timer.start
+ timer.start()
forwardAnalysis(blockTransfer)
- val t = timer.stop
+ timer.stop
/* Now that `forwardAnalysis(blockTransfer)` has finished, all inlining candidates can be found in `remainingCALLs`,
whose keys are callsites and whose values are pieces of information about the typestack just before the callsite in question.
@@ -428,7 +396,7 @@ abstract class TypeFlowAnalysis {
override def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = {
var result = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
- val stopAt = if(isOnPerimeter(b)) lastInstruction(b) else null;
+ val stopAt = if(isOnPerimeter(b)) lastInstruction(b) else null
var isPastLast = false
var instrs = b.toList
@@ -546,9 +514,6 @@ abstract class TypeFlowAnalysis {
relevantBBs ++= blocks
}
- /* the argument is also included in the result */
- private def transitivePreds(b: BasicBlock): Set[BasicBlock] = { transitivePreds(List(b)) }
-
/* those BBs in the argument are also included in the result */
private def transitivePreds(starters: Traversable[BasicBlock]): Set[BasicBlock] = {
val result = mutable.Set.empty[BasicBlock]
@@ -562,19 +527,6 @@ abstract class TypeFlowAnalysis {
result.toSet
}
- /* those BBs in the argument are also included in the result */
- private def transitiveSuccs(starters: Traversable[BasicBlock]): Set[BasicBlock] = {
- val result = mutable.Set.empty[BasicBlock]
- var toVisit: List[BasicBlock] = starters.toList.distinct
- while(toVisit.nonEmpty) {
- val h = toVisit.head
- toVisit = toVisit.tail
- result += h
- for(p <- h.successors; if !result(p) && !toVisit.contains(p)) { toVisit = p :: toVisit }
- }
- result.toSet
- }
-
/* A basic block B is "on the perimeter" of the current control-flow subgraph if none of its successors belongs to that subgraph.
* In that case, for the purposes of inlining, we're interested in the typestack right before the last inline candidate in B, not in those afterwards.
* In particular we can do without computing the outflow at B. */
@@ -646,10 +598,10 @@ abstract class TypeFlowAnalysis {
return
} else if(staleOut.isEmpty && inlined.isEmpty && staleIn.isEmpty) {
// this promotes invoking reinit if in doubt, no performance degradation will ensue!
- return;
+ return
}
- worklist.clear // calling reinit(f: => Unit) would also clear visited, thus forgetting about blocks visited before reinit.
+ worklist.clear() // calling reinit(f: => Unit) would also clear visited, thus forgetting about blocks visited before reinit.
// asserts conveying an idea what CFG shapes arrive here:
// staleIn foreach (p => assert( !in.isDefinedAt(p), p))
@@ -685,12 +637,6 @@ abstract class TypeFlowAnalysis {
if(!worklist.contains(b)) { worklist += b }
}
- /* this is not a general purpose method to add to the worklist,
- * because the assert is expected to hold only when called from MTFAGrowable.reinit() */
- private def enqueue(bs: Traversable[BasicBlock]) {
- bs foreach enqueue
- }
-
private def blankOut(blocks: scala.collection.Set[BasicBlock]) {
blocks foreach { b =>
in(b) = typeFlowLattice.bottom
@@ -719,14 +665,14 @@ abstract class TypeFlowAnalysis {
override def forwardAnalysis(f: (P, lattice.Elem) => lattice.Elem): Unit = {
while (!worklist.isEmpty && relevantBBs.nonEmpty) {
if (stat) iterations += 1
- val point = worklist.iterator.next; worklist -= point;
+ val point = worklist.iterator.next(); worklist -= point
if(relevantBBs(point)) {
shrinkedWatchlist = false
val output = f(point, in(point))
- visited += point;
+ visited += point
if(isOnPerimeter(point)) {
if(shrinkedWatchlist && !isWatching(point)) {
- relevantBBs -= point;
+ relevantBBs -= point
populatePerimeter()
}
} else {
@@ -761,10 +707,6 @@ abstract class TypeFlowAnalysis {
private var lastStart = 0L
- def reset() {
- millis = 0L
- }
-
def start() {
lastStart = System.currentTimeMillis
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
index fb1f45fa40..c1cd3204e0 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
@@ -9,9 +9,7 @@ package backend.jvm
import java.io.{ DataOutputStream, FileOutputStream, OutputStream, File => JFile }
import scala.tools.nsc.io._
import scala.tools.nsc.util.ScalaClassLoader
-import scala.tools.util.JavapClass
-import java.util.jar.{ JarEntry, JarOutputStream, Attributes }
-import Attributes.Name
+import java.util.jar.Attributes.Name
import scala.language.postfixOps
/** For the last mile: turning generated bytecode in memory into
@@ -23,7 +21,7 @@ trait BytecodeWriters {
import global._
private def outputDirectory(sym: Symbol): AbstractFile = (
- settings.outputDirs.outputDirFor(beforeFlatten(sym.sourceFile))
+ settings.outputDirs.outputDirFor(enteringFlatten(sym.sourceFile))
)
private def getFile(base: AbstractFile, /*cls.getName()*/ clsName: String, suffix: String): AbstractFile = {
var dir = base
@@ -60,30 +58,6 @@ trait BytecodeWriters {
override def close() = writer.close()
}
- trait JavapBytecodeWriter extends BytecodeWriter {
- val baseDir = Directory(settings.Ygenjavap.value).createDirectory()
-
- def emitJavap(bytes: Array[Byte], javapFile: io.File) {
- val pw = javapFile.printWriter()
- val javap = new JavapClass(ScalaClassLoader.appLoader, pw) {
- override def findBytes(path: String): Array[Byte] = bytes
- }
-
- try javap(Seq("-verbose", "dummy")) foreach (_.show())
- finally pw.close()
- }
- abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
- super.writeClass(label, jclassName, jclassBytes, sym)
-
- val bytes = getFile(sym, jclassName, ".class").toByteArray
- val segments = jclassName.split("[./]")
- val javapFile = segments.foldLeft(baseDir: Path)(_ / _) changeExtension "javap" toFile;
-
- javapFile.parent.createDirectory()
- emitJavap(bytes, javapFile)
- }
- }
-
trait ClassBytecodeWriter extends BytecodeWriter {
def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
val outfile = getFile(sym, jclassName, ".class")
@@ -102,7 +76,7 @@ trait BytecodeWriters {
super.writeClass(label, jclassName, jclassBytes, sym)
val pathName = jclassName
- var dumpFile = pathName.split("[./]").foldLeft(baseDir: Path) (_ / _) changeExtension "class" toFile;
+ val dumpFile = pathName.split("[./]").foldLeft(baseDir: Path) (_ / _) changeExtension "class" toFile;
dumpFile.parent.createDirectory()
val outstream = new DataOutputStream(new FileOutputStream(dumpFile.path))
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 9b16327ffc..4a3d1805d9 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -6,14 +6,12 @@
package scala.tools.nsc
package backend.jvm
-import java.nio.ByteBuffer
import scala.collection.{ mutable, immutable }
import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer }
import scala.tools.nsc.symtab._
-import scala.tools.nsc.io.AbstractFile
-
import scala.tools.asm
import asm.Label
+import scala.annotation.tailrec
/**
* @author Iulian Dragos (version 1.0, FJBG-based implementation)
@@ -32,6 +30,16 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
/** Create a new phase */
override def newPhase(p: Phase): Phase = new AsmPhase(p)
+ /** From the reference documentation of the Android SDK:
+ * The `Parcelable` interface identifies classes whose instances can be written to and restored from a `Parcel`.
+ * Classes implementing the `Parcelable` interface must also have a static field called `CREATOR`,
+ * which is an object implementing the `Parcelable.Creator` interface.
+ */
+ private val androidFieldName = newTermName("CREATOR")
+
+ private lazy val AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable")
+ private lazy val AndroidCreatorClass = rootMirror.getClassIfDefined("android.os.Parcelable$Creator")
+
/** JVM code generation phase
*/
class AsmPhase(prev: Phase) extends ICodePhase(prev) {
@@ -39,7 +47,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
override def erasedTypes = true
def apply(cls: IClass) = sys.error("no implementation")
- val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo")
+ // Lazy val; can't have eager vals in Phase constructors which may
+ // cause cycles before Global has finished initialization.
+ lazy val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo")
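
The comment explains why the val became lazy: an eager val in a phase constructor can run before the surrounding machinery is ready. A small illustration of that initialization-order pitfall, with all names invented and nothing to do with Global itself:

    object LazyValInitSketch {
      class Registry {
        var ready = false
        def lookup(name: String): String = {
          require(ready, "registry not initialized yet")
          s"<$name>"
        }
      }

      // An eager val runs while the constructor runs -- possibly too early.
      class EagerPhase(r: Registry) { val attr = r.lookup("BeanInfo") }
      // A lazy val runs on first access, after initialization has had time to finish.
      class LazyPhase(r: Registry)  { lazy val attr = r.lookup("BeanInfo") }

      def main(args: Array[String]): Unit = {
        val r = new Registry
        val phase = new LazyPhase(r)      // fine: nothing is looked up yet
        r.ready = true
        println(phase.attr)               // <BeanInfo>
        try new EagerPhase(new Registry)  // fails: the lookup happens during construction
        catch { case e: IllegalArgumentException => println("eager val failed: " + e.getMessage) }
      }
    }
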
private def initBytecodeWriter(entryPoints: List[IClass]): BytecodeWriter = {
settings.outputDirs.getSingleOutput match {
@@ -62,14 +72,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
new DirectToJarfileWriter(f.file)
case _ =>
- if (settings.Ygenjavap.isDefault) {
- if(settings.Ydumpclasses.isDefault)
- new ClassBytecodeWriter { }
- else
- new ClassBytecodeWriter with DumpBytecodeWriter { }
- }
- else new ClassBytecodeWriter with JavapBytecodeWriter { }
-
+ if (settings.Ydumpclasses.isDefault)
+ new ClassBytecodeWriter { }
+ else
+ new ClassBytecodeWriter with DumpBytecodeWriter { }
// TODO A ScalapBytecodeWriter could take asm.util.Textifier as starting point.
// Three areas where javap output is less than ideal (e.g. when comparing versions of the same classfile) are:
// (a) unreadable pickle;
@@ -133,7 +139,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
/* don't javaNameCache.clear() because that causes the following tests to fail:
* test/files/run/macro-repl-dontexpand.scala
* test/files/jvm/interpreter.scala
- * TODO but why? what use could javaNameCache possibly see once GenJVM is over?
+ * TODO but why? what use could javaNameCache possibly see once GenASM is over?
*/
/* TODO After emitting all class files (e.g., in a separate compiler phase) ASM can perform bytecode verification:
@@ -248,7 +254,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
def isTopLevelModule(sym: Symbol): Boolean =
- afterPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
+ exitingPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
def isStaticModule(sym: Symbol): Boolean = {
sym.isModuleClass && !sym.isImplClass && !sym.isLifted
@@ -398,7 +404,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
/** basic functionality for class file building */
abstract class JBuilder(bytecodeWriter: BytecodeWriter) {
- val EMPTY_JTYPE_ARRAY = Array.empty[asm.Type]
val EMPTY_STRING_ARRAY = Array.empty[String]
val mdesc_arglessvoid = "()V"
@@ -443,8 +448,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = {
- val dest = new Array[Byte](len);
- System.arraycopy(b, offset, dest, 0, len);
+ val dest = new Array[Byte](len)
+ System.arraycopy(b, offset, dest, 0, len)
new asm.CustomAttr(name, dest)
}
@@ -466,7 +471,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
/** Specialized array conversion to prevent calling
* java.lang.reflect.Array.newInstance via TraversableOnce.toArray
*/
- def mkArray(xs: Traversable[asm.Type]): Array[asm.Type] = { val a = new Array[asm.Type](xs.size); xs.copyToArray(a); a }
def mkArray(xs: Traversable[String]): Array[String] = { val a = new Array[String](xs.size); xs.copyToArray(a); a }
// -----------------------------------------------------------------------------------------
@@ -516,7 +520,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
* of inner class all until root class.
*/
def collectInnerClass(s: Symbol): Unit = {
- // TODO: some beforeFlatten { ... } which accounts for
+ // TODO: some enteringFlatten { ... } which accounts for
// being nested in parameterized classes (if we're going to selectively flatten.)
val x = innerClassSymbolFor(s)
if(x ne NoSymbol) {
@@ -531,7 +535,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
collectInnerClass(sym)
- var hasInternalName = (sym.isClass || (sym.isModule && !sym.isMethod))
+ val hasInternalName = (sym.isClass || (sym.isModule && !sym.isMethod))
val cachedJN = javaNameCache.getOrElseUpdate(sym, {
if (hasInternalName) { sym.javaBinaryName }
else { sym.javaSimpleName }
@@ -541,12 +545,18 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val internalName = cachedJN.toString()
val trackedSym = jsymbol(sym)
reverseJavaName.get(internalName) match {
- case None =>
+ case Some(oldsym) if oldsym.exists && trackedSym.exists =>
+ assert(
+ // In contrast, neither NothingClass nor NullClass show up bytecode-level.
+ (oldsym == trackedSym) || (oldsym == RuntimeNothingClass) || (oldsym == RuntimeNullClass) || (oldsym.isModuleClass && (oldsym.sourceModule == trackedSym.sourceModule)),
+ s"""|Different class symbols have the same bytecode-level internal name:
+ | name: $internalName
+ | oldsym: ${oldsym.fullNameString}
+ | tracked: ${trackedSym.fullNameString}
+ """.stripMargin
+ )
+ case _ =>
reverseJavaName.put(internalName, trackedSym)
- case Some(oldsym) =>
- assert((oldsym == trackedSym) || (oldsym == RuntimeNothingClass) || (oldsym == RuntimeNullClass) ||
- (oldsym.isModuleClass && (oldsym.sourceModule == trackedSym.sourceModule)), // In contrast, neither NothingClass nor NullClass show up bytecode-level.
- "how can getCommonSuperclass() do its job if different class symbols get the same bytecode-level internal name: " + internalName)
}
}
@@ -588,7 +598,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def javaType(s: Symbol): asm.Type = {
if (s.isMethod) {
- val resT: asm.Type = if (s.isClassConstructor) asm.Type.VOID_TYPE else javaType(s.tpe.resultType);
+ val resT: asm.Type = if (s.isClassConstructor) asm.Type.VOID_TYPE else javaType(s.tpe.resultType)
asm.Type.getMethodType( resT, (s.tpe.paramTypes map javaType): _*)
} else { javaType(s.tpe) }
}
@@ -619,7 +629,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
innerSym.rawname + innerSym.moduleSuffix
// add inner classes which might not have been referenced yet
- afterErasure {
+ exitingErasure {
for (sym <- List(csym, csym.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
innerClassBuffer += m
}
@@ -810,7 +820,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (!needsGenericSignature(sym)) { return null }
- val memberTpe = beforeErasure(owner.thisType.memberInfo(sym))
+ val memberTpe = enteringErasure(owner.thisType.memberInfo(sym))
val jsOpt: Option[String] = erasure.javaSig(sym, memberTpe)
if (jsOpt.isEmpty) { return null }
@@ -827,10 +837,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// Run the signature parser to catch bogus signatures.
val isValidSignature = wrap {
// Alternative: scala.tools.reflect.SigParser (frontend to sun.reflect.generics.parser.SignatureParser)
- import scala.tools.asm.util.SignatureChecker
- if (sym.isMethod) { SignatureChecker checkMethodSignature sig } // requires asm-util.jar
- else if (sym.isTerm) { SignatureChecker checkFieldSignature sig }
- else { SignatureChecker checkClassSignature sig }
+ import scala.tools.asm.util.CheckClassAdapter
+ if (sym.isMethod) { CheckClassAdapter checkMethodSignature sig } // requires asm-util.jar
+ else if (sym.isTerm) { CheckClassAdapter checkFieldSignature sig }
+ else { CheckClassAdapter checkClassSignature sig }
}
if(!isValidSignature) {
@@ -844,7 +854,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
if ((settings.check containsName phaseName)) {
- val normalizedTpe = beforeErasure(erasure.prepareSigMap(memberTpe))
+ val normalizedTpe = enteringErasure(erasure.prepareSigMap(memberTpe))
val bytecodeTpe = owner.thisType.memberInfo(sym)
if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) {
getCurrentCUnit().warning(sym.pos,
@@ -863,9 +873,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
def ubytesToCharArray(bytes: Array[Byte]): Array[Char] = {
- val ca = new Array[Char](bytes.size)
+ val ca = new Array[Char](bytes.length)
var idx = 0
- while(idx < bytes.size) {
+ while(idx < bytes.length) {
val b: Byte = bytes(idx)
assert((b & ~0x7f) == 0)
ca(idx) = b.asInstanceOf[Char]
@@ -882,7 +892,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
var prevOffset = 0
var offset = 0
var encLength = 0
- while(offset < bSeven.size) {
+ while(offset < bSeven.length) {
val deltaEncLength = (if(bSeven(offset) == 0) 2 else 1)
val newEncLength = encLength.toLong + deltaEncLength
if(newEncLength >= 65535) {
@@ -1099,7 +1109,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
debuglog("Dumping mirror class for object: " + moduleClass)
val linkedClass = moduleClass.companionClass
- val linkedModule = linkedClass.companionSymbol
lazy val conflictingNames: Set[Name] = {
(linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name }).toSet
}
@@ -1125,16 +1134,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
trait JAndroidBuilder {
self: JPlainBuilder =>
- /** From the reference documentation of the Android SDK:
- * The `Parcelable` interface identifies classes whose instances can be written to and restored from a `Parcel`.
- * Classes implementing the `Parcelable` interface must also have a static field called `CREATOR`,
- * which is an object implementing the `Parcelable.Creator` interface.
- */
- private val androidFieldName = newTermName("CREATOR")
-
- private lazy val AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable")
- private lazy val AndroidCreatorClass = rootMirror.getClassIfDefined("android.os.Parcelable$Creator")
-
def isAndroidParcelableClass(sym: Symbol) =
(AndroidParcelableInterface != NoSymbol) &&
(sym.parentSymbols contains AndroidParcelableInterface)
@@ -1142,13 +1141,13 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
/* Typestate: should be called before emitting fields (because it adds an IField to the current IClass). */
def addCreatorCode(block: BasicBlock) {
val fieldSymbol = (
- clasz.symbol.newValue(newTermName(androidFieldName), NoPosition, Flags.STATIC | Flags.FINAL)
+ clasz.symbol.newValue(androidFieldName, NoPosition, Flags.STATIC | Flags.FINAL)
setInfo AndroidCreatorClass.tpe
)
val methodSymbol = definitions.getMember(clasz.symbol.companionModule, androidFieldName)
clasz addField new IField(fieldSymbol)
- block emit CALL_METHOD(methodSymbol, Static(false))
- block emit STORE_FIELD(fieldSymbol, true)
+ block emit CALL_METHOD(methodSymbol, Static(onInstance = false))
+ block emit STORE_FIELD(fieldSymbol, isStatic = true)
}
def legacyAddCreatorCode(clinit: asm.MethodVisitor) {
@@ -1157,7 +1156,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
jclass.visitField(
PublicStaticFinal,
- androidFieldName,
+ androidFieldName.toString,
tdesc_creator,
null, // no java-generic-signature
null // no initial value
@@ -1177,7 +1176,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
clinit.visitMethodInsn(
asm.Opcodes.INVOKEVIRTUAL,
moduleName,
- androidFieldName,
+ androidFieldName.toString,
asm.Type.getMethodDescriptor(creatorType, Array.empty[asm.Type]: _*)
)
@@ -1185,7 +1184,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
clinit.visitFieldInsn(
asm.Opcodes.PUTSTATIC,
thisName,
- androidFieldName,
+ androidFieldName.toString,
tdesc_creator
)
}
@@ -1267,8 +1266,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// Additional interface parents based on annotations and other cues
def newParentForAttr(attr: Symbol): Option[Symbol] = attr match {
- case SerializableAttr => Some(SerializableClass)
- case CloneableAttr => Some(CloneableClass)
case RemoteAttr => Some(RemoteInterfaceClass)
case _ => None
}
@@ -1292,7 +1289,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
val ps = c.symbol.info.parents
- val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses;
+ val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses
val superInterfaces = (superInterfaces0 ++ c.symbol.annotations.flatMap(ann => newParentForAttr(ann.symbol))).distinct
if(superInterfaces.isEmpty) EMPTY_STRING_ARRAY
@@ -1317,7 +1314,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
thisName = javaName(c.symbol) // the internal name of the class being emitted
val ps = c.symbol.info.parents
- val superClass: String = if(ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else javaName(ps.head.typeSymbol);
+ val superClass: String = if(ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else javaName(ps.head.typeSymbol)
val ifaces = getSuperInterfaces(c)
@@ -1371,7 +1368,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (lmoc != NoSymbol) {
// it must be a top level class (name contains no $s)
val isCandidateForForwarders = {
- afterPickler { !(lmoc.name.toString contains '$') && lmoc.hasModuleFlag && !lmoc.isImplClass && !lmoc.isNestedClass }
+ exitingPickler { !(lmoc.name.toString contains '$') && lmoc.hasModuleFlag && !lmoc.isImplClass && !lmoc.isNestedClass }
}
if (isCandidateForForwarders) {
log("Adding static forwarders from '%s' to implementations in '%s'".format(c.symbol, lmoc))
@@ -1432,7 +1429,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
assert(enclClass.isClass, enclClass)
val sym = enclClass.primaryConstructor
if (sym == NoSymbol) {
- log("Ran out of room looking for an enclosing method for %s: no constructor here.".format(enclClass, clazz))
+ log("Ran out of room looking for an enclosing method for %s: no constructor here.".format(enclClass))
} else {
debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, enclClass))
res = EnclMethodEntry(javaName(enclClass), javaName(sym), javaType(sym))
@@ -1607,19 +1604,19 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (isStaticModule(clasz.symbol)) {
// call object's private ctor from static ctor
lastBlock emit NEW(REFERENCE(m.symbol.enclClass))
- lastBlock emit CALL_METHOD(m.symbol.enclClass.primaryConstructor, Static(true))
+ lastBlock emit CALL_METHOD(m.symbol.enclClass.primaryConstructor, Static(onInstance = true))
}
if (isParcelableClass) { addCreatorCode(lastBlock) }
lastBlock emit RETURN(UNIT)
- lastBlock.close
+ lastBlock.close()
- method = m
+ method = m
jmethod = clinitMethod
jMethodName = CLASS_CONSTRUCTOR_NAME
jmethod.visitCode()
- genCode(m, false, true)
+ genCode(m, emitVars = false, isStatic = true)
jmethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
jmethod.visitEnd()
@@ -1675,7 +1672,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val kind = toTypeKind(const.typeValue)
val toPush: asm.Type =
if (kind.isValueType) classLiteral(kind)
- else javaType(kind);
+ else javaType(kind)
mv.visitLdcInsn(toPush)
case EnumTag =>
@@ -1698,12 +1695,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
*/
object jcode {
- import asm.Opcodes;
-
- def aconst(cst: AnyRef) {
- if (cst == null) { jmethod.visitInsn(Opcodes.ACONST_NULL) }
- else { jmethod.visitLdcInsn(cst) }
- }
+ import asm.Opcodes
final def boolconst(b: Boolean) { iconst(if(b) 1 else 0) }
@@ -1798,8 +1790,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
def goTo(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.GOTO, label) }
- def emitIF(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIF, label) }
- def emitIF_ICMP(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP, label) }
+ def emitIF(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIF(), label) }
+ def emitIF_ICMP(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP(), label) }
def emitIF_ACMP(cond: TestOp, label: asm.Label) {
assert((cond == EQ) || (cond == NE), cond)
val opc = (if(cond == EQ) Opcodes.IF_ACMPEQ else Opcodes.IF_ACMPNE)
@@ -1867,10 +1859,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// use a table in which holes are filled with defaultBranch.
val keyRange = (keyMax - keyMin + 1)
val newBranches = new Array[asm.Label](keyRange)
- var oldPos = 0;
+ var oldPos = 0
var i = 0
while(i < keyRange) {
- val key = keyMin + i;
+ val key = keyMin + i
if (keys(oldPos) == key) {
newBranches(i) = branches(oldPos)
oldPos += 1
@@ -2069,7 +2061,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// TODO in that case, ExceptionHandler.cls doesn't go through javaName(). What if cls is an inner class?
for (e <- this.method.exh ; if e.covered.nonEmpty ; p <- intervals(e)) {
debuglog("Adding exception handler " + e + "at block: " + e.startBlock + " for " + method +
- " from: " + p.start + " to: " + p.end + " catching: " + e.cls);
+ " from: " + p.start + " to: " + p.end + " catching: " + e.cls)
val cls: String = if (e.cls == NoSymbol || e.cls == ThrowableClass) null
else javaName(e.cls)
jmethod.visitTryCatchBlock(labels(p.start), linNext(p.end), labels(e.startBlock), cls)
@@ -2093,8 +2085,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def overlaps(that: Interval): Boolean = { !(this.precedes(that) || that.precedes(this)) }
def mergeWith(that: Interval): Interval = {
- val newStart = if(this.start <= that.start) this.lstart else that.lstart;
- val newEnd = if(this.end <= that.end) that.lend else this.lend;
+ val newStart = if(this.start <= that.start) this.lstart else that.lstart
+ val newEnd = if(this.end <= that.end) that.lend else this.lend
Interval(newStart, newEnd)
}
@@ -2150,7 +2142,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def getMerged(): scala.collection.Map[Local, List[Interval]] = {
// TODO should but isn't: unbalanced start(s) of scope(s)
- val shouldBeEmpty = pending filter { p => val Pair(k, st) = p; st.nonEmpty };
+ val shouldBeEmpty = pending filter { p => val Pair(_, st) = p; st.nonEmpty }
val merged = mutable.Map[Local, List[Interval]]()
def addToMerged(lv: Local, start: Label, end: Label) {
val intv = Interval(start, end)
@@ -2168,10 +2160,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if(merged.isDefinedAt(k)) {
val balancedStart = merged(k).head.lstart
if(balancedStart.getOffset < start.getOffset) {
- start = balancedStart;
+ start = balancedStart
}
}
- val endOpt: Option[Label] = for(ranges <- merged.get(k)) yield ranges.last.lend;
+ val endOpt: Option[Label] = for(ranges <- merged.get(k)) yield ranges.last.lend
val end = endOpt.getOrElse(onePastLast)
addToMerged(k, start, end)
}
@@ -2204,7 +2196,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
for(Pair(local, ranges) <- scoping.getMerged()) {
var name = javaName(local.sym)
if (name == null) {
- anonCounter += 1;
+ anonCounter += 1
name = "<anon" + anonCounter + ">"
}
for(intrvl <- ranges) {
@@ -2213,7 +2205,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
// quest for deterministic output that Map.toList doesn't provide (so that ant test.stability doesn't complain).
val srtd = fltnd.sortBy { kr =>
- val Triple(name: String, local: Local, intrvl: Interval) = kr
+ val Triple(name: String, _, intrvl: Interval) = kr
Triple(intrvl.start, intrvl.end - intrvl.start, name) // ie sort by (start, length, name)
}
@@ -2333,6 +2325,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
import asm.Opcodes
(instr.category: @scala.annotation.switch) match {
+
case icodes.localsCat =>
def genLocalInstr() = (instr: @unchecked) match {
case THIS(_) => jmethod.visitVarInsn(Opcodes.ALOAD, 0)
@@ -2364,14 +2357,14 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
scoping.popScope(lv, end, instr.pos)
}
}
- genLocalInstr
+ genLocalInstr()
case icodes.stackCat =>
def genStackInstr() = (instr: @unchecked) match {
case LOAD_MODULE(module) =>
// assert(module.isModule, "Expected module: " + module)
- debuglog("generating LOAD_MODULE for: " + module + " flags: " + Flags.flagsToString(module.flags));
+ debuglog("generating LOAD_MODULE for: " + module + " flags: " + module.flagString)
if (clasz.symbol == module.moduleClass && jMethodName != nme.readResolve.toString) {
jmethod.visitVarInsn(Opcodes.ALOAD, 0)
} else {
@@ -2388,7 +2381,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case LOAD_EXCEPTION(_) => ()
}
- genStackInstr
+ genStackInstr()
case icodes.constCat => genConstant(jmethod, instr.asInstanceOf[CONSTANT].constant)
@@ -2422,11 +2415,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
}
- genCastInstr
+ genCastInstr()
case icodes.objsCat =>
def genObjsInstr() = (instr: @unchecked) match {
-
case BOX(kind) =>
val MethodNameAndType(mname, mdesc) = jBoxTo(kind)
jcode.invokestatic(BoxesRunTime, mname, mdesc)
@@ -2442,14 +2434,14 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case MONITOR_ENTER() => emit(Opcodes.MONITORENTER)
case MONITOR_EXIT() => emit(Opcodes.MONITOREXIT)
}
- genObjsInstr
+ genObjsInstr()
case icodes.fldsCat =>
def genFldsInstr() = (instr: @unchecked) match {
case lf @ LOAD_FIELD(field, isStatic) =>
- var owner = javaName(lf.hostClass)
- debuglog("LOAD_FIELD with owner: " + owner + " flags: " + Flags.flagsToString(field.owner.flags))
+ val owner = javaName(lf.hostClass)
+ debuglog("LOAD_FIELD with owner: " + owner + " flags: " + field.owner.flagString)
val fieldJName = javaName(field)
val fieldDescr = descriptor(field)
val opc = if (isStatic) Opcodes.GETSTATIC else Opcodes.GETFIELD
@@ -2463,7 +2455,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
}
- genFldsInstr
+ genFldsInstr()
case icodes.mthdsCat =>
def genMethodsInstr() = (instr: @unchecked) match {
@@ -2476,7 +2468,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case call @ CALL_METHOD(method, style) => genCallMethod(call)
}
- genMethodsInstr
+ genMethodsInstr()
case icodes.arraysCat =>
def genArraysInstr() = (instr: @unchecked) match {
@@ -2485,7 +2477,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case CREATE_ARRAY(elem, 1) => jcode newarray elem
case CREATE_ARRAY(elem, dims) => jmethod.visitMultiANewArrayInsn(descriptor(ArrayN(elem, dims)), dims)
}
- genArraysInstr
+ genArraysInstr()
case icodes.jumpsCat =>
def genJumpInstr() = (instr: @unchecked) match {
@@ -2502,7 +2494,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
while (restTagss.nonEmpty) {
val currLabel = labels(restBranches.head)
for (cTag <- restTagss.head) {
- flatKeys(k) = cTag;
+ flatKeys(k) = cTag
flatBranches(k) = currLabel
k += 1
}
@@ -2515,27 +2507,19 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
jcode.emitSWITCH(flatKeys, flatBranches, defaultLabel, MIN_SWITCH_DENSITY)
case JUMP(whereto) =>
- if (nextBlock != whereto) {
+ if (nextBlock != whereto)
jcode goTo labels(whereto)
- } else if (m.exh.exists(eh => eh.covers(b))) {
// SI-6102: Determine whether eliding this JUMP results in an empty range being covered by some EH.
// If so, emit a NOP in place of the elided JUMP, to avoid "java.lang.ClassFormatError: Illegal exception table range"
- val isSthgLeft = b.toList.exists {
- case _: LOAD_EXCEPTION => false
- case _: SCOPE_ENTER => false
- case _: SCOPE_EXIT => false
- case _: JUMP => false
- case _ => true
- }
- if (!isSthgLeft) {
- emit(asm.Opcodes.NOP)
- }
+ else if (newNormal.isJumpOnly(b) && m.exh.exists(eh => eh.covers(b))) {
+ debugwarn("Had a jump only block that wasn't collapsed")
+ emit(asm.Opcodes.NOP)
}
case CJUMP(success, failure, cond, kind) =>
if (kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
if (nextBlock == success) {
- jcode.emitIF_ICMP(cond.negate, labels(failure))
+ jcode.emitIF_ICMP(cond.negate(), labels(failure))
// .. and fall through to success label
} else {
jcode.emitIF_ICMP(cond, labels(success))
@@ -2543,7 +2527,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
} else if (kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
if (nextBlock == success) {
- jcode.emitIF_ACMP(cond.negate, labels(failure))
+ jcode.emitIF_ACMP(cond.negate(), labels(failure))
// .. and fall through to success label
} else {
jcode.emitIF_ACMP(cond, labels(success))
@@ -2560,7 +2544,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
else emit(Opcodes.DCMPL)
}
if (nextBlock == success) {
- jcode.emitIF(cond.negate, labels(failure))
+ jcode.emitIF(cond.negate(), labels(failure))
// .. and fall through to success label
} else {
jcode.emitIF(cond, labels(success))
@@ -2571,7 +2555,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case CZJUMP(success, failure, cond, kind) =>
if (kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
if (nextBlock == success) {
- jcode.emitIF(cond.negate, labels(failure))
+ jcode.emitIF(cond.negate(), labels(failure))
} else {
jcode.emitIF(cond, labels(success))
if (nextBlock != failure) { jcode goTo labels(failure) }
@@ -2607,7 +2591,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
else emit(Opcodes.DCMPL)
}
if (nextBlock == success) {
- jcode.emitIF(cond.negate, labels(failure))
+ jcode.emitIF(cond.negate(), labels(failure))
} else {
jcode.emitIF(cond, labels(success))
if (nextBlock != failure) { jcode goTo labels(failure) }
@@ -2615,14 +2599,14 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
}
- genJumpInstr
+ genJumpInstr()
case icodes.retCat =>
def genRetInstr() = (instr: @unchecked) match {
case RETURN(kind) => jcode emitRETURN kind
case THROW(_) => emit(Opcodes.ATHROW)
}
- genRetInstr
+ genRetInstr()
}
}
@@ -2633,8 +2617,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
* @param to The type the value will be converted into.
*/
def emitT2T(from: TypeKind, to: TypeKind) {
- assert(isNonUnitValueTK(from), from)
- assert(isNonUnitValueTK(to), to)
+ assert(isNonUnitValueTK(from) && isNonUnitValueTK(to), s"Cannot emit primitive conversion from $from to $to")
def pickOne(opcs: Array[Int]) {
val chosen = (to: @unchecked) match {
@@ -2650,10 +2633,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
if(from == to) { return }
- if((from == BOOL) || (to == BOOL)) {
- // the only conversion involving BOOL that is allowed is (BOOL -> BOOL)
- throw new Error("inconvertible types : " + from.toString() + " -> " + to.toString())
- }
+ // the only conversion involving BOOL that is allowed is (BOOL -> BOOL)
+ assert(from != BOOL && to != BOOL, s"inconvertible types : $from -> $to")
if(from.isIntSizedType) { // BYTE, CHAR, SHORT, and INT. (we're done with BOOL already)
@@ -2701,7 +2682,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def genPrimitive(primitive: Primitive, pos: Position) {
- import asm.Opcodes;
+ import asm.Opcodes
primitive match {
@@ -2732,7 +2713,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
abort("Unknown arithmetic primitive " + primitive)
}
}
- genArith
+ genArith()
// TODO Logical's 2nd elem should be declared ValueTypeKind, to better approximate its allowed values (isIntSized, its comments appears to convey)
// TODO GenICode uses `toTypeKind` to define that elem, `toValueTypeKind` would be needed instead.
@@ -2764,7 +2745,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (kind != BOOL) { emitT2T(INT, kind) }
}
}
- genLogical
+ genLogical()
case Shift(op, kind) =>
def genShift() = op match {
@@ -2793,7 +2774,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
emitT2T(INT, kind)
}
}
- genShift
+ genShift()
case Comparison(op, kind) =>
def genCompare() = op match {
@@ -2813,12 +2794,11 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
}
- genCompare
+ genCompare()
case Conversion(src, dst) =>
debuglog("Converting from: " + src + " to: " + dst)
- if (dst == BOOL) { println("Illegal conversion at: " + clasz + " at: " + pos.source + ":" + pos.line) }
- else { emitT2T(src, dst) }
+ emitT2T(src, dst)
case ArrayLength(_) => emit(Opcodes.ARRAYLENGTH)
@@ -2867,15 +2847,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
////////////////////// local vars ///////////////////////
- // def sizeOf(sym: Symbol): Int = sizeOf(toTypeKind(sym.tpe))
-
def sizeOf(k: TypeKind): Int = if(k.isWideType) 2 else 1
- // def indexOf(m: IMethod, sym: Symbol): Int = {
- // val Some(local) = m lookupLocal sym
- // indexOf(local)
- // }
-
final def indexOf(local: Local): Int = {
assert(local.index >= 0, "Invalid index for: " + local + "{" + local.## + "}: ")
local.index
@@ -2886,7 +2859,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
* *Does not assume the parameters come first!*
*/
def computeLocalVarsIndex(m: IMethod) {
- var idx = if (m.symbol.isStaticMember) 0 else 1;
+ var idx = if (m.symbol.isStaticMember) 0 else 1
for (l <- m.params) {
debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
@@ -2908,7 +2881,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
class JMirrorBuilder(bytecodeWriter: BytecodeWriter) extends JCommonBuilder(bytecodeWriter) {
private var cunit: CompilationUnit = _
- def getCurrentCUnit(): CompilationUnit = cunit;
+ def getCurrentCUnit(): CompilationUnit = cunit
/** Generate a mirror class for a top-level module. A mirror class is a class
* containing only static methods that forward to the corresponding method
@@ -3001,8 +2974,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
for (f <- clasz.fields if f.symbol.hasGetter;
g = f.symbol.getter(clasz.symbol);
- s = f.symbol.setter(clasz.symbol);
- if g.isPublic && !(f.symbol.name startsWith "$")
+ s = f.symbol.setter(clasz.symbol)
+ if g.isPublic && !(f.symbol.name startsWith "$")
) {
// inserting $outer breaks the bean
fieldList = javaName(f.symbol) :: javaName(g) :: (if (s != NoSymbol) javaName(s) else null) :: fieldList
@@ -3091,111 +3064,50 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
* TODO Eventually, these utilities should be moved to IMethod and reused from normalize() (there's nothing JVM-specific about them).
*/
object newNormal {
-
- def startsWithJump(b: BasicBlock): Boolean = { assert(b.nonEmpty, "empty block"); b.firstInstruction.isInstanceOf[JUMP] }
-
- /** Prune from an exception handler those covered blocks which are jump-only. */
- private def coverWhatCountsOnly(m: IMethod): Boolean = {
- assert(m.hasCode, "code-less method")
-
- var wasReduced = false
- for(h <- m.exh) {
- val shouldntCover = (h.covered filter startsWithJump)
- if(shouldntCover.nonEmpty) {
- wasReduced = true
- h.covered --= shouldntCover // not removing any block on purpose.
- }
- }
-
- wasReduced
+ /**
+ * True if a block is "jump only", i.e. it consists of zero or more
+ * instructions that won't make it to the JVM, followed by a JUMP.
+ */
+ def isJumpOnly(b: BasicBlock): Boolean = {
+ val nonICode = firstNonIcodeOnlyInstructions(b)
+ // by definition a block has to have a jump, conditional jump, return, or throw
+ assert(nonICode.hasNext, "empty block")
+ nonICode.next.isInstanceOf[JUMP]
}
- /** An exception handler is pruned provided any of the following holds:
- * (1) it covers nothing (for example, this may result after removing unreachable blocks)
- * (2) each block it covers is of the form: JUMP(_)
- * Return true iff one or more ExceptionHandlers were removed.
- *
- * A caveat: removing an exception handler, for whatever reason, means that its handler code (even if unreachable)
- * won't be able to cause a class-loading-exception. As a result, behavior can be different.
+ /**
+ * Returns the list of instructions in a block that follow all ICode only instructions,
+ * where an ICode only instruction is one that won't make it to the JVM
*/
- private def elimNonCoveringExh(m: IMethod): Boolean = {
- assert(m.hasCode, "code-less method")
-
- def isRedundant(eh: ExceptionHandler): Boolean = {
- (eh.cls != NoSymbol) && ( // TODO `eh.isFinallyBlock` more readable than `eh.cls != NoSymbol`
- eh.covered.isEmpty
- || (eh.covered forall startsWithJump)
- )
- }
-
- var wasReduced = false
- val toPrune = (m.exh.toSet filter isRedundant)
- if(toPrune.nonEmpty) {
- wasReduced = true
- for(h <- toPrune; r <- h.blocks) { m.code.removeBlock(r) } // TODO m.code.removeExh(h)
- m.exh = (m.exh filterNot toPrune)
- }
-
- wasReduced
+ private def firstNonIcodeOnlyInstructions(b: BasicBlock): Iterator[Instruction] = {
+ def isICodeOnlyInstruction(i: Instruction) = i match {
+ case LOAD_EXCEPTION(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) => true
+ case _ => false
+ }
+ b.iterator dropWhile isICodeOnlyInstruction
}
- private def isJumpOnly(b: BasicBlock): Option[BasicBlock] = {
- b.toList match {
- case JUMP(whereto) :: rest =>
- assert(rest.isEmpty, "A block contains instructions after JUMP (looks like enterIgnoreMode() was itself ignored.)")
+ /**
+ * Returns the target of a "jump only" block, i.e. a block that consists of zero
+ * or more instructions that won't make it to the JVM, followed by a JUMP.
+ *
+ * @param b The basic block to examine
+ * @return Some(target) if b is a "jump only" block or None if it's not
+ */
+ private def getJumpOnlyTarget(b: BasicBlock): Option[BasicBlock] = {
+ val nonICode = firstNonIcodeOnlyInstructions(b)
+ // by definition a block has to have a jump, conditional jump, return, or throw
+ assert(nonICode.nonEmpty, "empty block")
+ nonICode.next match {
+ case JUMP(whereto) =>
+ assert(!nonICode.hasNext, "A block contains instructions after JUMP (looks like enterIgnoreMode() was itself ignored.)")
Some(whereto)
case _ => None
}
}
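The three helpers above hinge on skipping instructions that never reach the JVM and inspecting the first one that does. A tiny standalone illustration of the same classification, using hypothetical stand-in types rather than the real ICode instruction classes:

sealed trait Instr
case object ScopeEnter extends Instr               // stand-in for a JVM-invisible instruction
final case class Jump(target: String) extends Instr
case object Return extends Instr

// Same shape as isJumpOnly above: skip JVM-invisible instructions, then check
// whether the first "real" instruction is an unconditional jump.
def isJumpOnlySketch(block: List[Instr]): Boolean = {
  val it = block.iterator.dropWhile(_ == ScopeEnter)
  it.hasNext && it.next().isInstanceOf[Jump]
}

// isJumpOnlySketch(List(ScopeEnter, Jump("L1")))  == true
// isJumpOnlySketch(List(ScopeEnter, Return))      == false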
- private def directSuccStar(b: BasicBlock): List[BasicBlock] = { directSuccStar(List(b)) }
-
- /** Transitive closure of successors potentially reachable due to normal (non-exceptional) control flow.
- Those BBs in the argument are also included in the result */
- private def directSuccStar(starters: Traversable[BasicBlock]): List[BasicBlock] = {
- val result = new mutable.ListBuffer[BasicBlock]
- var toVisit: List[BasicBlock] = starters.toList.distinct
- while(toVisit.nonEmpty) {
- val h = toVisit.head
- toVisit = toVisit.tail
- result += h
- for(p <- h.directSuccessors; if !result.contains(p) && !toVisit.contains(p)) { toVisit = p :: toVisit }
- }
- result.toList
- }
-
- /** Returns:
- * for single-block self-loops, the pair (start, Nil)
- * for other cycles, the pair (backedge-target, basic-blocks-in-the-cycle-except-backedge-target)
- * otherwise a pair consisting of:
- * (a) the endpoint of a (single or multi-hop) chain of JUMPs
- * (such endpoint does not start with a JUMP and therefore is not part of the chain); and
- * (b) the chain (ie blocks to be removed when collapsing the chain of jumps).
- * Precondition: the BasicBlock given as argument starts with an unconditional JUMP.
- */
- private def finalDestination(start: BasicBlock): (BasicBlock, List[BasicBlock]) = {
- assert(startsWithJump(start), "not the start of a (single or multi-hop) chain of JUMPs.")
- var hops: List[BasicBlock] = Nil
- var prev = start
- var done = false
- do {
- done = isJumpOnly(prev) match {
- case Some(dest) =>
- if (dest == start) { return (start, hops) } // leave infinite-loops in place
- hops ::= prev
- if (hops.contains(dest)) {
- // leave infinite-loops in place
- return (dest, hops filterNot (dest eq _))
- }
- prev = dest;
- false
- case None => true
- }
- } while(!done)
-
- (prev, hops)
- }
-
/**
* Collapse a chain of "jump-only" blocks such as:
*
@@ -3211,7 +3123,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
* In more detail:
* Starting at each of the entry points (m.startBlock, the start block of each exception handler)
* rephrase those control-flow instructions targeting a jump-only block (which jumps to a final destination D) to target D.
- * The blocks thus skipped are also removed from IMethod.blocks.
+ * The blocks thus skipped become eligible to be removed by the reachability analyzer.
*
* Rationale for this normalization:
* test/files/run/private-inline.scala after -optimize is chock full of
@@ -3222,106 +3134,164 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
* and thus ranges with identical (start, end) (i.e, identical after GenJVM omitted the JUMPs in question)
* could be weeded out to avoid "java.lang.ClassFormatError: Illegal exception table range"
* Now that visitTryCatchBlock() must be called before Labels are resolved,
- * this method gets rid of the BasicBlocks described above (to recap, consisting of just a JUMP).
+ * this method renders the BasicBlocks described above (to recap, consisting of just a JUMP) unreachable.
*/
- private def collapseJumpOnlyBlocks(m: IMethod): Boolean = {
+ private def collapseJumpOnlyBlocks(m: IMethod) {
assert(m.hasCode, "code-less method")
- /* "start" is relative in a cycle, but we call this helper with the "first" entry-point we found. */
- def realTarget(jumpStart: BasicBlock): Map[BasicBlock, BasicBlock] = {
- assert(startsWithJump(jumpStart), "not part of a jump-chain")
- val Pair(dest, redundants) = finalDestination(jumpStart)
- (for(skipOver <- redundants) yield Pair(skipOver, dest)).toMap
- }
+ def rephraseGotos(detour: mutable.Map[BasicBlock, BasicBlock]) {
+ def lookup(b: BasicBlock) = detour.getOrElse(b, b)
- def rephraseGotos(detour: Map[BasicBlock, BasicBlock]) {
- for(Pair(oldTarget, newTarget) <- detour.iterator) {
- if(m.startBlock == oldTarget) {
- m.code.startBlock = newTarget
- }
- for(eh <- m.exh; if eh.startBlock == oldTarget) {
- eh.setStartBlock(newTarget)
- }
- for(b <- m.blocks; if !detour.isDefinedAt(b)) {
- val idxLast = (b.size - 1)
- b.lastInstruction match {
- case JUMP(whereto) =>
- if (whereto == oldTarget) {
- b.replaceInstruction(idxLast, JUMP(newTarget))
- }
- case CJUMP(succ, fail, cond, kind) =>
- if ((succ == oldTarget) || (fail == oldTarget)) {
- b.replaceInstruction(idxLast, CJUMP(detour.getOrElse(succ, succ),
- detour.getOrElse(fail, fail),
- cond, kind))
- }
- case CZJUMP(succ, fail, cond, kind) =>
- if ((succ == oldTarget) || (fail == oldTarget)) {
- b.replaceInstruction(idxLast, CZJUMP(detour.getOrElse(succ, succ),
- detour.getOrElse(fail, fail),
- cond, kind))
- }
- case SWITCH(tags, labels) =>
- if(labels exists (detour.isDefinedAt(_))) {
- val newLabels = (labels map { lab => detour.getOrElse(lab, lab) })
- b.replaceInstruction(idxLast, SWITCH(tags, newLabels))
- }
- case _ => ()
- }
- }
+ m.code.startBlock = lookup(m.code.startBlock)
+
+ for(eh <- m.exh)
+ eh.setStartBlock(lookup(eh.startBlock))
+
+ for (b <- m.blocks) {
+ def replaceLastInstruction(i: Instruction) = {
+ if (b.lastInstruction != i) {
+ val idxLast = b.size - 1
+ debuglog(s"In block $b, replacing last instruction ${b.lastInstruction} with ${i}")
+ b.replaceInstruction(idxLast, i)
}
}
- /* remove from all containers that may contain a reference to */
- def elide(redu: BasicBlock) {
- assert(m.startBlock != redu, "startBlock should have been re-wired by now")
- m.code.removeBlock(redu);
+ b.lastInstruction match {
+ case JUMP(whereto) =>
+ replaceLastInstruction(JUMP(lookup(whereto)))
+ case CJUMP(succ, fail, cond, kind) =>
+ replaceLastInstruction(CJUMP(lookup(succ), lookup(fail), cond, kind))
+ case CZJUMP(succ, fail, cond, kind) =>
+ replaceLastInstruction(CZJUMP(lookup(succ), lookup(fail), cond, kind))
+ case SWITCH(tags, labels) =>
+ val newLabels = (labels map lookup)
+ replaceLastInstruction(SWITCH(tags, newLabels))
+ case _ => ()
}
+ }
+ }
- var wasReduced = false
- val entryPoints: List[BasicBlock] = m.startBlock :: (m.exh map (_.startBlock));
-
- var elided = mutable.Set.empty[BasicBlock] // debug
- var newTargets = mutable.Set.empty[BasicBlock] // debug
-
- for (ep <- entryPoints) {
- var reachable = directSuccStar(ep) // this list may contain blocks belonging to jump-chains that we'll skip over
- while(reachable.nonEmpty) {
- val h = reachable.head
- reachable = reachable.tail
- if(startsWithJump(h)) {
- val detour = realTarget(h)
- if(detour.nonEmpty) {
- wasReduced = true
- reachable = (reachable filterNot (detour.keySet.contains(_)))
- rephraseGotos(detour)
- detour.keySet foreach elide
- elided ++= detour.keySet
- newTargets ++= detour.values
- }
+ /**
+ * Computes a mapping from each jump-only block to its final destination:
+ * either a non-jump-only block or, if the block sits in a jump-only cycle,
+ * the block itself.
+ */
+ def computeDetour: mutable.Map[BasicBlock, BasicBlock] = {
+ // fetch the jump only blocks and their immediate destinations
+ val pairs = for {
+ block <- m.blocks.toIterator
+ target <- getJumpOnlyTarget(block)
+ } yield (block, target)
+
+ // mapping from a jump-only block to our current knowledge of its
+ // final destination. Initially it's just jump block to immediate jump
+ // target
+ val detour = mutable.Map[BasicBlock, BasicBlock](pairs.toSeq:_*)
+
+ // for each jump-only block find its final destination
+ // taking advantage of the destinations we found for previous
+ // blocks
+ for (key <- detour.keySet) {
+ // we use Robert Floyd's classic Tortoise and Hare algorithm
+ @tailrec
+ def findDestination(tortoise: BasicBlock, hare: BasicBlock): BasicBlock = {
+ if (tortoise == hare)
+ // cycle detected, map key to key
+ key
+ else if (detour contains hare) {
+ // advance hare once
+ val hare1 = detour(hare)
+ // make sure we can advance hare a second time
+ if (detour contains hare1)
+ // advance tortoise once and hare a second time
+ findDestination(detour(tortoise), detour(hare1))
+ else
+ // hare1 is not in the map so it's not a jump-only block, it's the destination
+ hare1
+ } else
+ // hare is not in the map so it's not a jump-only block, it's the destination
+ hare
}
+ // update the mapping for key based on its final destination
+ detour(key) = findDestination(key, detour(key))
+ }
+ detour
+ }
+
+ val detour = computeDetour
+ rephraseGotos(detour)
+
+ if (settings.debug.value) {
+ val (remappings, cycles) = detour partition {case (source, target) => source != target}
+ for ((source, target) <- remappings) {
+ debuglog(s"Will elide jump only block $source because it can be jumped around to get to $target.")
+ if (m.startBlock == source) debugwarn("startBlock should have been re-wired by now")
+ }
+ val sources = remappings.keySet
+ val targets = remappings.values.toSet
+ val intersection = sources intersect targets
+
+ if (intersection.nonEmpty) debugwarn(s"contradiction: we seem to have some source and target overlap in blocks ${intersection.mkString}. Map was ${detour.mkString}")
+
+ for ((source, _) <- cycles) {
+ debuglog(s"Block $source is in a do-nothing infinite loop. Did the user write 'while(true){}'?")
+ }
+ }
+ }
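For reference, the cycle-safe chain resolution that computeDetour performs with Floyd's tortoise-and-hare scheme can be sketched independently of ICode, over an arbitrary redirection map. The helper below is hypothetical and not part of the patch; it only mirrors the structure of findDestination above.

import scala.annotation.tailrec

// Sketch: resolve each key of a redirection map to its final target,
// mapping a key to itself when it sits on a cycle (as computeDetour does).
def resolve[A](redirect: Map[A, A]): Map[A, A] =
  redirect.map { case (key, first) =>
    @tailrec
    def go(tortoise: A, hare: A): A =
      if (tortoise == hare) key                        // cycle detected: key maps to itself
      else redirect.get(hare) match {
        case Some(h1) =>
          redirect.get(h1) match {
            case Some(h2) => go(redirect(tortoise), h2) // hare advances two steps, tortoise one
            case None     => h1                         // h1 is not a key: it is the final target
          }
        case None => hare                               // hare is not a key: it is the final target
      }
    key -> go(key, first)
  }

For example, resolve(Map(1 -> 2, 2 -> 3)) yields Map(1 -> 3, 2 -> 3), while the two-element cycle Map(1 -> 2, 2 -> 1) resolves each key to itself, matching how computeDetour maps blocks on a jump-only cycle back to themselves.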
+
+ /**
+ * Removes all blocks that are unreachable in a method using a standard reachability analysis.
+ */
+ def elimUnreachableBlocks(m: IMethod) {
+ assert(m.hasCode, "code-less method")
+
+ // assume nothing is reachable until we prove it can be reached
+ val reachable = mutable.Set[BasicBlock]()
+
+ // the set of blocks that we know are reachable but have
+ // yet to be marked reachable, initially only the start block
+ val worklist = mutable.Set(m.startBlock)
+
+ while (worklist.nonEmpty) {
+ val block = worklist.head
+ worklist remove block
+ // we know that one is reachable
+ reachable add block
+ // so are its successors, so go back around and add the ones we still
+ // think are unreachable
+ worklist ++= (block.successors filterNot reachable)
+ }
+
+ // exception handlers need to be told not to cover unreachable blocks
+ // and exception handlers that no longer cover any blocks need to be
+ // removed entirely
+ val unusedExceptionHandlers = mutable.Set[ExceptionHandler]()
+ for (exh <- m.exh) {
+ exh.covered = exh.covered filter reachable
+ if (exh.covered.isEmpty) {
+ unusedExceptionHandlers += exh
}
}
- assert(newTargets.intersect(elided).isEmpty, "contradiction: we just elided the final destionation of a jump-chain")
- wasReduced
+ // remove the unused exception handler references
+ if (settings.debug.value)
+ for (exh <- unusedExceptionHandlers) debuglog(s"eliding exception handler $exh because it does not cover any reachable blocks")
+ m.exh = m.exh filterNot unusedExceptionHandlers
+
+ // everything not in the reachable set is unreachable, unused, and unloved. buh bye
+ for (b <- m.blocks filterNot reachable) {
+ debuglog(s"eliding block $b because it is unreachable")
+ m.code removeBlock b
+ }
}
def normalize(m: IMethod) {
if(!m.hasCode) { return }
collapseJumpOnlyBlocks(m)
- var wasReduced = false;
- do {
- wasReduced = false
- // Prune from an exception handler those covered blocks which are jump-only.
- wasReduced |= coverWhatCountsOnly(m); icodes.checkValid(m) // TODO should be unnecessary now that collapseJumpOnlyBlocks(m) is in place
- // Prune exception handlers covering nothing.
- wasReduced |= elimNonCoveringExh(m); icodes.checkValid(m)
-
- // TODO see note in genExceptionHandlers about an ExceptionHandler.covered containing dead blocks (newNormal should remove them, but, where do those blocks come from?)
- } while (wasReduced)
-
- // TODO this would be a good time to remove synthetic local vars seeing no use, don't forget to call computeLocalVarsIndex() afterwards.
+ if (settings.optimise.value)
+ elimUnreachableBlocks(m)
+ icodes checkValid m
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala
deleted file mode 100644
index 72b7e35408..0000000000
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Stephane Micheloud
- */
-
-
-package scala.tools.nsc
-package backend.jvm
-
-import ch.epfl.lamp.fjbg._
-import symtab.Flags
-
-trait GenAndroid {
- self: GenJVM =>
-
- import global._
- import icodes._
- import opcodes._
-
- /** From the reference documentation of the Android SDK:
- * The `Parcelable` interface identifies classes whose instances can be
- * written to and restored from a `Parcel`. Classes implementing the
- * `Parcelable` interface must also have a static field called `CREATOR`,
- * which is an object implementing the `Parcelable.Creator` interface.
- */
- private val fieldName = newTermName("CREATOR")
-
- private lazy val AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable")
- private lazy val AndroidCreatorClass = rootMirror.getClassIfDefined("android.os.Parcelable$Creator")
-
- def isAndroidParcelableClass(sym: Symbol) =
- (AndroidParcelableInterface != NoSymbol) &&
- (sym.parentSymbols contains AndroidParcelableInterface)
-
- def addCreatorCode(codegen: BytecodeGenerator, block: BasicBlock) {
- import codegen._
- val fieldSymbol = (
- clasz.symbol.newValue(newTermName(fieldName), NoPosition, Flags.STATIC | Flags.FINAL)
- setInfo AndroidCreatorClass.tpe
- )
- val methodSymbol = definitions.getMember(clasz.symbol.companionModule, fieldName)
- clasz addField new IField(fieldSymbol)
- block emit CALL_METHOD(methodSymbol, Static(false))
- block emit STORE_FIELD(fieldSymbol, true)
- }
-
- def legacyAddCreatorCode(codegen: BytecodeGenerator, clinit: JExtendedCode) {
- import codegen._
- val creatorType = javaType(AndroidCreatorClass)
- jclass.addNewField(PublicStaticFinal,
- fieldName,
- creatorType)
- val moduleName = javaName(clasz.symbol)+"$"
- clinit.emitGETSTATIC(moduleName,
- nme.MODULE_INSTANCE_FIELD.toString,
- new JObjectType(moduleName))
- clinit.emitINVOKEVIRTUAL(moduleName, fieldName,
- new JMethodType(creatorType, Array()))
- clinit.emitPUTSTATIC(jclass.getName(), fieldName, creatorType)
- }
-
-}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
deleted file mode 100644
index 598965b982..0000000000
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ /dev/null
@@ -1,1921 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Iulian Dragos
- */
-
-package scala.tools.nsc
-package backend.jvm
-
-import java.io.{ByteArrayOutputStream, DataOutputStream, OutputStream }
-import java.nio.ByteBuffer
-import scala.collection.{ mutable, immutable }
-import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer }
-import scala.tools.nsc.symtab._
-import scala.reflect.internal.util.{ SourceFile, NoSourceFile }
-import scala.reflect.internal.ClassfileConstants._
-import ch.epfl.lamp.fjbg._
-import JAccessFlags._
-import JObjectType.{ JAVA_LANG_STRING, JAVA_LANG_OBJECT }
-import java.util.jar.{ JarEntry, JarOutputStream }
-import scala.tools.nsc.io.AbstractFile
-import scala.language.postfixOps
-
-/** This class ...
- *
- * @author Iulian Dragos
- * @version 1.0
- *
- */
-abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with BytecodeWriters with GenJVMASM {
- import global._
- import icodes._
- import icodes.opcodes._
- import definitions._
-
- val phaseName = "jvm"
-
- /** Create a new phase */
- override def newPhase(p: Phase): Phase = new JvmPhase(p)
-
- /** JVM code generation phase
- */
- class JvmPhase(prev: Phase) extends ICodePhase(prev) {
- def name = phaseName
- override def erasedTypes = true
- def apply(cls: IClass) = sys.error("no implementation")
-
- override def run() {
- // we reinstantiate the bytecode generator at each run, to allow the GC
- // to collect everything
- if (settings.debug.value)
- inform("[running phase " + name + " on icode]")
-
- if (settings.Xdce.value)
- for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) {
- log(s"Optimizer eliminated ${sym.fullNameString}")
- icodes.classes -= sym
- }
-
- // For predictably ordered error messages.
- val sortedClasses = classes.values.toList sortBy ("" + _.symbol.fullName)
- val entryPoints = sortedClasses filter isJavaEntryPoint
-
- val bytecodeWriter = settings.outputDirs.getSingleOutput match {
- case Some(f) if f hasExtension "jar" =>
- // If no main class was specified, see if there's only one
- // entry point among the classes going into the jar.
- if (settings.mainClass.isDefault) {
- entryPoints map (_.symbol fullName '.') match {
- case Nil =>
- log("No Main-Class designated or discovered.")
- case name :: Nil =>
- log("Unique entry point: setting Main-Class to " + name)
- settings.mainClass.value = name
- case names =>
- log("No Main-Class due to multiple entry points:\n " + names.mkString("\n "))
- }
- }
- else log("Main-Class was specified: " + settings.mainClass.value)
-
- new DirectToJarfileWriter(f.file)
-
- case _ =>
- if (settings.Ygenjavap.isDefault) {
- if(settings.Ydumpclasses.isDefault)
- new ClassBytecodeWriter { }
- else
- new ClassBytecodeWriter with DumpBytecodeWriter { }
- }
- else new ClassBytecodeWriter with JavapBytecodeWriter { }
- }
-
- val codeGenerator = new BytecodeGenerator(bytecodeWriter)
- debuglog("Created new bytecode generator for " + classes.size + " classes.")
-
- sortedClasses foreach { c =>
- try codeGenerator.genClass(c)
- catch {
- case e: JCode.CodeSizeTooBigException =>
- log("Skipped class %s because it has methods that are too long.".format(c))
- }
- }
-
- bytecodeWriter.close()
- classes.clear()
- }
- }
-
- var pickledBytes = 0 // statistics
-
- /**
- * Java bytecode generator.
- *
- */
- class BytecodeGenerator(bytecodeWriter: BytecodeWriter) extends BytecodeUtil {
- def this() = this(new ClassBytecodeWriter { })
- def debugLevel = settings.debuginfo.indexOfChoice
- import bytecodeWriter.writeClass
-
- val MIN_SWITCH_DENSITY = 0.7
- val INNER_CLASSES_FLAGS =
- (ACC_PUBLIC | ACC_PRIVATE | ACC_PROTECTED | ACC_STATIC | ACC_INTERFACE | ACC_ABSTRACT)
-
- val PublicStatic = ACC_PUBLIC | ACC_STATIC
- val PublicStaticFinal = ACC_PUBLIC | ACC_STATIC | ACC_FINAL
-
- val StringBuilderClassName = javaName(definitions.StringBuilderClass)
- val BoxesRunTime = "scala.runtime.BoxesRunTime"
-
- val StringBuilderType = new JObjectType(StringBuilderClassName) // TODO use ASMType.getObjectType
- val toStringType = new JMethodType(JAVA_LANG_STRING, JType.EMPTY_ARRAY) // TODO use ASMType.getMethodType
- val arrayCloneType = new JMethodType(JAVA_LANG_OBJECT, JType.EMPTY_ARRAY)
- val MethodTypeType = new JObjectType("java.dyn.MethodType")
- val JavaLangClassType = new JObjectType("java.lang.Class")
- val MethodHandleType = new JObjectType("java.dyn.MethodHandle")
-
- // Scala attributes
- val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo")
- val BeanInfoSkipAttr = rootMirror.getRequiredClass("scala.beans.BeanInfoSkip")
- val BeanDisplayNameAttr = rootMirror.getRequiredClass("scala.beans.BeanDisplayName")
- val BeanDescriptionAttr = rootMirror.getRequiredClass("scala.beans.BeanDescription")
-
- // Additional interface parents based on annotations and other cues
- def newParentForAttr(attr: Symbol): Option[Symbol] = attr match {
- case SerializableAttr => Some(SerializableClass)
- case CloneableAttr => Some(JavaCloneableClass)
- case RemoteAttr => Some(RemoteInterfaceClass)
- case _ => None
- }
-
- val versionPickle = {
- val vp = new PickleBuffer(new Array[Byte](16), -1, 0)
- assert(vp.writeIndex == 0, vp)
- vp writeNat PickleFormat.MajorVersion
- vp writeNat PickleFormat.MinorVersion
- vp writeNat 0
- vp
- }
-
- private def helperBoxTo(kind: ValueTypeKind): Tuple2[String, JMethodType] = {
- val boxedType = definitions.boxedClass(kind.toType.typeSymbol)
- val mtype = new JMethodType(javaType(boxedType), Array(javaType(kind)))
-
- Pair("boxTo" + boxedType.decodedName, mtype)
- }
-
- private val jBoxTo: Map[TypeKind, Tuple2[String, JMethodType]] = Map(
- BOOL -> helperBoxTo(BOOL) ,
- BYTE -> helperBoxTo(BYTE) ,
- CHAR -> helperBoxTo(CHAR) ,
- SHORT -> helperBoxTo(SHORT) ,
- INT -> helperBoxTo(INT) ,
- LONG -> helperBoxTo(LONG) ,
- FLOAT -> helperBoxTo(FLOAT) ,
- DOUBLE -> helperBoxTo(DOUBLE)
- )
-
- private def helperUnboxTo(kind: ValueTypeKind): Tuple2[String, JMethodType] = {
- val mtype = new JMethodType(javaType(kind), Array(JAVA_LANG_OBJECT))
- val mname = "unboxTo" + kind.toType.typeSymbol.decodedName
-
- Pair(mname, mtype)
- }
-
- private val jUnboxTo: Map[TypeKind, Tuple2[String, JMethodType]] = Map(
- BOOL -> helperUnboxTo(BOOL) ,
- BYTE -> helperUnboxTo(BYTE) ,
- CHAR -> helperUnboxTo(CHAR) ,
- SHORT -> helperUnboxTo(SHORT) ,
- INT -> helperUnboxTo(INT) ,
- LONG -> helperUnboxTo(LONG) ,
- FLOAT -> helperUnboxTo(FLOAT) ,
- DOUBLE -> helperUnboxTo(DOUBLE)
- )
-
- var clasz: IClass = _
- var method: IMethod = _
- var jclass: JClass = _
- var jmethod: JMethod = _
- // var jcode: JExtendedCode = _
-
- def isParcelableClass = isAndroidParcelableClass(clasz.symbol)
- def isRemoteClass = clasz.symbol hasAnnotation RemoteAttr
- def serialVUID = clasz.symbol getAnnotation SerialVersionUIDAttr collect {
- case AnnotationInfo(_, Literal(const) :: _, _) => const.longValue
- }
-
- val fjbgContext = new FJBGContext(49, 0)
-
- val emitSource = debugLevel >= 1
- val emitLines = debugLevel >= 2
- val emitVars = debugLevel >= 3
-
- // bug had phase with wrong name; leaving enabled for brief pseudo deprecation
- private val checkSignatures = (
- (settings.check containsName phaseName)
- || (settings.check.value contains "genjvm") && {
- global.warning("This option will be removed: please use -Ycheck:%s, not -Ycheck:genjvm." format phaseName)
- true
- }
- )
-
- /** For given symbol return a symbol corresponding to a class that should be declared as inner class.
- *
- * For example:
- * class A {
- * class B
- * object C
- * }
- *
- * then method will return NoSymbol for A, the same symbol for A.B (corresponding to A$B class) and A$C$ symbol
- * for A.C.
- */
- private def innerClassSymbolFor(s: Symbol): Symbol =
- if (s.isClass) s else if (s.isModule) s.moduleClass else NoSymbol
-
- override def javaName(sym: Symbol): String = { // TODO Miguel says: check whether a single pass over `icodes.classes` can populate `innerClassBuffer` faster.
- /**
- * Checks if given symbol corresponds to inner class/object and add it to innerClassBuffer
- *
- * Note: This method is called recursively thus making sure that we add complete chain
- * of inner class all until root class.
- */
- def collectInnerClass(s: Symbol): Unit = {
- // TODO: some beforeFlatten { ... } which accounts for
- // being nested in parameterized classes (if we're going to selectively flatten.)
- val x = innerClassSymbolFor(s)
- if(x ne NoSymbol) {
- assert(x.isClass, "not an inner-class symbol")
- val isInner = !x.rawowner.isPackageClass
- if (isInner) {
- innerClassBuffer += x
- collectInnerClass(x.rawowner)
- }
- }
- }
- collectInnerClass(sym)
-
- super.javaName(sym)
- }
-
- /** Write a class to disk, adding the Scala signature (pickled type
- * information) and inner classes.
- *
- * @param jclass The FJBG class, where code was emitted
- * @param sym The corresponding symbol, used for looking up pickled information
- */
- def emitClass(jclass: JClass, sym: Symbol) {
- addInnerClasses(jclass)
- writeClass("" + sym.name, jclass.getName(), toByteArray(jclass), sym)
- }
-
- /** Returns the ScalaSignature annotation if it must be added to this class,
- * none otherwise; furthermore, it adds to `jclass` the ScalaSig marker
- * attribute (marking that a scala signature annotation is present) or the
- * Scala marker attribute (marking that the signature for this class is in
- * another file). The annotation that is returned by this method must be
- * added to the class' annotations list when generating them.
- *
- * @param jclass The class file that is being readied.
- * @param sym The symbol for which the signature has been entered in
- * the symData map. This is different than the symbol
- * that is being generated in the case of a mirror class.
- * @return An option that is:
- * - defined and contains an annotation info of the
- * ScalaSignature type, instantiated with the pickle
- * signature for sym (a ScalaSig marker attribute has
- * been written);
- * - undefined if the jclass/sym couple must not contain a
- * signature (a Scala marker attribute has been written).
- */
- def scalaSignatureAddingMarker(jclass: JClass, sym: Symbol): Option[AnnotationInfo] =
- currentRun.symData get sym match {
- case Some(pickle) if !nme.isModuleName(newTermName(jclass.getName)) =>
- val scalaAttr =
- fjbgContext.JOtherAttribute(jclass, jclass, tpnme.ScalaSignatureATTR.toString,
- versionPickle.bytes, versionPickle.writeIndex)
- jclass addAttribute scalaAttr
- val scalaAnnot = {
- val sigBytes = ScalaSigBytes(pickle.bytes.take(pickle.writeIndex))
- AnnotationInfo(sigBytes.sigAnnot, Nil, List((nme.bytes, sigBytes)))
- }
- pickledBytes += pickle.writeIndex
- currentRun.symData -= sym
- currentRun.symData -= sym.companionSymbol
- Some(scalaAnnot)
- case _ =>
- val markerAttr =
- fjbgContext.JOtherAttribute(jclass, jclass, tpnme.ScalaATTR.toString, new Array[Byte](0), 0)
- jclass addAttribute markerAttr
- None
- }
-
- private var innerClassBuffer = mutable.LinkedHashSet[Symbol]()
-
- /** Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents.
- * This is important on Android because there is otherwise an interface explosion.
- */
- private def minimizeInterfaces(interfaces: List[Symbol]): List[Symbol] = {
- var rest = interfaces
- var leaves = List.empty[Symbol]
- while(!rest.isEmpty) {
- val candidate = rest.head
- val nonLeaf = leaves exists { lsym => lsym isSubClass candidate }
- if(!nonLeaf) {
- leaves = candidate :: (leaves filterNot { lsym => candidate isSubClass lsym })
- }
- rest = rest.tail
- }
-
- leaves
- }
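For readers skimming this now-deleted backend: the loop above keeps only the most specific parent interfaces. A minimal, self-contained sketch of the same idea follows, using plain strings and a hand-written subtype table; MinimizeInterfacesSketch, subClasses and isSubClass are illustrative stand-ins, not the compiler's own Symbols.

    object MinimizeInterfacesSketch {
      // "a isSubClass b" means a extends b, directly or transitively.
      val subClasses: Map[String, Set[String]] = Map(
        "java.util.List"       -> Set("java.util.Collection", "java.lang.Iterable"),
        "java.util.Collection" -> Set("java.lang.Iterable"),
        "java.lang.Iterable"   -> Set.empty[String]
      )
      def isSubClass(a: String, b: String): Boolean =
        a == b || subClasses.getOrElse(a, Set.empty[String]).contains(b)

      // Keep only the most specific interfaces: anything implied by another
      // parent is dropped, mirroring the while-loop above.
      def minimize(interfaces: List[String]): List[String] =
        interfaces.foldLeft(List.empty[String]) { (leaves, candidate) =>
          if (leaves.exists(l => isSubClass(l, candidate))) leaves
          else candidate :: leaves.filterNot(l => isSubClass(candidate, l))
        }

      def main(args: Array[String]): Unit =
        // Only List survives; Collection and Iterable are implied by it.
        println(minimize(List("java.lang.Iterable", "java.util.Collection", "java.util.List")))
    }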
-
- def genClass(c: IClass) {
- clasz = c
- innerClassBuffer.clear()
-
- val name = javaName(c.symbol)
-
- val ps = c.symbol.info.parents
-
- val superClass: Symbol = if(ps.isEmpty) ObjectClass else ps.head.typeSymbol;
-
- val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses;
- val superInterfaces = superInterfaces0 ++ c.symbol.annotations.flatMap(ann => newParentForAttr(ann.symbol)) distinct
-
- val ifaces =
- if(superInterfaces.isEmpty) JClass.NO_INTERFACES
- else mkArray(minimizeInterfaces(superInterfaces) map javaName)
-
- jclass = fjbgContext.JClass(javaFlags(c.symbol),
- name,
- javaName(superClass),
- ifaces,
- c.cunit.source.toString)
-
- if (isStaticModule(c.symbol) || serialVUID.isDefined || isParcelableClass) {
- if (isStaticModule(c.symbol))
- addModuleInstanceField
- addStaticInit(jclass, c.lookupStaticCtor)
-
- if (isTopLevelModule(c.symbol)) {
- if (c.symbol.companionClass == NoSymbol)
- generateMirrorClass(c.symbol, c.cunit.source)
- else
- log("No mirror class for module with linked class: " +
- c.symbol.fullName)
- }
- }
- else {
- c.lookupStaticCtor foreach (constructor => addStaticInit(jclass, Some(constructor)))
-
- // it must be a top level class (name contains no $s)
- def isCandidateForForwarders(sym: Symbol): Boolean =
- afterPickler {
- !(sym.name.toString contains '$') && sym.hasModuleFlag && !sym.isImplClass && !sym.isNestedClass
- }
-
- // At some point this started throwing lots of exceptions as a compile was finishing.
- // error: java.lang.AssertionError:
- // assertion failed: List(object package$CompositeThrowable, object package$CompositeThrowable)
- // ...is the one I've seen repeatedly. Suppressing.
- val lmoc = (
- try c.symbol.companionModule
- catch { case x: AssertionError =>
- Console.println("Suppressing failed assert: " + x)
- NoSymbol
- }
- )
- // add static forwarders if there are no name conflicts; see bugs #363 and #1735
- if (lmoc != NoSymbol && !c.symbol.isInterface) {
- if (isCandidateForForwarders(lmoc) && !settings.noForwarders.value) {
- log("Adding static forwarders from '%s' to implementations in '%s'".format(c.symbol, lmoc))
- addForwarders(jclass, lmoc.moduleClass)
- }
- }
- }
-
- clasz.fields foreach genField
- clasz.methods foreach genMethod
-
- val ssa = scalaSignatureAddingMarker(jclass, c.symbol)
- addGenericSignature(jclass, c.symbol, c.symbol.owner)
- addAnnotations(jclass, c.symbol.annotations ++ ssa)
- addEnclosingMethodAttribute(jclass, c.symbol)
- emitClass(jclass, c.symbol)
-
- if (c.symbol hasAnnotation BeanInfoAttr)
- genBeanInfoClass(c)
- }
-
- private def addEnclosingMethodAttribute(jclass: JClass, clazz: Symbol) {
- val sym = clazz.originalEnclosingMethod
- if (sym.isMethod) {
- debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, sym.enclClass))
- jclass addAttribute fjbgContext.JEnclosingMethodAttribute(
- jclass,
- javaName(sym.enclClass),
- javaName(sym),
- javaType(sym)
- )
- } else if (clazz.isAnonymousClass) {
- val enclClass = clazz.rawowner
- assert(enclClass.isClass, enclClass)
- val sym = enclClass.primaryConstructor
- if (sym == NoSymbol)
- log("Ran out of room looking for an enclosing method for %s: no constructor here.".format(
- enclClass, clazz)
- )
- else {
- debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, enclClass))
- jclass addAttribute fjbgContext.JEnclosingMethodAttribute(
- jclass,
- javaName(enclClass),
- javaName(sym),
- javaType(sym).asInstanceOf[JMethodType]
- )
- }
- }
- }
-
- private def toByteArray(jc: JClass): Array[Byte] = {
- val bos = new java.io.ByteArrayOutputStream()
- val dos = new java.io.DataOutputStream(bos)
- jc.writeTo(dos)
- dos.close()
- bos.toByteArray
- }
-
- /**
- * Generate a bean info class that describes the given class.
- *
- * @author Ross Judson (ross.judson@soletta.com)
- */
- def genBeanInfoClass(c: IClass) {
- val description = c.symbol getAnnotation BeanDescriptionAttr
- // informProgress(description.toString)
-
- val beanInfoClass = fjbgContext.JClass(javaFlags(c.symbol),
- javaName(c.symbol) + "BeanInfo",
- "scala/beans/ScalaBeanInfo",
- JClass.NO_INTERFACES,
- c.cunit.source.toString)
-
- var fieldList = List[String]()
- for (f <- clasz.fields if f.symbol.hasGetter;
- g = f.symbol.getter(c.symbol);
- s = f.symbol.setter(c.symbol);
- if g.isPublic && !(f.symbol.name startsWith "$")) // inserting $outer breaks the bean
- fieldList = javaName(f.symbol) :: javaName(g) :: (if (s != NoSymbol) javaName(s) else null) :: fieldList
- val methodList =
- for (m <- clasz.methods
- if !m.symbol.isConstructor &&
- m.symbol.isPublic &&
- !(m.symbol.name startsWith "$") &&
- !m.symbol.isGetter &&
- !m.symbol.isSetter) yield javaName(m.symbol)
-
- val constructor = beanInfoClass.addNewMethod(ACC_PUBLIC, "<init>", JType.VOID, new Array[JType](0), new Array[String](0))
- val jcode = constructor.getCode().asInstanceOf[JExtendedCode]
- val strKind = new JObjectType(javaName(StringClass))
- val stringArrayKind = new JArrayType(strKind)
- val conType = new JMethodType(JType.VOID, Array(javaType(ClassClass), stringArrayKind, stringArrayKind))
-
- def push(lst:Seq[String]) {
- var fi = 0
- for (f <- lst) {
- jcode.emitDUP()
- jcode emitPUSH fi
- if (f != null)
- jcode emitPUSH f
- else
- jcode.emitACONST_NULL()
- jcode emitASTORE strKind
- fi += 1
- }
- }
-
- jcode.emitALOAD_0()
- // push the class
- jcode emitPUSH javaType(c.symbol).asInstanceOf[JReferenceType]
-
- // push the string array of field information
- jcode emitPUSH fieldList.length
- jcode emitANEWARRAY strKind
- push(fieldList)
-
- // push the string array of method information
- jcode emitPUSH methodList.length
- jcode emitANEWARRAY strKind
- push(methodList)
-
- // invoke the superclass constructor, which will do the
- // necessary java reflection and create Method objects.
- jcode.emitINVOKESPECIAL("scala/beans/ScalaBeanInfo", "<init>", conType)
- jcode.emitRETURN()
-
- // write the bean information class file.
- writeClass("BeanInfo ", beanInfoClass.getName(), toByteArray(beanInfoClass), c.symbol)
- }
-
- /** Add the given 'throws' attributes to jmethod */
- def addExceptionsAttribute(jmethod: JMethod, excs: List[AnnotationInfo]) {
- if (excs.isEmpty) return
-
- val cpool = jmethod.getConstantPool
- val buf: ByteBuffer = ByteBuffer.allocate(512)
- var nattr = 0
-
- // put a placeholder value; the actual number is patched in at the end
- buf putShort 0xbaba.toShort
-
- for (ThrownException(exc) <- excs.distinct) {
- buf.putShort(
- cpool.addClass(
- javaName(exc)).shortValue)
- nattr += 1
- }
-
- assert(nattr > 0, nattr)
- buf.putShort(0, nattr.toShort)
- addAttribute(jmethod, tpnme.ExceptionsATTR, buf)
- }
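The placeholder trick above (write a dummy u2 count, append the entries, then patch the real count back in) is easier to see in isolation. The sketch below exercises the same java.nio.ByteBuffer calls on toy data; PatchCountSketch, writeEntries and the entry values are invented for illustration only.

    import java.nio.ByteBuffer

    object PatchCountSketch {
      def writeEntries(entries: List[Int]): Array[Byte] = {
        val buf = ByteBuffer.allocate(2 + 2 * entries.length)
        val countPos = buf.position()      // remember where the count goes
        buf.putShort(0xbaba.toShort)       // placeholder, overwritten below
        var n = 0
        for (e <- entries) { buf.putShort(e.toShort); n += 1 }
        buf.putShort(countPos, n.toShort)  // absolute put: patch in the real count
        buf.array().slice(0, buf.position())
      }

      def main(args: Array[String]): Unit =
        println(writeEntries(List(1, 2, 3)).toList)  // first two bytes encode the count 3
    }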
-
- /** Whether an annotation should be emitted as a Java annotation
- * .initialize: if 'annot' is read from pickle, atp might be un-initialized
- */
- private def shouldEmitAnnotation(annot: AnnotationInfo) =
- annot.symbol.initialize.isJavaDefined &&
- annot.matches(ClassfileAnnotationClass) &&
- annot.args.isEmpty
-
- private def emitJavaAnnotations(cpool: JConstantPool, buf: ByteBuffer, annotations: List[AnnotationInfo]): Int = {
- def emitArgument(arg: ClassfileAnnotArg): Unit = arg match {
- case LiteralAnnotArg(const) =>
- const.tag match {
- case BooleanTag =>
- buf put 'Z'.toByte
- buf putShort cpool.addInteger(if(const.booleanValue) 1 else 0).toShort
- case ByteTag =>
- buf put 'B'.toByte
- buf putShort cpool.addInteger(const.byteValue).toShort
- case ShortTag =>
- buf put 'S'.toByte
- buf putShort cpool.addInteger(const.shortValue).toShort
- case CharTag =>
- buf put 'C'.toByte
- buf putShort cpool.addInteger(const.charValue).toShort
- case IntTag =>
- buf put 'I'.toByte
- buf putShort cpool.addInteger(const.intValue).toShort
- case LongTag =>
- buf put 'J'.toByte
- buf putShort cpool.addLong(const.longValue).toShort
- case FloatTag =>
- buf put 'F'.toByte
- buf putShort cpool.addFloat(const.floatValue).toShort
- case DoubleTag =>
- buf put 'D'.toByte
- buf putShort cpool.addDouble(const.doubleValue).toShort
- case StringTag =>
- buf put 's'.toByte
- buf putShort cpool.addUtf8(const.stringValue).toShort
- case ClazzTag =>
- buf put 'c'.toByte
- buf putShort cpool.addUtf8(javaType(const.typeValue).getSignature()).toShort
- case EnumTag =>
- buf put 'e'.toByte
- buf putShort cpool.addUtf8(javaType(const.tpe).getSignature()).toShort
- buf putShort cpool.addUtf8(const.symbolValue.name.toString).toShort
- }
-
- case sb@ScalaSigBytes(bytes) if !sb.isLong =>
- buf put 's'.toByte
- buf putShort cpool.addUtf8(sb.encodedBytes).toShort
-
- case sb@ScalaSigBytes(bytes) if sb.isLong =>
- buf put '['.toByte
- val stringCount = (sb.encodedBytes.length / 65534) + 1
- buf putShort stringCount.toShort
- for (i <- 0 until stringCount) {
- buf put 's'.toByte
- val j = i * 65535
- val string = sb.encodedBytes.slice(j, j + 65535)
- buf putShort cpool.addUtf8(string).toShort
- }
-
- case ArrayAnnotArg(args) =>
- buf put '['.toByte
- buf putShort args.length.toShort
- args foreach emitArgument
-
- case NestedAnnotArg(annInfo) =>
- buf put '@'.toByte
- emitAnnotation(annInfo)
- }
-
- def emitAnnotation(annotInfo: AnnotationInfo) {
- val AnnotationInfo(typ, args, assocs) = annotInfo
- val jtype = javaType(typ)
- buf putShort cpool.addUtf8(jtype.getSignature()).toShort
- assert(args.isEmpty, args)
- buf putShort assocs.length.toShort
- for ((name, value) <- assocs) {
- buf putShort cpool.addUtf8(name.toString).toShort
- emitArgument(value)
- }
- }
-
- var nannots = 0
- val pos = buf.position()
-
- // put a placeholder value; the actual number of annotations is patched in at the end
- buf putShort 0xbaba.toShort
-
- for (annot <- annotations if shouldEmitAnnotation(annot)) {
- nannots += 1
- emitAnnotation(annot)
- }
-
- // save the number of annotations
- buf.putShort(pos, nannots.toShort)
- nannots
- }
-
- // @M don't generate java generics sigs for (members of) implementation
- // classes, as they are monomorphic (TODO: ok?)
- private def needsGenericSignature(sym: Symbol) = !(
- // PP: This condition used to include sym.hasExpandedName, but this leads
- // to the total loss of generic information if a private member is
- // accessed from a closure: both the field and the accessor were generated
- // without it. This is particularly bad because the availability of
- // generic information could disappear as a consequence of a seemingly
- // unrelated change.
- settings.Ynogenericsig.value
- || sym.isArtifact
- || sym.isLiftedMethod
- || sym.isBridge
- || (sym.ownerChain exists (_.isImplClass))
- )
- def addGenericSignature(jmember: JMember, sym: Symbol, owner: Symbol) {
- if (needsGenericSignature(sym)) {
- val memberTpe = beforeErasure(owner.thisType.memberInfo(sym))
-
- erasure.javaSig(sym, memberTpe) foreach { sig =>
- // This seems useful enough in the general case.
- log(sig)
- if (checkSignatures) {
- val normalizedTpe = beforeErasure(erasure.prepareSigMap(memberTpe))
- val bytecodeTpe = owner.thisType.memberInfo(sym)
- if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) {
- clasz.cunit.warning(sym.pos,
- """|compiler bug: created generic signature for %s in %s that does not conform to its erasure
- |signature: %s
- |original type: %s
- |normalized type: %s
- |erasure type: %s
- |if this is reproducible, please report bug at https://issues.scala-lang.org/
- """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig, memberTpe, normalizedTpe, bytecodeTpe))
- return
- }
- }
- val index = jmember.getConstantPool.addUtf8(sig).toShort
- if (opt.verboseDebug)
- beforeErasure(println("add generic sig "+sym+":"+sym.info+" ==> "+sig+" @ "+index))
-
- val buf = ByteBuffer.allocate(2)
- buf putShort index
- addAttribute(jmember, tpnme.SignatureATTR, buf)
- }
- }
- }
-
- def addAnnotations(jmember: JMember, annotations: List[AnnotationInfo]) {
- if (annotations exists (_ matches definitions.DeprecatedAttr)) {
- val attr = jmember.getContext().JOtherAttribute(
- jmember.getJClass(), jmember, tpnme.DeprecatedATTR.toString,
- new Array[Byte](0), 0)
- jmember addAttribute attr
- }
-
- val toEmit = annotations filter shouldEmitAnnotation
- if (toEmit.isEmpty) return
-
- val buf: ByteBuffer = ByteBuffer.allocate(2048)
- emitJavaAnnotations(jmember.getConstantPool, buf, toEmit)
- addAttribute(jmember, tpnme.RuntimeAnnotationATTR, buf)
- }
-
- def addParamAnnotations(jmethod: JMethod, pannotss: List[List[AnnotationInfo]]) {
- val annotations = pannotss map (_ filter shouldEmitAnnotation)
- if (annotations forall (_.isEmpty)) return
-
- val buf: ByteBuffer = ByteBuffer.allocate(2048)
-
- // number of parameters
- buf.put(annotations.length.toByte)
- for (annots <- annotations)
- emitJavaAnnotations(jmethod.getConstantPool, buf, annots)
-
- addAttribute(jmethod, tpnme.RuntimeParamAnnotationATTR, buf)
- }
-
- def addAttribute(jmember: JMember, name: Name, buf: ByteBuffer) {
- if (buf.position() < 2)
- return
-
- val length = buf.position()
- val arr = buf.array().slice(0, length)
-
- val attr = jmember.getContext().JOtherAttribute(jmember.getJClass(),
- jmember,
- name.toString,
- arr,
- length)
- jmember addAttribute attr
- }
-
- def addInnerClasses(jclass: JClass) {
- /** The outer name for this inner class. Note that it returns null
- * when the inner class should not get an index in the constant pool.
- * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
- */
- def outerName(innerSym: Symbol): String = {
- if (innerSym.originalEnclosingMethod != NoSymbol)
- null
- else {
- val outerName = javaName(innerSym.rawowner)
- if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName))
- else outerName
- }
- }
-
- def innerName(innerSym: Symbol): String =
- if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
- null
- else
- innerSym.rawname + innerSym.moduleSuffix
-
- // add inner classes which might not have been referenced yet
- afterErasure {
- for (sym <- List(clasz.symbol, clasz.symbol.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
- innerClassBuffer += m
- }
-
- val allInners = innerClassBuffer.toList
- if (allInners.nonEmpty) {
- debuglog(clasz.symbol.fullName('.') + " contains " + allInners.size + " inner classes.")
- val innerClassesAttr = jclass.getInnerClasses()
- // sort them so inner classes succeed their enclosing class
- // to satisfy the Eclipse Java compiler
- for (innerSym <- allInners sortBy (_.name.length)) {
- val flags = {
- val staticFlag = if (innerSym.rawowner.hasModuleFlag) ACC_STATIC else 0
- (javaFlags(innerSym) | staticFlag) & INNER_CLASSES_FLAGS
- }
- val jname = javaName(innerSym)
- val oname = outerName(innerSym)
- val iname = innerName(innerSym)
-
- // Mimicking javap inner class output
- debuglog(
- if (oname == null || iname == null) "//class " + jname
- else "//%s=class %s of class %s".format(iname, jname, oname)
- )
-
- innerClassesAttr.addEntry(jname, oname, iname, flags)
- }
- }
- }
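The three strings computed above (jname, oname, iname) make up one InnerClasses table entry. The hand-written values below, not produced by running the compiler, suggest the expected shape for a member class, a nested object and an anonymous class; the Entry case class is a hypothetical stand-in.

    object InnerClassesEntrySketch {
      // null mirrors the attribute's "no name" marker used for anonymous classes.
      final case class Entry(jname: String, oname: String, iname: String)

      def main(args: Array[String]): Unit = {
        val member    = Entry("A$B", "A", "B")          // class B inside class A
        val moduleCls = Entry("A$C$", "A", "C$")        // object C inside class A
        val anonymous = Entry("A$$anon$1", null, null)  // anonymous class: no outer/inner name
        List(member, moduleCls, anonymous) foreach println
      }
    }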
-
- def genField(f: IField) {
- debuglog("Adding field: " + f.symbol.fullName)
-
- val jfield = jclass.addNewField(
- javaFieldFlags(f.symbol),
- javaName(f.symbol),
- javaType(f.symbol.tpe)
- )
-
- addGenericSignature(jfield, f.symbol, clasz.symbol)
- addAnnotations(jfield, f.symbol.annotations)
- }
-
- def genMethod(m: IMethod) {
- if (m.symbol.isStaticConstructor || definitions.isGetClass(m.symbol)) return
-
- debuglog("Generating method " + m.symbol.fullName)
- method = m
- endPC.clear
- computeLocalVarsIndex(m)
-
- var resTpe = javaType(m.symbol.tpe.resultType)
- if (m.symbol.isClassConstructor)
- resTpe = JType.VOID
-
- var flags = javaFlags(m.symbol)
- if (jclass.isInterface)
- flags |= ACC_ABSTRACT
-
- if (m.symbol.isStrictFP)
- flags |= ACC_STRICT
-
- // native methods of objects are generated in mirror classes
- if (method.native)
- flags |= ACC_NATIVE
-
- jmethod = jclass.addNewMethod(flags,
- javaName(m.symbol),
- resTpe,
- mkArray(m.params map (p => javaType(p.kind))),
- mkArray(m.params map (p => javaName(p.sym))))
-
- addRemoteException(jmethod, m.symbol)
-
- if (!jmethod.isAbstract() && !method.native) {
- val jcode = jmethod.getCode().asInstanceOf[JExtendedCode]
-
- // add a fake local for debugging purposes
- if (emitVars && isClosureApply(method.symbol)) {
- val outerField = clasz.symbol.info.decl(nme.OUTER_LOCAL)
- if (outerField != NoSymbol) {
- log("Adding fake local to represent outer 'this' for closure " + clasz)
- val _this = new Local(
- method.symbol.newVariable(nme.FAKE_LOCAL_THIS), toTypeKind(outerField.tpe), false)
- m.locals = m.locals ::: List(_this)
- computeLocalVarsIndex(m) // since we added a new local, we need to recompute indexes
-
- jcode.emitALOAD_0()
- jcode.emitGETFIELD(javaName(clasz.symbol),
- javaName(outerField),
- javaType(outerField))
- jcode.emitSTORE(indexOf(_this), javaType(_this.kind))
- }
- }
-
- for (local <- m.locals if ! m.params.contains(local)) {
- debuglog("add local var: " + local)
- jmethod.addNewLocalVariable(javaType(local.kind), javaName(local.sym))
- }
-
- genCode(m)
- if (emitVars)
- genLocalVariableTable(m, jcode)
- }
-
- addGenericSignature(jmethod, m.symbol, clasz.symbol)
- val (excs, others) = m.symbol.annotations partition (_.symbol == ThrowsClass)
- addExceptionsAttribute(jmethod, excs)
- addAnnotations(jmethod, others)
- addParamAnnotations(jmethod, m.params.map(_.sym.annotations))
-
- // check for code size
- try jmethod.freeze()
- catch {
- case e: JCode.CodeSizeTooBigException =>
- clasz.cunit.error(m.symbol.pos, "Code size exceeds JVM limits: %d".format(e.codeSize))
- throw e
- }
- }
-
- /** Adds a @remote annotation, actual use unknown.
- */
- private def addRemoteException(jmethod: JMethod, meth: Symbol) {
- val needsAnnotation = (
- (isRemoteClass || (meth hasAnnotation RemoteAttr) && jmethod.isPublic)
- && !(meth.throwsAnnotations contains RemoteExceptionClass)
- )
- if (needsAnnotation) {
- val c = Constant(RemoteExceptionClass.tpe)
- val arg = Literal(c) setType c.tpe
- meth.addAnnotation(appliedType(ThrowsClass, c.tpe), arg)
- }
- }
-
- private def isClosureApply(sym: Symbol): Boolean = {
- (sym.name == nme.apply) &&
- sym.owner.isSynthetic &&
- sym.owner.tpe.parents.exists { t =>
- val TypeRef(_, sym, _) = t
- FunctionClass contains sym
- }
- }
-
- def addModuleInstanceField() {
- jclass.addNewField(PublicStaticFinal,
- nme.MODULE_INSTANCE_FIELD.toString,
- jclass.getType())
- }
-
- def addStaticInit(cls: JClass, mopt: Option[IMethod]) {
- val clinitMethod = cls.addNewMethod(PublicStatic,
- "<clinit>",
- JType.VOID,
- JType.EMPTY_ARRAY,
- new Array[String](0))
- val clinit = clinitMethod.getCode().asInstanceOf[JExtendedCode]
-
- mopt match {
- case Some(m) =>
- val oldLastBlock = m.lastBlock
- val lastBlock = m.newBlock()
- oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock))
-
- if (isStaticModule(clasz.symbol)) {
- // call object's private ctor from static ctor
- lastBlock emit NEW(REFERENCE(m.symbol.enclClass))
- lastBlock emit CALL_METHOD(m.symbol.enclClass.primaryConstructor, Static(true))
- }
-
- // add serialVUID code
- serialVUID foreach { value =>
- import Flags._, definitions._
- val fieldName = "serialVersionUID"
- val fieldSymbol = clasz.symbol.newValue(newTermName(fieldName), NoPosition, STATIC | FINAL) setInfo LongClass.tpe
- clasz addField new IField(fieldSymbol)
- lastBlock emit CONSTANT(Constant(value))
- lastBlock emit STORE_FIELD(fieldSymbol, true)
- }
-
- if (isParcelableClass)
- addCreatorCode(BytecodeGenerator.this, lastBlock)
-
- lastBlock emit RETURN(UNIT)
- lastBlock.close
-
- method = m
- jmethod = clinitMethod
- genCode(m)
- case None =>
- legacyStaticInitializer(cls, clinit)
- }
- }
-
- private def legacyStaticInitializer(cls: JClass, clinit: JExtendedCode) {
- if (isStaticModule(clasz.symbol)) {
- clinit emitNEW cls.getName()
- clinit.emitINVOKESPECIAL(cls.getName(),
- JMethod.INSTANCE_CONSTRUCTOR_NAME,
- JMethodType.ARGLESS_VOID_FUNCTION)
- }
-
- serialVUID foreach { value =>
- val fieldName = "serialVersionUID"
- jclass.addNewField(PublicStaticFinal, fieldName, JType.LONG)
- clinit emitPUSH value
- clinit.emitPUTSTATIC(jclass.getName(), fieldName, JType.LONG)
- }
-
- if (isParcelableClass)
- legacyAddCreatorCode(BytecodeGenerator.this, clinit)
-
- clinit.emitRETURN()
- }
-
- /** Add a forwarder for method m */
- def addForwarder(jclass: JClass, module: Symbol, m: Symbol) {
- val moduleName = javaName(module)
- val methodInfo = module.thisType.memberInfo(m)
- val paramJavaTypes = methodInfo.paramTypes map javaType
- val paramNames = 0 until paramJavaTypes.length map ("x_" + _)
- // TODO: evaluate the other flags we might be dropping on the floor here.
- val flags = PublicStatic | (
- if (m.isVarargsMethod) ACC_VARARGS else 0
- )
-
- /** Forwarders must not be marked final, as the JVM will not allow
- * redefinition of a final static method, and we don't know what classes
- * might be subclassing the companion class. See SI-4827.
- */
- val mirrorMethod = jclass.addNewMethod(
- flags,
- javaName(m),
- javaType(methodInfo.resultType),
- mkArray(paramJavaTypes),
- mkArray(paramNames))
- val mirrorCode = mirrorMethod.getCode().asInstanceOf[JExtendedCode]
- mirrorCode.emitGETSTATIC(moduleName,
- nme.MODULE_INSTANCE_FIELD.toString,
- new JObjectType(moduleName))
-
- var i = 0
- var index = 0
- var argTypes = mirrorMethod.getArgumentTypes()
- while (i < argTypes.length) {
- mirrorCode.emitLOAD(index, argTypes(i))
- index += argTypes(i).getSize()
- i += 1
- }
-
- mirrorCode.emitINVOKEVIRTUAL(moduleName, mirrorMethod.getName, javaType(m).asInstanceOf[JMethodType])
- mirrorCode emitRETURN mirrorMethod.getReturnType()
-
- addRemoteException(mirrorMethod, m)
- // only add generic signature if the method is concrete; bug #1745
- if (!m.isDeferred)
- addGenericSignature(mirrorMethod, m, module)
-
- val (throws, others) = m.annotations partition (_.symbol == ThrowsClass)
- addExceptionsAttribute(mirrorMethod, throws)
- addAnnotations(mirrorMethod, others)
- addParamAnnotations(mirrorMethod, m.info.params.map(_.annotations))
- }
-
- /** Add forwarders for all methods defined in `module` that don't conflict
- * with methods in the companion class of `module`. A conflict arises when
- * a method with the same name is defined both in a class and its companion
- * object: method signature is not taken into account.
- */
- def addForwarders(jclass: JClass, moduleClass: Symbol) {
- assert(moduleClass.isModuleClass, moduleClass)
- debuglog("Dumping mirror class for object: " + moduleClass)
-
- val className = jclass.getName
- val linkedClass = moduleClass.companionClass
- val linkedModule = linkedClass.companionSymbol
- lazy val conflictingNames: Set[Name] = {
- linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name } toSet
- }
- debuglog("Potentially conflicting names for forwarders: " + conflictingNames)
-
- for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, Flags.METHOD)) {
- if (m.isType || m.isDeferred || (m.owner eq ObjectClass) || m.isConstructor)
- debuglog("No forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
- else if (conflictingNames(m.name))
- log("No forwarder for " + m + " due to conflict with " + linkedClass.info.member(m.name))
- else {
- log("Adding static forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
- addForwarder(jclass, moduleClass, m)
- }
- }
- }
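To make the name-only conflict rule above concrete, here is a toy model using plain strings; forwardable and the member names are invented for illustration and are not the compiler's API. A module member is skipped as soon as the companion class declares any member with the same name, regardless of signature.

    object ForwarderFilterSketch {
      def forwardable(moduleMembers: List[String], companionMembers: List[String]): List[String] = {
        val conflicting = companionMembers.toSet
        moduleMembers.filterNot(conflicting)  // filter by name only, signatures are ignored
      }

      def main(args: Array[String]): Unit =
        // "apply" is skipped because the class also declares an "apply" (even with a
        // different parameter list); "fromString" gets a static forwarder.
        println(forwardable(List("apply", "fromString"), List("apply", "toString")))
    }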
-
- /** Generate a mirror class for a top-level module. A mirror class is a class
- * containing only static methods that forward to the corresponding method
- * on the MODULE instance of the given Scala object. It will only be
- * generated if there is no companion class: if there is, an attempt will
- * instead be made to add the forwarder methods to the companion class.
- */
- def generateMirrorClass(clasz: Symbol, sourceFile: SourceFile) {
- import JAccessFlags._
- /* We need to save the inner classes buffer and create a new one to make sure
- * that we do not confuse inner classes of the mirror class with inner
- * classes of the class we are mirroring. These two sets can be different
- * as seen in this case:
- *
- * class A {
- * class B
- * def b: B = new B
- * }
- * object C extends A
- *
- * Here the mirror class of C has a static forwarder for the (inherited) method `b`,
- * therefore it refers to class `B` and needs an InnerClasses entry. However,
- * the real class for `C` (named `C$`) is empty, does not refer to `B`,
- * and thus does not need an InnerClasses entry.
- *
- * NOTE: This logic has been refactored in GenASM and everything is
- * implemented in a much cleaner way by having two separate buffers.
- */
- val savedInnerClasses = innerClassBuffer
- innerClassBuffer = mutable.LinkedHashSet[Symbol]()
- val moduleName = javaName(clasz) // + "$"
- val mirrorName = moduleName.substring(0, moduleName.length() - 1)
- val mirrorClass = fjbgContext.JClass(ACC_SUPER | ACC_PUBLIC | ACC_FINAL,
- mirrorName,
- JAVA_LANG_OBJECT.getName,
- JClass.NO_INTERFACES,
- "" + sourceFile)
-
- log("Dumping mirror class for '%s'".format(mirrorClass.getName))
- addForwarders(mirrorClass, clasz)
- val ssa = scalaSignatureAddingMarker(mirrorClass, clasz.companionSymbol)
- addAnnotations(mirrorClass, clasz.annotations ++ ssa)
- emitClass(mirrorClass, clasz)
- innerClassBuffer = savedInnerClasses
- }
-
- var linearization: List[BasicBlock] = Nil
- var isModuleInitialized = false
-
- /**
- * @param m ...
- */
- def genCode(m: IMethod) {
- val jcode = jmethod.getCode.asInstanceOf[JExtendedCode]
-
- def makeLabels(bs: List[BasicBlock]) = {
- debuglog("Making labels for: " + method)
-
- mutable.HashMap(bs map (_ -> jcode.newLabel) : _*)
- }
-
- isModuleInitialized = false
-
- linearization = linearizer.linearize(m)
- val labels = makeLabels(linearization)
-
- var nextBlock: BasicBlock = linearization.head
-
- def genBlocks(l: List[BasicBlock]): Unit = l match {
- case Nil => ()
- case x :: Nil => nextBlock = null; genBlock(x)
- case x :: y :: ys => nextBlock = y; genBlock(x); genBlocks(y :: ys)
- }
-
- /** Generate exception handlers for the current method. */
- def genExceptionHandlers() {
-
- /** Return the list of (start, end) intervals over which the handler is active.
- * Each interval is inclusive at the beginning and exclusive at the end:
- * [start, end).
- */
- def ranges(e: ExceptionHandler): List[(Int, Int)] = {
- var covered = e.covered
- var ranges: List[(Int, Int)] = Nil
- var start = -1
- var end = -1
-
- linearization foreach { b =>
- if (! (covered contains b) ) {
- if (start >= 0) { // we're inside a handler range
- end = labels(b).getAnchor()
- ranges ::= ((start, end))
- start = -1
- }
- } else {
- if (start < 0) // we're not inside a handler range
- start = labels(b).getAnchor()
-
- end = endPC(b)
- covered -= b
- }
- }
-
- /* Add the last interval. Note that since the intervals are
- * open-ended to the right, we have to give a number past the actual
- * code!
- */
- if (start >= 0) {
- ranges ::= ((start, jcode.getPC()))
- }
-
- if (!covered.isEmpty)
- debuglog("Some covered blocks were not found in method: " + method +
- " covered: " + covered + " not in " + linearization)
- ranges
- }
-
- for (e <- this.method.exh ; p <- ranges(e).sortBy(_._1)) {
- if (p._1 < p._2) {
- debuglog("Adding exception handler " + e + "at block: " + e.startBlock + " for " + method +
- " from: " + p._1 + " to: " + p._2 + " catching: " + e.cls);
- val cls = if (e.cls == NoSymbol || e.cls == ThrowableClass) null
- else javaName(e.cls)
- jcode.addExceptionHandler(p._1, p._2,
- labels(e.startBlock).getAnchor(),
- cls)
- } else
- log("Empty exception range: " + p)
- }
- }
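A detached sketch of the interval computation done by ranges above: walk the linearized blocks and emit half-open [start, end) PC ranges over the stretches a handler covers. Block start/end PCs are modelled as plain maps here; the block names and maps are invented, whereas in the generator they come from labels and endPC.

    object HandlerRangesSketch {
      def ranges(linearization: List[String],
                 covered: Set[String],
                 startPC: Map[String, Int],
                 endPC: Map[String, Int]): List[(Int, Int)] = {
        var result: List[(Int, Int)] = Nil
        var start = -1
        var end   = -1
        for (b <- linearization) {
          if (covered(b)) {
            if (start < 0) start = startPC(b)  // opening a new covered stretch
            end = endPC(b)
          } else if (start >= 0) {             // the covered stretch just ended
            result ::= ((start, end))
            start = -1
          }
        }
        if (start >= 0) result ::= ((start, end))
        result.reverse
      }

      def main(args: Array[String]): Unit = {
        val blocks = List("b1", "b2", "b3", "b4")
        val starts = Map("b1" -> 0, "b2" -> 10, "b3" -> 20, "b4" -> 30)
        val ends   = Map("b1" -> 10, "b2" -> 20, "b3" -> 30, "b4" -> 40)
        // b2 and b3 are covered, so a single range [10, 30) results.
        println(ranges(blocks, Set("b2", "b3"), starts, ends))
      }
    }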
-
- def isAccessibleFrom(target: Symbol, site: Symbol): Boolean = {
- target.isPublic || target.isProtected && {
- (site.enclClass isSubClass target.enclClass) ||
- (site.enclosingPackage == target.privateWithin)
- }
- }
-
- def genCallMethod(call: CALL_METHOD) {
- val CALL_METHOD(method, style) = call
- val siteSymbol = clasz.symbol
- val hostSymbol = call.hostClass
- val methodOwner = method.owner
- // info calls so that types are up to date; erasure may add lateINTERFACE to traits
- hostSymbol.info ; methodOwner.info
-
- def isInterfaceCall(sym: Symbol) = (
- sym.isInterface && methodOwner != ObjectClass
- || sym.isJavaDefined && sym.isNonBottomSubClass(ClassfileAnnotationClass)
- )
- // whether to reference the type of the receiver or
- // the type of the method owner (if not an interface!)
- val useMethodOwner = (
- style != Dynamic
- || !isInterfaceCall(hostSymbol) && isAccessibleFrom(methodOwner, siteSymbol)
- || hostSymbol.isBottomClass
- )
- val receiver = if (useMethodOwner) methodOwner else hostSymbol
- val jowner = javaName(receiver)
- val jname = javaName(method)
- val jtype = javaType(method).asInstanceOf[JMethodType]
-
- def dbg(invoke: String) {
- debuglog("%s %s %s.%s:%s".format(invoke, receiver.accessString, jowner, jname, jtype))
- }
-
- def initModule() {
- // we initialize the MODULE$ field immediately after the super ctor
- if (isStaticModule(siteSymbol) && !isModuleInitialized &&
- jmethod.getName() == JMethod.INSTANCE_CONSTRUCTOR_NAME &&
- jname == JMethod.INSTANCE_CONSTRUCTOR_NAME) {
- isModuleInitialized = true
- jcode.emitALOAD_0()
- jcode.emitPUTSTATIC(jclass.getName(),
- nme.MODULE_INSTANCE_FIELD.toString,
- jclass.getType())
- }
- }
-
- style match {
- case Static(true) => dbg("invokespecial"); jcode.emitINVOKESPECIAL(jowner, jname, jtype)
- case Static(false) => dbg("invokestatic"); jcode.emitINVOKESTATIC(jowner, jname, jtype)
- case Dynamic if isInterfaceCall(receiver) => dbg("invokeinterface"); jcode.emitINVOKEINTERFACE(jowner, jname, jtype)
- case Dynamic => dbg("invokevirtual"); jcode.emitINVOKEVIRTUAL(jowner, jname, jtype)
- case SuperCall(_) =>
- dbg("invokespecial")
- jcode.emitINVOKESPECIAL(jowner, jname, jtype)
- initModule()
- }
- }
-
- def genBlock(b: BasicBlock) {
- labels(b).anchorToNext()
-
- debuglog("Generating code for block: " + b + " at pc: " + labels(b).getAnchor())
- var lastMappedPC = 0
- var lastLineNr = 0
- var crtPC = 0
-
- /** local variables whose scope appears in this block. */
- val varsInBlock: mutable.Set[Local] = new mutable.HashSet
- val lastInstr = b.lastInstruction
-
- for (instr <- b) {
- instr match {
- case THIS(clasz) => jcode.emitALOAD_0()
-
- case CONSTANT(const) => genConstant(jcode, const)
-
- case LOAD_ARRAY_ITEM(kind) =>
- if(kind.isRefOrArrayType) { jcode.emitAALOAD() }
- else {
- (kind: @unchecked) match {
- case UNIT => throw new IllegalArgumentException("invalid type for aload " + kind)
- case BOOL | BYTE => jcode.emitBALOAD()
- case SHORT => jcode.emitSALOAD()
- case CHAR => jcode.emitCALOAD()
- case INT => jcode.emitIALOAD()
- case LONG => jcode.emitLALOAD()
- case FLOAT => jcode.emitFALOAD()
- case DOUBLE => jcode.emitDALOAD()
- }
- }
-
- case LOAD_LOCAL(local) => jcode.emitLOAD(indexOf(local), javaType(local.kind))
-
- case lf @ LOAD_FIELD(field, isStatic) =>
- var owner = javaName(lf.hostClass)
- debuglog("LOAD_FIELD with owner: " + owner +
- " flags: " + Flags.flagsToString(field.owner.flags))
- val fieldJName = javaName(field)
- val fieldJType = javaType(field)
- if (isStatic) jcode.emitGETSTATIC(owner, fieldJName, fieldJType)
- else jcode.emitGETFIELD( owner, fieldJName, fieldJType)
-
- case LOAD_MODULE(module) =>
- // assert(module.isModule, "Expected module: " + module)
- debuglog("generating LOAD_MODULE for: " + module + " flags: " + Flags.flagsToString(module.flags));
- if (clasz.symbol == module.moduleClass && jmethod.getName() != nme.readResolve.toString)
- jcode.emitALOAD_0()
- else
- jcode.emitGETSTATIC(javaName(module) /* + "$" */ ,
- nme.MODULE_INSTANCE_FIELD.toString,
- javaType(module))
-
- case STORE_ARRAY_ITEM(kind) =>
- if(kind.isRefOrArrayType) { jcode.emitAASTORE() }
- else {
- (kind: @unchecked) match {
- case UNIT => throw new IllegalArgumentException("invalid type for astore " + kind)
- case BOOL | BYTE => jcode.emitBASTORE()
- case SHORT => jcode.emitSASTORE()
- case CHAR => jcode.emitCASTORE()
- case INT => jcode.emitIASTORE()
- case LONG => jcode.emitLASTORE()
- case FLOAT => jcode.emitFASTORE()
- case DOUBLE => jcode.emitDASTORE()
- }
- }
-
- case STORE_LOCAL(local) =>
- jcode.emitSTORE(indexOf(local), javaType(local.kind))
-
- case STORE_THIS(_) =>
- // this only works for impl classes because the self parameter comes first
- // in the method signature. If that changes, this code has to be revisited.
- jcode.emitASTORE_0()
-
- case STORE_FIELD(field, isStatic) =>
- val owner = javaName(field.owner)
- val fieldJName = javaName(field)
- val fieldJType = javaType(field)
- if (isStatic) jcode.emitPUTSTATIC(owner, fieldJName, fieldJType)
- else jcode.emitPUTFIELD( owner, fieldJName, fieldJType)
-
- case CALL_PRIMITIVE(primitive) => genPrimitive(primitive, instr.pos)
-
- /** Special handling to access native Array.clone() */
- case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
- val target: String = javaType(call.targetTypeKind).getSignature()
- jcode.emitINVOKEVIRTUAL(target, "clone", arrayCloneType)
-
- case call @ CALL_METHOD(method, style) => genCallMethod(call)
-
- case BOX(kind) =>
- val Pair(mname, mtype) = jBoxTo(kind)
- jcode.emitINVOKESTATIC(BoxesRunTime, mname, mtype)
-
- case UNBOX(kind) =>
- val Pair(mname, mtype) = jUnboxTo(kind)
- jcode.emitINVOKESTATIC(BoxesRunTime, mname, mtype)
-
- case NEW(REFERENCE(cls)) =>
- val className = javaName(cls)
- jcode emitNEW className
-
- case CREATE_ARRAY(elem, 1) =>
- if(elem.isRefOrArrayType) { jcode emitANEWARRAY javaType(elem).asInstanceOf[JReferenceType] }
- else { jcode emitNEWARRAY javaType(elem) }
-
- case CREATE_ARRAY(elem, dims) =>
- jcode.emitMULTIANEWARRAY(javaType(ArrayN(elem, dims)).asInstanceOf[JReferenceType], dims)
-
- case IS_INSTANCE(tpe) =>
- tpe match {
- case REFERENCE(cls) => jcode emitINSTANCEOF new JObjectType(javaName(cls))
- case ARRAY(elem) => jcode emitINSTANCEOF new JArrayType(javaType(elem))
- case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
- }
-
- case CHECK_CAST(tpe) =>
- tpe match {
- case REFERENCE(cls) => if (cls != ObjectClass) { jcode emitCHECKCAST new JObjectType(javaName(cls)) } // No need to checkcast for Objects
- case ARRAY(elem) => jcode emitCHECKCAST new JArrayType(javaType(elem))
- case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
- }
-
- case SWITCH(tags, branches) =>
- val tagArray = new Array[Array[Int]](tags.length)
- var caze = tags
- var i = 0
-
- while (i < tagArray.length) {
- tagArray(i) = new Array[Int](caze.head.length)
- caze.head.copyToArray(tagArray(i), 0)
- i += 1
- caze = caze.tail
- }
- val branchArray = jcode.newLabels(tagArray.length)
- i = 0
- while (i < branchArray.length) {
- branchArray(i) = labels(branches(i))
- i += 1
- }
- debuglog("Emitting SWITCH:\ntags: " + tags + "\nbranches: " + branches)
- jcode.emitSWITCH(tagArray,
- branchArray,
- labels(branches.last),
- MIN_SWITCH_DENSITY)
- ()
-
- case JUMP(whereto) =>
- if (nextBlock != whereto)
- jcode.emitGOTO_maybe_W(labels(whereto), false) // default to short jumps
-
- case CJUMP(success, failure, cond, kind) =>
- if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
- if (nextBlock == success) {
- jcode.emitIF_ICMP(conds(cond.negate()), labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF_ICMP(conds(cond), labels(success))
- if (nextBlock != failure)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- }
- } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
- if (nextBlock == success) {
- jcode.emitIF_ACMP(conds(cond.negate()), labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF_ACMP(conds(cond), labels(success))
- if (nextBlock != failure)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- }
- } else {
- (kind: @unchecked) match {
- case LONG => jcode.emitLCMP()
- case FLOAT =>
- if (cond == LT || cond == LE) jcode.emitFCMPG()
- else jcode.emitFCMPL()
- case DOUBLE =>
- if (cond == LT || cond == LE) jcode.emitDCMPG()
- else jcode.emitDCMPL()
- }
- if (nextBlock == success) {
- jcode.emitIF(conds(cond.negate()), labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF(conds(cond), labels(success));
- if (nextBlock != failure)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- }
- }
-
- case CZJUMP(success, failure, cond, kind) =>
- if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
- if (nextBlock == success) {
- jcode.emitIF(conds(cond.negate()), labels(failure))
- } else {
- jcode.emitIF(conds(cond), labels(success))
- if (nextBlock != failure)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- }
- } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
- val Success = success
- val Failure = failure
- (cond, nextBlock) match {
- case (EQ, Success) => jcode emitIFNONNULL labels(failure)
- case (NE, Failure) => jcode emitIFNONNULL labels(success)
- case (EQ, Failure) => jcode emitIFNULL labels(success)
- case (NE, Success) => jcode emitIFNULL labels(failure)
- case (EQ, _) =>
- jcode emitIFNULL labels(success)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- case (NE, _) =>
- jcode emitIFNONNULL labels(success)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- case _ =>
- }
- } else {
- (kind: @unchecked) match {
- case LONG =>
- jcode.emitLCONST_0()
- jcode.emitLCMP()
- case FLOAT =>
- jcode.emitFCONST_0()
- if (cond == LT || cond == LE) jcode.emitFCMPG()
- else jcode.emitFCMPL()
- case DOUBLE =>
- jcode.emitDCONST_0()
- if (cond == LT || cond == LE) jcode.emitDCMPG()
- else jcode.emitDCMPL()
- }
- if (nextBlock == success) {
- jcode.emitIF(conds(cond.negate()), labels(failure))
- } else {
- jcode.emitIF(conds(cond), labels(success))
- if (nextBlock != failure)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- }
- }
-
- case RETURN(kind) => jcode emitRETURN javaType(kind)
-
- case THROW(_) => jcode.emitATHROW()
-
- case DROP(kind) =>
- if(kind.isWideType) jcode.emitPOP2()
- else jcode.emitPOP()
-
- case DUP(kind) =>
- if(kind.isWideType) jcode.emitDUP2()
- else jcode.emitDUP()
-
- case MONITOR_ENTER() => jcode.emitMONITORENTER()
-
- case MONITOR_EXIT() => jcode.emitMONITOREXIT()
-
- case SCOPE_ENTER(lv) =>
- varsInBlock += lv
- lv.start = jcode.getPC()
-
- case SCOPE_EXIT(lv) =>
- if (varsInBlock(lv)) {
- lv.ranges = (lv.start, jcode.getPC()) :: lv.ranges
- varsInBlock -= lv
- }
- else if (b.varsInScope(lv)) {
- lv.ranges = (labels(b).getAnchor(), jcode.getPC()) :: lv.ranges
- b.varsInScope -= lv
- }
- else dumpMethodAndAbort(method, "Illegal local var nesting")
-
- case LOAD_EXCEPTION(_) =>
- ()
- }
-
- crtPC = jcode.getPC()
-
- // assert(instr.pos.source.isEmpty || instr.pos.source.get == (clasz.cunit.source), "sources don't match")
- // val crtLine = instr.pos.line.get(lastLineNr);
-
- val crtLine = try {
- if (instr.pos == NoPosition) lastLineNr else (instr.pos).line // check NoPosition to avoid costly exception
- } catch {
- case _: UnsupportedOperationException =>
- log("Warning: wrong position in: " + method)
- lastLineNr
- }
-
- if (instr eq lastInstr) { endPC(b) = jcode.getPC() }
-
- //System.err.println("CRTLINE: " + instr.pos + " " +
- // /* (if (instr.pos < clasz.cunit.source.content.length) clasz.cunit.source.content(instr.pos) else '*') + */ " " + crtLine);
-
- if (crtPC > lastMappedPC) {
- jcode.completeLineNumber(lastMappedPC, crtPC, crtLine)
- lastMappedPC = crtPC
- lastLineNr = crtLine
- }
- }
-
- // local vars that survived this basic block
- for (lv <- varsInBlock) {
- lv.ranges = (lv.start, jcode.getPC()) :: lv.ranges
- }
- for (lv <- b.varsInScope) {
- lv.ranges = (labels(b).getAnchor(), jcode.getPC()) :: lv.ranges
- }
- }
-
-
- /**
- * @param primitive ...
- * @param pos ...
- */
- def genPrimitive(primitive: Primitive, pos: Position) {
- primitive match {
- case Negation(kind) =>
- if(kind.isIntSizedType) { jcode.emitINEG() }
- else {
- kind match {
- case LONG => jcode.emitLNEG()
- case FLOAT => jcode.emitFNEG()
- case DOUBLE => jcode.emitDNEG()
- case _ => abort("Impossible to negate a " + kind)
- }
- }
-
- case Arithmetic(op, kind) =>
- op match {
- case ADD =>
- if(kind.isIntSizedType) { jcode.emitIADD() }
- else {
- (kind: @unchecked) match {
- case LONG => jcode.emitLADD()
- case FLOAT => jcode.emitFADD()
- case DOUBLE => jcode.emitDADD()
- }
- }
-
- case SUB =>
- if(kind.isIntSizedType) { jcode.emitISUB() }
- else {
- (kind: @unchecked) match {
- case LONG => jcode.emitLSUB()
- case FLOAT => jcode.emitFSUB()
- case DOUBLE => jcode.emitDSUB()
- }
- }
-
- case MUL =>
- if(kind.isIntSizedType) { jcode.emitIMUL() }
- else {
- (kind: @unchecked) match {
- case LONG => jcode.emitLMUL()
- case FLOAT => jcode.emitFMUL()
- case DOUBLE => jcode.emitDMUL()
- }
- }
-
- case DIV =>
- if(kind.isIntSizedType) { jcode.emitIDIV() }
- else {
- (kind: @unchecked) match {
- case LONG => jcode.emitLDIV()
- case FLOAT => jcode.emitFDIV()
- case DOUBLE => jcode.emitDDIV()
- }
- }
-
- case REM =>
- if(kind.isIntSizedType) { jcode.emitIREM() }
- else {
- (kind: @unchecked) match {
- case LONG => jcode.emitLREM()
- case FLOAT => jcode.emitFREM()
- case DOUBLE => jcode.emitDREM()
- }
- }
-
- case NOT =>
- if(kind.isIntSizedType) {
- jcode.emitPUSH(-1)
- jcode.emitIXOR()
- } else if(kind == LONG) {
- jcode.emitPUSH(-1l)
- jcode.emitLXOR()
- } else {
- abort("Impossible to negate an " + kind)
- }
-
- case _ =>
- abort("Unknown arithmetic primitive " + primitive)
- }
-
- case Logical(op, kind) => ((op, kind): @unchecked) match {
- case (AND, LONG) => jcode.emitLAND()
- case (AND, INT) => jcode.emitIAND()
- case (AND, _) =>
- jcode.emitIAND()
- if (kind != BOOL)
- jcode.emitT2T(javaType(INT), javaType(kind));
-
- case (OR, LONG) => jcode.emitLOR()
- case (OR, INT) => jcode.emitIOR()
- case (OR, _) =>
- jcode.emitIOR()
- if (kind != BOOL)
- jcode.emitT2T(javaType(INT), javaType(kind));
-
- case (XOR, LONG) => jcode.emitLXOR()
- case (XOR, INT) => jcode.emitIXOR()
- case (XOR, _) =>
- jcode.emitIXOR()
- if (kind != BOOL)
- jcode.emitT2T(javaType(INT), javaType(kind));
- }
-
- case Shift(op, kind) => ((op, kind): @unchecked) match {
- case (LSL, LONG) => jcode.emitLSHL()
- case (LSL, INT) => jcode.emitISHL()
- case (LSL, _) =>
- jcode.emitISHL()
- jcode.emitT2T(javaType(INT), javaType(kind))
-
- case (ASR, LONG) => jcode.emitLSHR()
- case (ASR, INT) => jcode.emitISHR()
- case (ASR, _) =>
- jcode.emitISHR()
- jcode.emitT2T(javaType(INT), javaType(kind))
-
- case (LSR, LONG) => jcode.emitLUSHR()
- case (LSR, INT) => jcode.emitIUSHR()
- case (LSR, _) =>
- jcode.emitIUSHR()
- jcode.emitT2T(javaType(INT), javaType(kind))
- }
-
- case Comparison(op, kind) => ((op, kind): @unchecked) match {
- case (CMP, LONG) => jcode.emitLCMP()
- case (CMPL, FLOAT) => jcode.emitFCMPL()
- case (CMPG, FLOAT) => jcode.emitFCMPG()
- case (CMPL, DOUBLE) => jcode.emitDCMPL()
- case (CMPG, DOUBLE) => jcode.emitDCMPG()
- }
-
- case Conversion(src, dst) =>
- debuglog("Converting from: " + src + " to: " + dst)
- if (dst == BOOL) {
- println("Illegal conversion at: " + clasz + " at: " + pos.source + ":" + pos.line)
- } else
- jcode.emitT2T(javaType(src), javaType(dst))
-
- case ArrayLength(_) =>
- jcode.emitARRAYLENGTH()
-
- case StartConcat =>
- jcode emitNEW StringBuilderClassName
- jcode.emitDUP()
- jcode.emitINVOKESPECIAL(StringBuilderClassName,
- JMethod.INSTANCE_CONSTRUCTOR_NAME,
- JMethodType.ARGLESS_VOID_FUNCTION)
-
- case StringConcat(el) =>
- val jtype = el match {
- case REFERENCE(_) | ARRAY(_) => JAVA_LANG_OBJECT
- case _ => javaType(el)
- }
- jcode.emitINVOKEVIRTUAL(StringBuilderClassName,
- "append",
- new JMethodType(StringBuilderType,
- Array(jtype)))
- case EndConcat =>
- jcode.emitINVOKEVIRTUAL(StringBuilderClassName,
- "toString",
- toStringType)
-
- case _ =>
- abort("Unimplemented primitive " + primitive)
- }
- }
-
- // genCode starts here
- genBlocks(linearization)
-
- if (this.method.exh.nonEmpty)
- genExceptionHandlers()
- }
-
-
- /** Emit a Local variable table for debugging purposes.
- * Synthetic locals are skipped. All variables are method-scoped.
- */
- private def genLocalVariableTable(m: IMethod, jcode: JCode) {
- val vars = m.locals filterNot (_.sym.isSynthetic)
- if (vars.isEmpty) return
-
- val pool = jclass.getConstantPool
- val pc = jcode.getPC()
- var anonCounter = 0
- var entries = 0
- vars.foreach { lv =>
- lv.ranges = mergeEntries(lv.ranges.reverse);
- entries += lv.ranges.length
- }
- if (!jmethod.isStatic()) entries += 1
-
- val lvTab = ByteBuffer.allocate(2 + 10 * entries)
- def emitEntry(name: String, signature: String, idx: Short, start: Short, end: Short) {
- lvTab putShort start
- lvTab putShort end
- lvTab putShort pool.addUtf8(name).toShort
- lvTab putShort pool.addUtf8(signature).toShort
- lvTab putShort idx
- }
-
- lvTab.putShort(entries.toShort)
-
- if (!jmethod.isStatic()) {
- emitEntry("this", jclass.getType().getSignature(), 0, 0.toShort, pc.toShort)
- }
-
- for (lv <- vars) {
- val name = if (javaName(lv.sym) eq null) {
- anonCounter += 1
- "<anon" + anonCounter + ">"
- } else javaName(lv.sym)
-
- val index = indexOf(lv).toShort
- val tpe = javaType(lv.kind).getSignature()
- for ((start, end) <- lv.ranges) {
- emitEntry(name, tpe, index, start.toShort, (end - start).toShort)
- }
- }
- val attr =
- fjbgContext.JOtherAttribute(jclass,
- jcode,
- tpnme.LocalVariableTableATTR.toString,
- lvTab.array())
- jcode addAttribute attr
- }
-
-
- /** For each basic block, the first PC address following it. */
- val endPC = new mutable.HashMap[BasicBlock, Int]
-
- ////////////////////// local vars ///////////////////////
-
- def sizeOf(sym: Symbol): Int = sizeOf(toTypeKind(sym.tpe))
-
- def sizeOf(k: TypeKind): Int = if(k.isWideType) 2 else 1
-
- def indexOf(m: IMethod, sym: Symbol): Int = {
- val Some(local) = m lookupLocal sym
- indexOf(local)
- }
-
- def indexOf(local: Local): Int = {
- assert(local.index >= 0, "Invalid index for: " + local + "{" + local.## + "}: ")
- local.index
- }
-
- /**
- * Compute the indexes of each local variable of the given
- * method. *Does not assume the parameters come first!*
- */
- def computeLocalVarsIndex(m: IMethod) {
- var idx = if (m.symbol.isStaticMember) 0 else 1;
-
- for (l <- m.params) {
- debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
- l.index = idx
- idx += sizeOf(l.kind)
- }
-
- for (l <- m.locals if !(m.params contains l)) {
- debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
- l.index = idx
- idx += sizeOf(l.kind)
- }
- }
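A stand-alone model of this slot assignment; the Kind hierarchy and names below are hypothetical stand-ins for icode's TypeKinds. Slot 0 holds `this` for instance methods, and wide kinds (LONG, DOUBLE) consume two JVM slots, exactly like sizeOf above.

    object LocalSlotSketch {
      sealed trait Kind { def size: Int }
      case object IntK    extends Kind { def size = 1 }
      case object RefK    extends Kind { def size = 1 }
      case object LongK   extends Kind { def size = 2 }
      case object DoubleK extends Kind { def size = 2 }

      def slotIndices(isStatic: Boolean, locals: List[(String, Kind)]): Map[String, Int] = {
        var idx = if (isStatic) 0 else 1    // slot 0 is `this` for instance methods
        locals.map { case (name, kind) =>
          val here = idx
          idx += kind.size                  // wide kinds advance the index by two
          name -> here
        }.toMap
      }

      def main(args: Array[String]): Unit =
        // x -> 1, l -> 2 (wide, occupies slots 2 and 3), y -> 4
        println(slotIndices(isStatic = false, List("x" -> IntK, "l" -> LongK, "y" -> RefK)))
    }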
-
- ////////////////////// Utilities ////////////////////////
-
- /** Merge adjacent ranges. */
- private def mergeEntries(ranges: List[(Int, Int)]): List[(Int, Int)] =
- (ranges.foldLeft(Nil: List[(Int, Int)]) { (collapsed: List[(Int, Int)], p: (Int, Int)) => (collapsed, p) match {
- case (Nil, _) => List(p)
- case ((s1, e1) :: rest, (s2, e2)) if (e1 == s2) => (s1, e2) :: rest
- case _ => p :: collapsed
- }}).reverse
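The fold above collapses touching [start, end) ranges into one. Here is a runnable copy with a worked example, detached from the compiler types (MergeEntriesSketch is an illustrative name):

    object MergeEntriesSketch {
      def mergeEntries(ranges: List[(Int, Int)]): List[(Int, Int)] =
        ranges.foldLeft(Nil: List[(Int, Int)]) {
          case ((s1, e1) :: rest, (s2, e2)) if e1 == s2 => (s1, e2) :: rest  // adjacent: merge
          case (collapsed, p)                           => p :: collapsed    // gap: keep separate
        }.reverse

      def main(args: Array[String]): Unit =
        // (0,4) and (4,9) touch, so they merge; (12,15) stays on its own.
        println(mergeEntries(List((0, 4), (4, 9), (12, 15))))  // List((0,9), (12,15))
    }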
- }
-
- private def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
-
- /**
- * Return the Java modifiers for the given symbol.
- * Java modifiers for classes:
- * - public, abstract, final, strictfp (not used)
- * for interfaces:
- * - the same as for classes, without 'final'
- * for fields:
- * - public, private (*)
- * - static, final
- * for methods:
- * - the same as for fields, plus:
- * - abstract, synchronized (not used), strictfp (not used), native (not used)
- *
- * (*) protected cannot be used, since inner classes 'see' protected members,
- * and they would fail verification after being lifted.
- */
- def javaFlags(sym: Symbol): Int = {
- // constructors of module classes should be private
- // PP: why are they only being marked private at this stage and not earlier?
- val privateFlag =
- sym.isPrivate || (sym.isPrimaryConstructor && isTopLevelModule(sym.owner))
-
- // Final: the only fields which can receive ACC_FINAL are eager vals.
- // Neither vars nor lazy vals can, because:
- //
- // Source: http://docs.oracle.com/javase/specs/jls/se7/html/jls-17.html#jls-17.5.3
- // "Another problem is that the specification allows aggressive
- // optimization of final fields. Within a thread, it is permissible to
- // reorder reads of a final field with those modifications of a final
- // field that do not take place in the constructor."
- //
- // A var or lazy val which is marked final still has meaning to the
- // scala compiler. The word final is heavily overloaded unfortunately;
- // for us it means "not overridable". At present you can't override
- // vars regardless; this may change.
- //
- // The logic does not check .isFinal (which checks the flags for the FINAL flag
- // and includes symbols marked lateFINAL); instead it inspects rawflags so
- // that lateFINAL can be excluded. Such symbols are eligible for inlining, but to
- // avoid breaking proxy software which depends on subclassing, we do not
- // emit ACC_FINAL.
- // Nested objects won't receive ACC_FINAL in order to allow for their overriding.
-
- val finalFlag = (
- (((sym.rawflags & Flags.FINAL) != 0) || isTopLevelModule(sym))
- && !sym.enclClass.isInterface
- && !sym.isClassConstructor
- && !sym.isMutable // lazy vals and vars both
- )
-
- // Primitives are "abstract final" to prohibit instantiation
- // without having to provide any implementations, but that is an
- // illegal combination of modifiers at the bytecode level, so
- // final is suppressed when abstract is present.
- mkFlags(
- if (privateFlag) ACC_PRIVATE else ACC_PUBLIC,
- if (sym.isDeferred || sym.hasAbstractFlag) ACC_ABSTRACT else 0,
- if (sym.isInterface) ACC_INTERFACE else 0,
- if (finalFlag && !sym.hasAbstractFlag) ACC_FINAL else 0,
- if (sym.isStaticMember) ACC_STATIC else 0,
- if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
- if (sym.isArtifact) ACC_SYNTHETIC else 0,
- if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
- if (sym.isVarargsMethod) ACC_VARARGS else 0,
- if (sym.hasFlag(Flags.SYNCHRONIZED)) JAVA_ACC_SYNCHRONIZED else 0
- )
- }
- def javaFieldFlags(sym: Symbol) = (
- javaFlags(sym) | mkFlags(
- if (sym hasAnnotation TransientAttr) ACC_TRANSIENT else 0,
- if (sym hasAnnotation VolatileAttr) ACC_VOLATILE else 0,
- if (sym.isMutable) 0 else ACC_FINAL
- )
- )
-
- def isTopLevelModule(sym: Symbol): Boolean =
- afterPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
-
- def isStaticModule(sym: Symbol): Boolean = {
- sym.isModuleClass && !sym.isImplClass && !sym.isLifted
- }
-
-}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
index 540935fd57..50fd59b23f 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
@@ -20,7 +20,7 @@ trait GenJVMASM {
import definitions._
protected def outputDirectory(sym: Symbol): AbstractFile =
- settings.outputDirs outputDirFor beforeFlatten(sym.sourceFile)
+ settings.outputDirs outputDirFor enteringFlatten(sym.sourceFile)
protected def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = {
var dir = base
@@ -65,7 +65,7 @@ trait GenJVMASM {
// At this point it's a module with a main-looking method, so either succeed or warn that it isn't.
hasApproximate && {
// Before erasure so we can identify generic mains.
- beforeErasure {
+ enteringErasure {
val companion = sym.linkedClassOfClass
val companionMain = companion.tpe.member(nme.main)
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
deleted file mode 100644
index e002a614bd..0000000000
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
+++ /dev/null
@@ -1,142 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Iulian Dragos
- */
-
-package scala.tools.nsc
-package backend.jvm
-
-import scala.collection.{ mutable, immutable }
-import ch.epfl.lamp.fjbg._
-
-trait GenJVMUtil {
- self: GenJVM =>
-
- import global._
- import icodes._
- import icodes.opcodes._
- import definitions._
-
- /** Map from type kinds to the Java reference types. It is used for
- * loading class constants. @see Predef.classOf.
- */
- val classLiteral = immutable.Map[TypeKind, JObjectType](
- UNIT -> new JObjectType("java.lang.Void"),
- BOOL -> new JObjectType("java.lang.Boolean"),
- BYTE -> new JObjectType("java.lang.Byte"),
- SHORT -> new JObjectType("java.lang.Short"),
- CHAR -> new JObjectType("java.lang.Character"),
- INT -> new JObjectType("java.lang.Integer"),
- LONG -> new JObjectType("java.lang.Long"),
- FLOAT -> new JObjectType("java.lang.Float"),
- DOUBLE -> new JObjectType("java.lang.Double")
- )
-
- // Don't put this in per run caches.
- private val javaNameCache = new mutable.WeakHashMap[Symbol, Name]() ++= List(
- NothingClass -> binarynme.RuntimeNothing,
- RuntimeNothingClass -> binarynme.RuntimeNothing,
- NullClass -> binarynme.RuntimeNull,
- RuntimeNullClass -> binarynme.RuntimeNull
- )
-
- /** This trait may be used by tools that need access to
- * utility methods like javaName and javaType (for instance,
- * the Eclipse plugin uses it).
- */
- trait BytecodeUtil {
-
- val conds = immutable.Map[TestOp, Int](
- EQ -> JExtendedCode.COND_EQ,
- NE -> JExtendedCode.COND_NE,
- LT -> JExtendedCode.COND_LT,
- GT -> JExtendedCode.COND_GT,
- LE -> JExtendedCode.COND_LE,
- GE -> JExtendedCode.COND_GE
- )
-
- /** Specialized array conversion to prevent calling
- * java.lang.reflect.Array.newInstance via TraversableOnce.toArray
- */
-
- def mkArray(xs: Traversable[JType]): Array[JType] = { val a = new Array[JType](xs.size); xs.copyToArray(a); a }
- def mkArray(xs: Traversable[String]): Array[String] = { val a = new Array[String](xs.size); xs.copyToArray(a); a }
-
- /** Return the name of this symbol in a form that can be used on the Java
- * platform. It removes spaces from names.
- *
- * Special handling:
- * scala.Nothing erases to scala.runtime.Nothing$
- * scala.Null erases to scala.runtime.Null$
- *
- * This is needed because they are not real classes, and they mean
- * 'abrupt termination upon evaluation of that expression' or null respectively.
- * This handling is done already in GenICode, but here we need to remove
- * references from method signatures to these types, because such classes can
- * not exist in the classpath: the type checker will be very confused.
- */
- def javaName(sym: Symbol): String =
- javaNameCache.getOrElseUpdate(sym, {
- if (sym.isClass || (sym.isModule && !sym.isMethod))
- sym.javaBinaryName
- else
- sym.javaSimpleName
- }).toString
-
- def javaType(t: TypeKind): JType = (t: @unchecked) match {
- case UNIT => JType.VOID
- case BOOL => JType.BOOLEAN
- case BYTE => JType.BYTE
- case SHORT => JType.SHORT
- case CHAR => JType.CHAR
- case INT => JType.INT
- case LONG => JType.LONG
- case FLOAT => JType.FLOAT
- case DOUBLE => JType.DOUBLE
- case REFERENCE(cls) => new JObjectType(javaName(cls))
- case ARRAY(elem) => new JArrayType(javaType(elem))
- }
-
- def javaType(t: Type): JType = javaType(toTypeKind(t))
-
- def javaType(s: Symbol): JType =
- if (s.isMethod)
- new JMethodType(
- if (s.isClassConstructor) JType.VOID else javaType(s.tpe.resultType),
- mkArray(s.tpe.paramTypes map javaType)
- )
- else
- javaType(s.tpe)
-
- protected def genConstant(jcode: JExtendedCode, const: Constant) {
- const.tag match {
- case UnitTag => ()
- case BooleanTag => jcode emitPUSH const.booleanValue
- case ByteTag => jcode emitPUSH const.byteValue
- case ShortTag => jcode emitPUSH const.shortValue
- case CharTag => jcode emitPUSH const.charValue
- case IntTag => jcode emitPUSH const.intValue
- case LongTag => jcode emitPUSH const.longValue
- case FloatTag => jcode emitPUSH const.floatValue
- case DoubleTag => jcode emitPUSH const.doubleValue
- case StringTag => jcode emitPUSH const.stringValue
- case NullTag => jcode.emitACONST_NULL()
- case ClazzTag =>
- val kind = toTypeKind(const.typeValue)
- val toPush =
- if (kind.isValueType) classLiteral(kind)
- else javaType(kind).asInstanceOf[JReferenceType]
-
- jcode emitPUSH toPush
-
- case EnumTag =>
- val sym = const.symbolValue
- jcode.emitGETSTATIC(javaName(sym.owner),
- javaName(sym),
- javaType(sym.tpe.underlying))
- case _ =>
- abort("Unknown constant value: " + const)
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
deleted file mode 100644
index aaffaa84d8..0000000000
--- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
+++ /dev/null
@@ -1,2358 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Nikolay Mihaylov
- */
-
-
-package scala.tools.nsc
-package backend.msil
-
-import java.io.{File, IOException}
-import java.nio.{ByteBuffer, ByteOrder}
-import scala.collection.{ mutable, immutable }
-import scala.tools.nsc.symtab._
-
-import ch.epfl.lamp.compiler.msil.{Type => MsilType, _}
-import ch.epfl.lamp.compiler.msil.emit._
-import ch.epfl.lamp.compiler.msil.util.PECustomMod
-import scala.language.postfixOps
-
-abstract class GenMSIL extends SubComponent {
- import global._
- import loaders.clrTypes
- import clrTypes.{types, constructors, methods, fields}
- import icodes._
- import icodes.opcodes._
-
- val x = loaders
-
- /** Create a new phase */
- override def newPhase(p: Phase) = new MsilPhase(p)
-
- val phaseName = "msil"
- /** MSIL code generation phase
- */
- class MsilPhase(prev: Phase) extends GlobalPhase(prev) {
- def name = phaseName
- override def newFlags = phaseNewFlags
-
- override def erasedTypes = true
-
- override def run() {
- if (settings.debug.value) inform("[running phase " + name + " on icode]")
-
- val codeGenerator = new BytecodeGenerator
-
- //classes is ICodes.classes, a HashMap[Symbol, IClass]
- classes.values foreach codeGenerator.findEntryPoint
- if( opt.showClass.isDefined && (codeGenerator.entryPoint == null) ) { // TODO introduce dedicated setting instead
- val entryclass = opt.showClass.get.toString
- warning("Couldn't find entry class " + entryclass)
- }
-
- codeGenerator.initAssembly
-
- val classesSorted = classes.values.toList.sortBy(c => c.symbol.id) // simplifies comparing cross-compiler vs. .exe output
- classesSorted foreach codeGenerator.createTypeBuilder
- classesSorted foreach codeGenerator.createClassMembers
-
- try {
- classesSorted foreach codeGenerator.genClass
- } finally {
- codeGenerator.writeAssembly
- }
- }
-
- override def apply(unit: CompilationUnit) {
- abort("MSIL works on icode classes, not on compilation units!")
- }
- }
-
- /**
- * MSIL bytecode generator.
- *
- */
- class BytecodeGenerator {
-
- val MODULE_INSTANCE_NAME = "MODULE$"
-
- import clrTypes.{VOID => MVOID, BOOLEAN => MBOOL, BYTE => MBYTE, SHORT => MSHORT,
- CHAR => MCHAR, INT => MINT, LONG => MLONG, FLOAT => MFLOAT,
- DOUBLE => MDOUBLE, OBJECT => MOBJECT, STRING => MSTRING,
- STRING_ARRAY => MSTRING_ARRAY,
- SYMTAB_CONSTR => SYMTAB_ATTRIBUTE_CONSTRUCTOR,
- SYMTAB_DEFAULT_CONSTR => SYMTAB_ATTRIBUTE_EMPTY_CONSTRUCTOR}
-
- val EXCEPTION = clrTypes.getType("System.Exception")
- val MBYTE_ARRAY = clrTypes.mkArrayType(MBYTE)
-
- val ICLONEABLE = clrTypes.getType("System.ICloneable")
- val MEMBERWISE_CLONE = MOBJECT.GetMethod("MemberwiseClone", MsilType.EmptyTypes)
-
- val MMONITOR = clrTypes.getType("System.Threading.Monitor")
- val MMONITOR_ENTER = MMONITOR.GetMethod("Enter", Array(MOBJECT))
- val MMONITOR_EXIT = MMONITOR.GetMethod("Exit", Array(MOBJECT))
-
- val MSTRING_BUILDER = clrTypes.getType("System.Text.StringBuilder")
- val MSTRING_BUILDER_CONSTR = MSTRING_BUILDER.GetConstructor(MsilType.EmptyTypes)
- val MSTRING_BUILDER_TOSTRING = MSTRING_BUILDER.GetMethod("ToString",
- MsilType.EmptyTypes)
-
- val TYPE_FROM_HANDLE =
- clrTypes.getType("System.Type").GetMethod("GetTypeFromHandle", Array(clrTypes.getType("System.RuntimeTypeHandle")))
-
- val INT_PTR = clrTypes.getType("System.IntPtr")
-
- val JOBJECT = definitions.ObjectClass
- val JSTRING = definitions.StringClass
-
- val SystemConvert = clrTypes.getType("System.Convert")
-
- val objParam = Array(MOBJECT)
-
- val toBool: MethodInfo = SystemConvert.GetMethod("ToBoolean", objParam) // see comment in emitUnbox
- val toSByte: MethodInfo = SystemConvert.GetMethod("ToSByte", objParam)
- val toShort: MethodInfo = SystemConvert.GetMethod("ToInt16", objParam)
- val toChar: MethodInfo = SystemConvert.GetMethod("ToChar", objParam)
- val toInt: MethodInfo = SystemConvert.GetMethod("ToInt32", objParam)
- val toLong: MethodInfo = SystemConvert.GetMethod("ToInt64", objParam)
- val toFloat: MethodInfo = SystemConvert.GetMethod("ToSingle", objParam)
- val toDouble: MethodInfo = SystemConvert.GetMethod("ToDouble", objParam)
-
- //val boxedUnit: FieldInfo = msilType(definitions.BoxedUnitModule.info).GetField("UNIT")
- val boxedUnit: FieldInfo = fields(definitions.BoxedUnit_UNIT)
-
- // Scala attributes
- // symtab.Definitions -> object (singleton..)
- val SerializableAttr = definitions.SerializableAttr.tpe
- val CloneableAttr = definitions.CloneableAttr.tpe
- val TransientAtt = definitions.TransientAttr.tpe
- // remoting: the architectures are too different, no mapping (no portable code
- // possible)
-
- // java instance methods that are mapped to static methods in .net
- // these will need to be called with OpCodes.Call (not Callvirt)
- val dynToStatMapped = mutable.HashSet[Symbol]()
-
- initMappings()
-
- /** Create the mappings between java and .net classes and methods */
- private def initMappings() {
- mapType(definitions.AnyClass, MOBJECT)
- mapType(definitions.AnyRefClass, MOBJECT)
- //mapType(definitions.NullClass, clrTypes.getType("scala.AllRef$"))
- //mapType(definitions.NothingClass, clrTypes.getType("scala.All$"))
- // FIXME: for some reason the upper two lines map to null
- mapType(definitions.NullClass, EXCEPTION)
- mapType(definitions.NothingClass, EXCEPTION)
-
- mapType(definitions.BooleanClass, MBOOL)
- mapType(definitions.ByteClass, MBYTE)
- mapType(definitions.ShortClass, MSHORT)
- mapType(definitions.CharClass, MCHAR)
- mapType(definitions.IntClass, MINT)
- mapType(definitions.LongClass, MLONG)
- mapType(definitions.FloatClass, MFLOAT)
- mapType(definitions.DoubleClass, MDOUBLE)
- }
-
- var clasz: IClass = _
- var method: IMethod = _
-
- var massembly: AssemblyBuilder = _
- var mmodule: ModuleBuilder = _
- var mcode: ILGenerator = _
-
- var assemName: String = _
- var firstSourceName = ""
- var outDir: File = _
- var srcPath: File = _
- var moduleName: String = _
-
- def initAssembly() {
-
- assemName = settings.assemname.value
-
- if (assemName == "") {
- if (entryPoint != null) {
- assemName = msilName(entryPoint.enclClass)
- // remove the $ at the end (from module-name)
- assemName = assemName.substring(0, assemName.length() - 1)
- } else {
- // assuming filename of first source file
- assert(firstSourceName.endsWith(".scala"), firstSourceName)
- assemName = firstSourceName.substring(0, firstSourceName.length() - 6)
- }
- } else {
- if (assemName.endsWith(".msil"))
- assemName = assemName.substring(0, assemName.length()-5)
- if (assemName.endsWith(".il"))
- assemName = assemName.substring(0, assemName.length()-3)
- val f: File = new File(assemName)
- assemName = f.getName()
- }
-
- outDir = new File(settings.outdir.value)
-
- srcPath = new File(settings.sourcedir.value)
-
- val assemblyName = new AssemblyName()
- assemblyName.Name = assemName
- massembly = AssemblyBuilderFactory.DefineDynamicAssembly(assemblyName)
-
- moduleName = assemName // + (if (entryPoint == null) ".dll" else ".exe")
- // filename here: .dll or .exe (in both parameters), second: give absolute-path
- mmodule = massembly.DefineDynamicModule(moduleName,
- new File(outDir, moduleName).getAbsolutePath())
- assert (mmodule != null)
- }
-
-
- /**
- * Form of the custom Attribute parameter (Ecma-335.pdf)
- * - p. 163 for CustomAttrib Form,
- * - p. 164 for FixedArg Form (Array and Element) (if array or not is known!)
- * !! least significant byte first if values longer than one byte !!
- *
- * 1: Prolog (unsigned int16, value 0x0001) -> symtab[0] = 0x01, symtab[1] = 0x00
- * 2: FixedArgs (directly the data, get number and types from related constructor)
- * 2.1: length of the array (unsigned int32, 4 bytes, least significant first)
- * 2.2: the byte array data
- * 3: NumNamed (unsigned int16, number of named fields and properties, 0x0000)
- */
- def addSymtabAttribute(sym: Symbol, tBuilder: TypeBuilder) {
- def addMarker() {
- val markerSymtab = new Array[Byte](4)
- markerSymtab(0) = 1.toByte
- tBuilder.SetCustomAttribute(SYMTAB_ATTRIBUTE_EMPTY_CONSTRUCTOR, markerSymtab)
- }
-
- // both conditions are needed (why exactly..?)
- if (tBuilder.Name.endsWith("$") || sym.isModuleClass) {
- addMarker()
- } else {
- currentRun.symData.get(sym) match {
- case Some(pickle) =>
- var size = pickle.writeIndex
- val symtab = new Array[Byte](size + 8)
- symtab(0) = 1.toByte
- for (i <- 2 until 6) {
- symtab(i) = (size & 0xff).toByte
- size = size >> 8
- }
- java.lang.System.arraycopy(pickle.bytes, 0, symtab, 6, pickle.writeIndex)
-
- tBuilder.SetCustomAttribute(SYMTAB_ATTRIBUTE_CONSTRUCTOR, symtab)
-
- currentRun.symData -= sym
- currentRun.symData -= sym.companionSymbol
-
- case _ =>
- addMarker()
- }
- }
- }
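A minimal illustrative sketch of the symtab blob layout described in the comment above, as addSymtabAttribute builds it (the helper name symtabBlob and the pickleBytes parameter are stand-ins, not names from the original sources):

    // Illustrative only: 2 bytes Prolog + 4 bytes length (little-endian) + pickle data + 2 bytes NumNamed.
    def symtabBlob(pickleBytes: Array[Byte]): Array[Byte] = {
      val size = pickleBytes.length
      val blob = new Array[Byte](size + 8)
      blob(0) = 0x01.toByte                              // Prolog 0x0001, least significant byte first
      blob(1) = 0x00.toByte
      for (i <- 0 until 4)                               // FixedArg: byte-array length, little-endian
        blob(2 + i) = ((size >> (8 * i)) & 0xff).toByte
      System.arraycopy(pickleBytes, 0, blob, 6, size)    // the pickle bytes themselves
      blob                                               // trailing two bytes stay 0x0000 = NumNamed
    }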
-
- /**
- * Mutates `member` adding CLR attributes (if any) based on sym.annotations.
- * Please notice that CLR custom modifiers are a different beast (see customModifiers below)
- * and thus shouldn't be added by this method.
- */
- def addAttributes(member: ICustomAttributeSetter, annotations: List[AnnotationInfo]) {
- val attributes = annotations.map(_.atp.typeSymbol).collect {
- case definitions.TransientAttr => null // TODO this is just an example
- }
- return // TODO: implement at some point
- }
-
- /**
- * What's a CLR custom modifier? Intro available as source comments in compiler.msil.CustomModifier.
- * It's basically a marker associated with a location (think of FieldInfo, ParameterInfo, and PropertyInfo)
- * and thus that marker (be it optional or required) becomes part of the signature of that location.
- * Some annotations will become CLR attributes (see addAttributes above), others custom modifiers (this method).
- */
- def customModifiers(annotations: List[AnnotationInfo]): Array[CustomModifier] = {
- annotations.map(_.atp.typeSymbol).collect {
- case definitions.VolatileAttr => new CustomModifier(true, CustomModifier.VolatileMarker)
- } toArray
- }
-
-
-
- /*
- debuglog("creating annotations: " + annotations + " for member : " + member)
- for (annot@ AnnotationInfo(typ, annArgs, nvPairs) <- annotations ;
- if annot.isConstant)
- //!typ.typeSymbol.isJavaDefined
- {
-// assert(consts.length <= 1,
-// "too many constant arguments for annotations; "+consts.toString())
-
- // Problem / TODO having the symbol of the annotations type would be nicer
- // (i hope that type.typeSymbol is the same as the one in types2create)
- // AND: this will crash if the annotations Type is already compiled (-> not a typeBuilder)
- // when this is solved, types2create will be the same as icodes.classes, thus superfluous
- val annType: TypeBuilder = getType(typ.typeSymbol).asInstanceOf[TypeBuilder]
-// val annType: MsilType = getType(typ.typeSymbol)
-
- // Problem / TODO: i have no idea which constructor is used. This
- // information should be available in AnnotationInfo.
- annType.CreateType() // else, GetConstructors can't be used
- val constr: ConstructorInfo = annType.GetConstructors()(0)
- // prevent a second call of CreateType, only needed because there's no
- // other way than GetConstructors()(0) to get the constructor, if there's
- // no constructor symbol available.
-
- val args: Array[Byte] =
- getAttributeArgs(
- annArgs map (_.constant.get),
- (for((n,v) <- nvPairs) yield (n, v.constant.get)))
- member.SetCustomAttribute(constr, args)
- }
- } */
-
-/* def getAttributeArgs(consts: List[Constant], nvPairs: List[(Name, Constant)]): Array[Byte] = {
- val buf = ByteBuffer.allocate(2048) // FIXME: this may be not enough!
- buf.order(ByteOrder.LITTLE_ENDIAN)
- buf.putShort(1.toShort) // signature
-
- def emitSerString(str: String) = {
- // this is wrong, it has to be the length of the UTF-8 byte array, which
- // may be longer (see clr-book on page 302)
-// val length: Int = str.length
- val strBytes: Array[Byte] = try {
- str.getBytes("UTF-8")
- } catch {
- case _: Error => abort("could not get byte-array for string: " + str)
- }
- val length: Int = strBytes.length //this length is stored big-endian
- if (length < 128)
- buf.put(length.toByte)
- else if (length < (1<<14)) {
- buf.put(((length >> 8) | 0x80).toByte) // the bits 14 and 15 of length are '0'
- buf.put((length & 0xff).toByte)
- } else if (length < (1 << 29)) {
- buf.put(((length >> 24) | 0xc0).toByte)
- buf.put(((length >> 16) & 0xff).toByte)
- buf.put(((length >> 8) & 0xff).toByte)
- buf.put(((length ) & 0xff).toByte)
- } else
- abort("string too long for attribute parameter: " + length)
- buf.put(strBytes)
- }
-
- def emitConst(const: Constant): Unit = const.tag match {
- case BooleanTag => buf.put((if (const.booleanValue) 1 else 0).toByte)
- case ByteTag => buf.put(const.byteValue)
- case ShortTag => buf.putShort(const.shortValue)
- case CharTag => buf.putChar(const.charValue)
- case IntTag => buf.putInt(const.intValue)
- case LongTag => buf.putLong(const.longValue)
- case FloatTag => buf.putFloat(const.floatValue)
- case DoubleTag => buf.putDouble(const.doubleValue)
- case StringTag =>
- val str: String = const.stringValue
- if (str == null) {
- buf.put(0xff.toByte)
- } else {
- emitSerString(str)
- }
- case ArrayTag =>
- val arr: Array[Constant] = const.arrayValue
- if (arr == null) {
- buf.putInt(0xffffffff)
- } else {
- buf.putInt(arr.length)
- arr.foreach(emitConst)
- }
-
- // TODO: other Tags: NoTag, UnitTag, ClazzTag, EnumTag, ArrayTag ???
-
- case _ => abort("could not handle attribute argument: " + const)
- }
-
- consts foreach emitConst
- buf.putShort(nvPairs.length.toShort)
- def emitNamedArg(nvPair: (Name, Constant)) {
- // the named argument is a property of the attribute (it can't be a field, since
- // all fields in scala are private)
- buf.put(0x54.toByte)
-
- def emitType(c: Constant) = c.tag match { // type of the constant, Ecma-335.pdf, page 151
- case BooleanTag => buf.put(0x02.toByte)
- case ByteTag => buf.put(0x05.toByte)
- case ShortTag => buf.put(0x06.toByte)
- case CharTag => buf.put(0x07.toByte)
- case IntTag => buf.put(0x08.toByte)
- case LongTag => buf.put(0x0a.toByte)
- case FloatTag => buf.put(0x0c.toByte)
- case DoubleTag => buf.put(0x0d.toByte)
- case StringTag => buf.put(0x0e.toByte)
-
- // TODO: other Tags: NoTag, UnitTag, ClazzTag, EnumTag ???
-
- // ArrayTag falls in here
- case _ => abort("could not handle attribute argument: " + c)
- }
-
- val cnst: Constant = nvPair._2
- if (cnst.tag == ArrayTag) {
- buf.put(0x1d.toByte)
- emitType(cnst.arrayValue(0)) // FIXME: will crash if array length = 0
- } else if (cnst.tag == EnumTag) {
- buf.put(0x55.toByte)
- // TODO: put a SerString (don't know what exactly, names of the enums somehow..)
- } else {
- buf.put(0x51.toByte)
- emitType(cnst)
- }
-
- emitSerString(nvPair._1.toString)
- emitConst(nvPair._2)
- }
-
- val length = buf.position()
- buf.array().slice(0, length)
- } */
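For reference, a self-contained sketch of the ECMA-335 compressed unsigned-integer encoding that the commented-out emitSerString above uses for string lengths (putCompressedLength is an illustrative helper name, not part of the original sources):

    // Illustrative only: 1, 2 or 4 bytes depending on magnitude (ECMA-335 blob heap encoding).
    def putCompressedLength(buf: java.nio.ByteBuffer, length: Int): Unit =
      if (length < 0x80)                                 // 0bbbbbbb
        buf.put(length.toByte)
      else if (length < 0x4000) {                        // 10bbbbbb bbbbbbbb
        buf.put(((length >> 8) | 0x80).toByte)
        buf.put((length & 0xff).toByte)
      } else if (length < (1 << 29)) {                   // 110bbbbb bbbbbbbb bbbbbbbb bbbbbbbb
        buf.put(((length >> 24) | 0xc0).toByte)
        buf.put(((length >> 16) & 0xff).toByte)
        buf.put(((length >> 8) & 0xff).toByte)
        buf.put((length & 0xff).toByte)
      } else
        sys.error("length too large for a compressed integer: " + length)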
-
- def writeAssembly() {
- if (entryPoint != null) {
- assert(entryPoint.enclClass.isModuleClass, entryPoint.enclClass)
- val mainMethod = methods(entryPoint)
- val stringArrayTypes: Array[MsilType] = Array(MSTRING_ARRAY)
- val globalMain = mmodule.DefineGlobalMethod(
- "Main", MethodAttributes.Public | MethodAttributes.Static,
- MVOID, stringArrayTypes)
- globalMain.DefineParameter(0, ParameterAttributes.None, "args")
- massembly.SetEntryPoint(globalMain)
- val code = globalMain.GetILGenerator()
- val moduleField = getModuleInstanceField(entryPoint.enclClass)
- code.Emit(OpCodes.Ldsfld, moduleField)
- code.Emit(OpCodes.Ldarg_0)
- code.Emit(OpCodes.Callvirt, mainMethod)
- code.Emit(OpCodes.Ret)
- }
- createTypes()
- var outDirName: String = null
- try {
- if (settings.Ygenjavap.isDefault) { // we reuse the JVM-sounding setting because it's conceptually similar
- outDirName = outDir.getPath()
- massembly.Save(outDirName + "\\" + assemName + ".msil") /* use SingleFileILPrinterVisitor */
- } else {
- outDirName = srcPath.getPath()
- massembly.Save(settings.Ygenjavap.value, outDirName) /* use MultipleFilesILPrinterVisitor */
- }
- } catch {
- case e:IOException => abort("Could not write to " + outDirName + ": " + e.getMessage())
- }
- }
-
- private def createTypes() {
- for (sym <- classes.keys) {
- val iclass = classes(sym)
- val tBuilder = types(sym).asInstanceOf[TypeBuilder]
-
- debuglog("Calling CreatType for " + sym + ", " + tBuilder.toString)
-
- tBuilder.CreateType()
- tBuilder.setSourceFilepath(iclass.cunit.source.file.path)
- }
- }
-
- private[GenMSIL] def ilasmFileName(iclass: IClass) : String = {
- // method.sourceFile contains just the filename
- iclass.cunit.source.file.toString.replace("\\", "\\\\")
- }
-
- private[GenMSIL] def genClass(iclass: IClass) {
- val sym = iclass.symbol
- debuglog("Generating class " + sym + " flags: " + Flags.flagsToString(sym.flags))
- clasz = iclass
-
- val tBuilder = getType(sym).asInstanceOf[TypeBuilder]
- if (isCloneable(sym)) {
- // FIXME: why is there no nme.clone_ ?
- // "Clone": if the code is non-portable, "Clone" is defined, not "clone"
- // TODO: improve condition (should override AnyRef.clone)
- if (iclass.methods.forall(m => {
- !((m.symbol.name.toString == "clone" || m.symbol.name.toString == "Clone") &&
- m.symbol.tpe.paramTypes.isEmpty)
- })) {
- debuglog("auto-generating cloneable method for " + sym)
- val attrs: Short = (MethodAttributes.Public | MethodAttributes.Virtual |
- MethodAttributes.HideBySig).toShort
- val cloneMethod = tBuilder.DefineMethod("Clone", attrs, MOBJECT,
- MsilType.EmptyTypes)
- val clCode = cloneMethod.GetILGenerator()
- clCode.Emit(OpCodes.Ldarg_0)
- clCode.Emit(OpCodes.Call, MEMBERWISE_CLONE)
- clCode.Emit(OpCodes.Ret)
- }
- }
-
- val line = sym.pos.line
- tBuilder.setPosition(line, ilasmFileName(iclass))
-
- if (isTopLevelModule(sym)) {
- if (sym.companionClass == NoSymbol)
- generateMirrorClass(sym)
- else
- log("No mirror class for module with linked class: " +
- sym.fullName)
- }
-
- addSymtabAttribute(sym, tBuilder)
- addAttributes(tBuilder, sym.annotations)
-
- if (iclass.symbol != definitions.ArrayClass)
- iclass.methods foreach genMethod
-
- } //genClass
-
-
- private def genMethod(m: IMethod) {
- debuglog("Generating method " + m.symbol + " flags: " + Flags.flagsToString(m.symbol.flags) +
- " owner: " + m.symbol.owner)
- method = m
- localBuilders.clear
- computeLocalVarsIndex(m)
-
- if (m.symbol.isClassConstructor) {
- mcode = constructors(m.symbol).asInstanceOf[ConstructorBuilder].GetILGenerator()
- } else {
- val mBuilder = methods(m.symbol).asInstanceOf[MethodBuilder]
- if (!mBuilder.IsAbstract())
- try {
- mcode = mBuilder.GetILGenerator()
- } catch {
- case e: Exception =>
- java.lang.System.out.println("m.symbol = " + Flags.flagsToString(m.symbol.flags) + " " + m.symbol)
- java.lang.System.out.println("m.symbol.owner = " + Flags.flagsToString(m.symbol.owner.flags) + " " + m.symbol.owner)
- java.lang.System.out.println("mBuilder = " + mBuilder)
- java.lang.System.out.println("mBuilder.DeclaringType = " +
- TypeAttributes.toString(mBuilder.DeclaringType.Attributes) +
- "::" + mBuilder.DeclaringType)
- throw e
- }
- else
- mcode = null
- }
-
- if (mcode != null) {
- for (local <- m.locals ; if !(m.params contains local)) {
- debuglog("add local var: " + local + ", of kind " + local.kind)
- val t: MsilType = msilType(local.kind)
- val localBuilder = mcode.DeclareLocal(t)
- localBuilder.SetLocalSymInfo(msilName(local.sym))
- localBuilders(local) = localBuilder
- }
- genCode(m)
- }
-
- }
-
- /** Special linearizer for methods with at least one exception handler. This
- * linearizer puts all basic blocks in the right order so that nested
- * try-catch and try-finally blocks can be emitted.
- */
- val msilLinearizer = new MSILLinearizer()
-
- val labels = mutable.HashMap[BasicBlock, Label]()
-
- /* when emitting .line, it's enough to include the full filename just once per method, thus reducing filesize.
- * this scheme relies on the fact that the entry block is emitted first. */
- var dbFilenameSeen = false
-
- def genCode(m: IMethod) {
-
- def makeLabels(blocks: List[BasicBlock]) = {
- debuglog("Making labels for: " + method)
- for (bb <- blocks) labels(bb) = mcode.DefineLabel()
- }
-
- labels.clear
-
- var linearization = if(!m.exh.isEmpty) msilLinearizer.linearize(m)
- else linearizer.linearize(m)
-
- if (!m.exh.isEmpty)
- linearization = computeExceptionMaps(linearization, m)
-
- makeLabels(linearization)
-
- // debug val blocksInM = m.code.blocks.toList.sortBy(bb => bb.label)
- // debug val blocksInL = linearization.sortBy(bb => bb.label)
- // debug val MButNotL = (blocksInM.toSet) diff (blocksInL.toSet) // if non-empty, a jump to B fails to find a label for B (case CJUMP, case CZJUMP)
- // debug if(!MButNotL.isEmpty) { }
-
- dbFilenameSeen = false
- genBlocks(linearization)
-
- // RETURN inside exception blocks are replaced by Leave. The target of the
- // leave is a `Ret` outside any exception block (generated here).
- if (handlerReturnMethod == m) {
- mcode.MarkLabel(handlerReturnLabel)
- if (handlerReturnKind != UNIT)
- mcode.Emit(OpCodes.Ldloc, handlerReturnLocal)
- mcode.Emit(OpCodes.Ret)
- }
-
- beginExBlock.clear()
- beginCatchBlock.clear()
- endExBlock.clear()
- endFinallyLabels.clear()
- }
-
- def genBlocks(blocks: List[BasicBlock], previous: BasicBlock = null) {
- blocks match {
- case Nil => ()
- case x :: Nil => genBlock(x, prev = previous, next = null)
- case x :: y :: ys => genBlock(x, prev = previous, next = y); genBlocks(y :: ys, previous = x)
- }
- }
-
- // the try blocks starting at a certain BasicBlock
- val beginExBlock = mutable.HashMap[BasicBlock, List[ExceptionHandler]]()
-
- // the catch blocks starting / ending at a certain BasicBlock
- val beginCatchBlock = mutable.HashMap[BasicBlock, ExceptionHandler]()
- val endExBlock = mutable.HashMap[BasicBlock, List[ExceptionHandler]]()
-
- /** When emitting the code (genBlock), the stack of currently active try / catch
- * blocks. When seeing a `RETURN` inside a try / catch, we need to
- * - store the result in a local (if it's not UNIT)
- * - emit `Leave handlerReturnLabel` instead of the Return
- * - emit code at the end: load the local and return its value
- */
- var currentHandlers = new mutable.Stack[ExceptionHandler]
- // The IMethod the Local/Label/Kind below belong to
- var handlerReturnMethod: IMethod = _
- // Stores the result when returning inside an exception block
- var handlerReturnLocal: LocalBuilder = _
- // Label for a return instruction outside any exception block
- var handlerReturnLabel: Label = _
- // The result kind.
- var handlerReturnKind: TypeKind = _
- def returnFromHandler(kind: TypeKind): (LocalBuilder, Label) = {
- if (handlerReturnMethod != method) {
- handlerReturnMethod = method
- if (kind != UNIT) {
- handlerReturnLocal = mcode.DeclareLocal(msilType(kind))
- handlerReturnLocal.SetLocalSymInfo("$handlerReturn")
- }
- handlerReturnLabel = mcode.DefineLabel()
- handlerReturnKind = kind
- }
- (handlerReturnLocal, handlerReturnLabel)
- }
-
- /** For try/catch nested inside a finally, we can't use `Leave OutsideFinally`, the
- * Leave target has to be inside the finally (and it has to be the `endfinally` instruction).
- * So for every finalizer, we have a label which marks the place of the `endfinally`,
- * nested try/catch blocks will leave there.
- */
- val endFinallyLabels = mutable.HashMap[ExceptionHandler, Label]()
-
- /** Computes which blocks are the beginning / end of a try or catch block */
- private def computeExceptionMaps(blocks: List[BasicBlock], m: IMethod): List[BasicBlock] = {
- val visitedBlocks = new mutable.HashSet[BasicBlock]()
-
- // handlers which have not been introduced so far
- var openHandlers = m.exh
-
-
- /** Example
- * try {
- * try {
- * // *1*
- * } catch {
- * case h1 =>
- * }
- * } catch {
- * case h2 =>
- * case h3 =>
- * try {
- *
- * } catch {
- * case h4 => // *2*
- * case h5 =>
- * }
- * }
- */
-
- // Stack of nested try blocks. Each block has a List of ExceptionHandlers (multiple
- // catch statements). Example *1*: Stack(List(h2, h3), List(h1))
- val currentTryHandlers = new mutable.Stack[List[ExceptionHandler]]()
-
- // Stack of nested catch blocks. The head of the list is the current catch block. The
- // tail is all following catch blocks. Example *2*: Stack(List(h3), List(h4, h5))
- val currentCatchHandlers = new mutable.Stack[List[ExceptionHandler]]()
-
- for (b <- blocks) {
-
- // are we past the current catch blocks?
- def endHandlers(): List[ExceptionHandler] = {
- var res: List[ExceptionHandler] = Nil
- if (!currentCatchHandlers.isEmpty) {
- val handler = currentCatchHandlers.top.head
- if (!handler.blocks.contains(b)) {
- // all blocks of the handler are either visited, or not part of the linearization (i.e. dead)
- assert(handler.blocks.forall(b => visitedBlocks.contains(b) || !blocks.contains(b)),
- "Bad linearization of basic blocks inside catch. Found block not part of the handler\n"+
- b.fullString +"\nwhile in catch-part of\n"+ handler)
-
- val rest = currentCatchHandlers.pop.tail
- if (rest.isEmpty) {
- // all catch blocks of that exception handler are covered
- res = handler :: endHandlers()
- } else {
- // there are more catch blocks for that try (handlers covering the same)
- currentCatchHandlers.push(rest)
- beginCatchBlock(b) = rest.head
- }
- }
- }
- res
- }
- val end = endHandlers()
- if (!end.isEmpty) endExBlock(b) = end
-
- // are we past the current try block?
- if (!currentTryHandlers.isEmpty) {
- val handler = currentTryHandlers.top.head
- if (!handler.covers(b)) {
- // all of the covered blocks are visited, or not part of the linearization
- assert(handler.covered.forall(b => visitedBlocks.contains(b) || !blocks.contains(b)),
- "Bad linearization of basic blocks inside try. Found non-covered block\n"+
- b.fullString +"\nwhile in try-part of\n"+ handler)
-
- assert(handler.startBlock == b,
- "Bad linearization of basic blocks. The entry block of a catch does not directly follow the try\n"+
- b.fullString +"\n"+ handler)
-
- val handlers = currentTryHandlers.pop
- currentCatchHandlers.push(handlers)
- beginCatchBlock(b) = handler
- }
- }
-
- // are there try blocks starting at b?
- val (newHandlers, stillOpen) = openHandlers.partition(_.covers(b))
- openHandlers = stillOpen
-
- val newHandlersBySize = newHandlers.groupBy(_.covered.size)
- // big handlers first, smaller ones are nested inside the try of the big one
- // (checked by the assertions below)
- val sizes = newHandlersBySize.keys.toList.sortWith(_ > _)
-
- val beginHandlers = new mutable.ListBuffer[ExceptionHandler]
- for (s <- sizes) {
- val sHandlers = newHandlersBySize(s)
- for (h <- sHandlers) {
- assert(h.covered == sHandlers.head.covered,
- "bad nesting of exception handlers. same size, but not covering same blocks\n"+
- h +"\n"+ sHandlers.head)
- assert(h.resultKind == sHandlers.head.resultKind,
- "bad nesting of exception handlers. same size, but the same resultKind\n"+
- h +"\n"+ sHandlers.head)
- }
- for (bigger <- beginHandlers; h <- sHandlers) {
- assert(h.covered.subsetOf(bigger.covered),
- "bad nesting of exception handlers. try blocks of smaller handler are not nested in bigger one.\n"+
- h +"\n"+ bigger)
- assert(h.blocks.toSet.subsetOf(bigger.covered),
- "bad nesting of exception handlers. catch blocks of smaller handler are not nested in bigger one.\n"+
- h +"\n"+ bigger)
- }
- beginHandlers += sHandlers.head
- currentTryHandlers.push(sHandlers)
- }
- beginExBlock(b) = beginHandlers.toList
- visitedBlocks += b
- }
-
- // if there are handlers left (i.e. handlers covering nothing, or a
- // non-existent (dead) block), remove their catch-blocks.
- val liveBlocks = if (openHandlers.isEmpty) blocks else {
- blocks.filter(b => openHandlers.forall(h => !h.blocks.contains(b)))
- }
-
- /** There might be open handlers, but no more blocks. This happens when a try/catch
- * ends with `throw` or `return`, e.g.
- * def foo() { try { .. throw } catch { _ => .. throw } }
- *
- * In this case we need some code after the catch block for the auto-generated
- * `leave` instruction. So we're adding a (dead) `throw new Exception`.
- */
- val rest = currentCatchHandlers.map(handlers => {
- assert(handlers.length == 1, handlers)
- handlers.head
- }).toList
-
- if (rest.isEmpty) {
- liveBlocks
- } else {
- val b = m.code.newBlock
- b.emit(Seq(
- NEW(REFERENCE(definitions.ThrowableClass)),
- DUP(REFERENCE(definitions.ObjectClass)),
- CALL_METHOD(definitions.ThrowableClass.primaryConstructor, Static(true)),
- THROW(definitions.ThrowableClass)
- ))
- b.close
- endExBlock(b) = rest
- liveBlocks ::: List(b)
- }
- }
-
- /**
- * @param block the BasicBlock to emit code for
- * @param next the following BasicBlock, `null` if `block` is the last one
- */
- def genBlock(block: BasicBlock, prev: BasicBlock, next: BasicBlock) {
-
- def loadLocalOrAddress(local: Local, msg : String , loadAddr : Boolean) {
- debuglog(msg + " for " + local)
- val isArg = local.arg
- val i = local.index
- if (isArg)
- loadArg(mcode, loadAddr)(i)
- else
- loadLocal(i, local, mcode, loadAddr)
- }
-
- def loadFieldOrAddress(field: Symbol, isStatic: Boolean, msg: String, loadAddr : Boolean) {
- debuglog(msg + " with owner: " + field.owner +
- " flags: " + Flags.flagsToString(field.owner.flags))
- var fieldInfo = fields.get(field) match {
- case Some(fInfo) => fInfo
- case None =>
- val fInfo = getType(field.owner).GetField(msilName(field))
- fields(field) = fInfo
- fInfo
- }
- if (fieldInfo.IsVolatile) {
- mcode.Emit(OpCodes.Volatile)
- }
- if (!fieldInfo.IsLiteral) {
- if (loadAddr) {
- mcode.Emit(if (isStatic) OpCodes.Ldsflda else OpCodes.Ldflda, fieldInfo)
- } else {
- mcode.Emit(if (isStatic) OpCodes.Ldsfld else OpCodes.Ldfld, fieldInfo)
- }
- } else {
- assert(!loadAddr, "can't take AddressOf a literal field (not even with readonly. prefix) because no memory was allocated to such field ...")
- // TODO the above can be overcome by loading the value, boxing, and finally unboxing. An address to a copy of the raw value will be on the stack.
- /* We perform `field inlining' as required by CLR.
- * Emit as for a CONSTANT ICode stmt, with the twist that the constant value is available
- * as a java.lang.Object and its .NET type allows constant initialization in CLR, i.e. that type
- * is one of I1, I2, I4, I8, R4, R8, CHAR, BOOLEAN, STRING, or CLASS (in this last case,
- * only accepting nullref as value). See Table 9-1 in Lidin's book on ILAsm. */
- val value = fieldInfo.getValue()
- if (value == null) {
- mcode.Emit(OpCodes.Ldnull)
- } else {
- val typ = if (fieldInfo.FieldType.IsEnum) fieldInfo.FieldType.getUnderlyingType
- else fieldInfo.FieldType
- if (typ == clrTypes.STRING) {
- mcode.Emit(OpCodes.Ldstr, value.asInstanceOf[String])
- } else if (typ == clrTypes.BOOLEAN) {
- mcode.Emit(if (value.asInstanceOf[Boolean]) OpCodes.Ldc_I4_1
- else OpCodes.Ldc_I4_0)
- } else if (typ == clrTypes.BYTE || typ == clrTypes.UBYTE) {
- loadI4(value.asInstanceOf[Byte], mcode)
- } else if (typ == clrTypes.SHORT || typ == clrTypes.USHORT) {
- loadI4(value.asInstanceOf[Int], mcode)
- } else if (typ == clrTypes.CHAR) {
- loadI4(value.asInstanceOf[Char], mcode)
- } else if (typ == clrTypes.INT || typ == clrTypes.UINT) {
- loadI4(value.asInstanceOf[Int], mcode)
- } else if (typ == clrTypes.LONG || typ == clrTypes.ULONG) {
- mcode.Emit(OpCodes.Ldc_I8, value.asInstanceOf[Long])
- } else if (typ == clrTypes.FLOAT) {
- mcode.Emit(OpCodes.Ldc_R4, value.asInstanceOf[Float])
- } else if (typ == clrTypes.DOUBLE) {
- mcode.Emit(OpCodes.Ldc_R8, value.asInstanceOf[Double])
- } else {
- /* TODO one more case is described in Partition II, 16.2: bytearray(...) */
- abort("Unknown type for static literal field: " + fieldInfo)
- }
- }
- }
- }
-
- /** Creating objects works differently on .NET. On the JVM
- * - NEW(type) => reference on Stack
- * - DUP, load arguments, CALL_METHOD(constructor)
- *
- * On .NET, the NEW and DUP are ignored, but we emit a special method call
- * - load arguments
- * - NewObj(constructor) => reference on stack
- *
- * This variable tells whether the previous instruction was a NEW; if so,
- * we expect a DUP, which is not emitted. */
- var previousWasNEW = false
-
- var lastLineNr: Int = 0
- var lastPos: Position = NoPosition
-
-
- // EndExceptionBlock must happen before MarkLabel because it adds the
- // Leave instruction. Otherwise, labels(block) points to the Leave
- // (inside the catch) instead of the instruction afterwards.
- for (handlers <- endExBlock.get(block); exh <- handlers) {
- currentHandlers.pop()
- for (l <- endFinallyLabels.get(exh))
- mcode.MarkLabel(l)
- mcode.EndExceptionBlock()
- }
-
- mcode.MarkLabel(labels(block))
- debuglog("Generating code for block: " + block)
-
- for (handler <- beginCatchBlock.get(block)) {
- if (!currentHandlers.isEmpty && currentHandlers.top.covered == handler.covered) {
- currentHandlers.pop()
- currentHandlers.push(handler)
- }
- if (handler.cls == NoSymbol) {
- // `finally` blocks are represented the same as `catch`, but with no catch-type
- mcode.BeginFinallyBlock()
- } else {
- val t = getType(handler.cls)
- mcode.BeginCatchBlock(t)
- }
- }
- for (handlers <- beginExBlock.get(block); exh <- handlers) {
- currentHandlers.push(exh)
- mcode.BeginExceptionBlock()
- }
-
- for (instr <- block) {
- try {
- val currentLineNr = instr.pos.line
- val skip = if(instr.pos.isRange) instr.pos.sameRange(lastPos) else (currentLineNr == lastLineNr);
- if(!skip || !dbFilenameSeen) {
- val fileName = if(dbFilenameSeen) "" else {dbFilenameSeen = true; ilasmFileName(clasz)};
- if(instr.pos.isRange) {
- val startLine = instr.pos.focusStart.line
- val endLine = instr.pos.focusEnd.line
- val startCol = instr.pos.focusStart.column
- val endCol = instr.pos.focusEnd.column
- mcode.setPosition(startLine, endLine, startCol, endCol, fileName)
- } else {
- mcode.setPosition(instr.pos.line, fileName)
- }
- lastLineNr = currentLineNr
- lastPos = instr.pos
- }
- } catch { case _: UnsupportedOperationException => () }
-
- if (previousWasNEW)
- assert(instr.isInstanceOf[DUP], block)
-
- instr match {
- case THIS(clasz) =>
- mcode.Emit(OpCodes.Ldarg_0)
-
- case CONSTANT(const) =>
- const.tag match {
- case UnitTag => ()
- case BooleanTag => mcode.Emit(if (const.booleanValue) OpCodes.Ldc_I4_1
- else OpCodes.Ldc_I4_0)
- case ByteTag => loadI4(const.byteValue, mcode)
- case ShortTag => loadI4(const.shortValue, mcode)
- case CharTag => loadI4(const.charValue, mcode)
- case IntTag => loadI4(const.intValue, mcode)
- case LongTag => mcode.Emit(OpCodes.Ldc_I8, const.longValue)
- case FloatTag => mcode.Emit(OpCodes.Ldc_R4, const.floatValue)
- case DoubleTag => mcode.Emit(OpCodes.Ldc_R8, const.doubleValue)
- case StringTag => mcode.Emit(OpCodes.Ldstr, const.stringValue)
- case NullTag => mcode.Emit(OpCodes.Ldnull)
- case ClazzTag =>
- mcode.Emit(OpCodes.Ldtoken, msilType(const.typeValue))
- mcode.Emit(OpCodes.Call, TYPE_FROM_HANDLE)
- case _ => abort("Unknown constant value: " + const)
- }
-
- case LOAD_ARRAY_ITEM(kind) =>
- (kind: @unchecked) match {
- case BOOL => mcode.Emit(OpCodes.Ldelem_I1)
- case BYTE => mcode.Emit(OpCodes.Ldelem_I1) // I1 for System.SByte, i.e. a scala.Byte
- case SHORT => mcode.Emit(OpCodes.Ldelem_I2)
- case CHAR => mcode.Emit(OpCodes.Ldelem_U2)
- case INT => mcode.Emit(OpCodes.Ldelem_I4)
- case LONG => mcode.Emit(OpCodes.Ldelem_I8)
- case FLOAT => mcode.Emit(OpCodes.Ldelem_R4)
- case DOUBLE => mcode.Emit(OpCodes.Ldelem_R8)
- case REFERENCE(cls) => mcode.Emit(OpCodes.Ldelem_Ref)
- case ARRAY(elem) => mcode.Emit(OpCodes.Ldelem_Ref)
-
- // case UNIT is not possible: an Array[Unit] will be an
- // Array[scala.runtime.BoxedUnit] (-> case REFERENCE)
- }
-
- case LOAD_LOCAL(local) => loadLocalOrAddress(local, "load_local", false)
-
- case CIL_LOAD_LOCAL_ADDRESS(local) => loadLocalOrAddress(local, "cil_load_local_address", true)
-
- case LOAD_FIELD(field, isStatic) => loadFieldOrAddress(field, isStatic, "load_field", false)
-
- case CIL_LOAD_FIELD_ADDRESS(field, isStatic) => loadFieldOrAddress(field, isStatic, "cil_load_field_address", true)
-
- case CIL_LOAD_ARRAY_ITEM_ADDRESS(kind) => mcode.Emit(OpCodes.Ldelema, msilType(kind))
-
- case CIL_NEWOBJ(msym) =>
- assert(msym.isClassConstructor)
- val constructorInfo: ConstructorInfo = getConstructor(msym)
- mcode.Emit(OpCodes.Newobj, constructorInfo)
-
- case LOAD_MODULE(module) =>
- debuglog("Generating LOAD_MODULE for: " + showsym(module))
- mcode.Emit(OpCodes.Ldsfld, getModuleInstanceField(module))
-
- case STORE_ARRAY_ITEM(kind) =>
- (kind: @unchecked) match {
- case BOOL => mcode.Emit(OpCodes.Stelem_I1)
- case BYTE => mcode.Emit(OpCodes.Stelem_I1)
- case SHORT => mcode.Emit(OpCodes.Stelem_I2)
- case CHAR => mcode.Emit(OpCodes.Stelem_I2)
- case INT => mcode.Emit(OpCodes.Stelem_I4)
- case LONG => mcode.Emit(OpCodes.Stelem_I8)
- case FLOAT => mcode.Emit(OpCodes.Stelem_R4)
- case DOUBLE => mcode.Emit(OpCodes.Stelem_R8)
- case REFERENCE(cls) => mcode.Emit(OpCodes.Stelem_Ref)
- case ARRAY(elem) => mcode.Emit(OpCodes.Stelem_Ref) // @TODO: test this! (occurs when calling a Array[Object]* vararg param method)
-
- // case UNIT not possible (see comment at LOAD_ARRAY_ITEM)
- }
-
- case STORE_LOCAL(local) =>
- val isArg = local.arg
- val i = local.index
- debuglog("store_local for " + local + ", index " + i)
-
- // there are some locals defined by the compiler that
- // are isArg and need to be stored.
- if (isArg) {
- if (i >= -128 && i <= 127)
- mcode.Emit(OpCodes.Starg_S, i)
- else
- mcode.Emit(OpCodes.Starg, i)
- } else {
- i match {
- case 0 => mcode.Emit(OpCodes.Stloc_0)
- case 1 => mcode.Emit(OpCodes.Stloc_1)
- case 2 => mcode.Emit(OpCodes.Stloc_2)
- case 3 => mcode.Emit(OpCodes.Stloc_3)
- case _ =>
- if (i >= -128 && i <= 127)
- mcode.Emit(OpCodes.Stloc_S, localBuilders(local))
- else
- mcode.Emit(OpCodes.Stloc, localBuilders(local))
- }
- }
-
- case STORE_THIS(_) =>
- // this only works for impl classes because the self parameter comes first
- // in the method signature. If that changes, this code has to be revisited.
- mcode.Emit(OpCodes.Starg_S, 0)
-
- case STORE_FIELD(field, isStatic) =>
- val fieldInfo = fields.get(field) match {
- case Some(fInfo) => fInfo
- case None =>
- val fInfo = getType(field.owner).GetField(msilName(field))
- fields(field) = fInfo
- fInfo
- }
- mcode.Emit(if (isStatic) OpCodes.Stsfld else OpCodes.Stfld, fieldInfo)
-
- case CALL_PRIMITIVE(primitive) =>
- genPrimitive(primitive, instr.pos)
-
- case CALL_METHOD(msym, style) =>
- if (msym.isClassConstructor) {
- val constructorInfo: ConstructorInfo = getConstructor(msym)
- (style: @unchecked) match {
- // normal constructor calls are Static..
- case Static(_) =>
- if (method.symbol.isClassConstructor && method.symbol.owner == msym.owner)
- // we're generating a constructor (method: IMethod is a constructor), and we're
- // calling another constructor of the same class.
-
- // @LUC TODO: this can probably break, namely when having: class A { def this() { new A() } }
- // instead, the CALL_METHOD should carry additional information telling whether it's
- // an instance-creation constructor call or not.
- mcode.Emit(OpCodes.Call, constructorInfo)
- else
- mcode.Emit(OpCodes.Newobj, constructorInfo)
- case SuperCall(_) =>
- mcode.Emit(OpCodes.Call, constructorInfo)
- if (isStaticModule(clasz.symbol) &&
- notInitializedModules.contains(clasz.symbol) &&
- method.symbol.isClassConstructor)
- {
- notInitializedModules -= clasz.symbol
- mcode.Emit(OpCodes.Ldarg_0)
- mcode.Emit(OpCodes.Stsfld, getModuleInstanceField(clasz.symbol))
- }
- }
-
- } else {
-
- var doEmit = true
- getTypeOpt(msym.owner) match {
- case Some(typ) if (typ.IsEnum) => {
- def negBool() = {
- mcode.Emit(OpCodes.Ldc_I4_0)
- mcode.Emit(OpCodes.Ceq)
- }
- doEmit = false
- val name = msym.name
- if (name eq nme.EQ) { mcode.Emit(OpCodes.Ceq) }
- else if (name eq nme.NE) { mcode.Emit(OpCodes.Ceq); negBool }
- else if (name eq nme.LT) { mcode.Emit(OpCodes.Clt) }
- else if (name eq nme.LE) { mcode.Emit(OpCodes.Cgt); negBool }
- else if (name eq nme.GT) { mcode.Emit(OpCodes.Cgt) }
- else if (name eq nme.GE) { mcode.Emit(OpCodes.Clt); negBool }
- else if (name eq nme.OR) { mcode.Emit(OpCodes.Or) }
- else if (name eq nme.AND) { mcode.Emit(OpCodes.And) }
- else if (name eq nme.XOR) { mcode.Emit(OpCodes.Xor) }
- else
- doEmit = true
- }
- case _ => ()
- }
-
- // method: implicit view(FunctionX[PType0, PType1, ...,PTypeN, ResType]):DelegateType
- val (isDelegateView, paramType, resType) = beforeTyper {
- msym.tpe match {
- case MethodType(params, resultType)
- if (params.length == 1 && msym.name == nme.view_) =>
- val paramType = params(0).tpe
- val isDel = definitions.isCorrespondingDelegate(resultType, paramType)
- (isDel, paramType, resultType)
- case _ => (false, null, null)
- }
- }
- if (doEmit && isDelegateView) {
- doEmit = false
- createDelegateCaller(paramType, resType)
- }
-
- if (doEmit &&
- (msym.name == nme.PLUS || msym.name == nme.MINUS)
- && clrTypes.isDelegateType(msilType(msym.owner.tpe)))
- {
- doEmit = false
- val methodInfo: MethodInfo = getMethod(msym)
- // call it as a static method, even if the compiler (symbol) thinks it's virtual
- mcode.Emit(OpCodes.Call, methodInfo)
- mcode.Emit(OpCodes.Castclass, msilType(msym.owner.tpe))
- }
-
- if (doEmit && definitions.Delegate_scalaCallers.contains(msym)) {
- doEmit = false
- val methodSym: Symbol = definitions.Delegate_scalaCallerTargets(msym)
- val delegateType: Type = msym.tpe match {
- case MethodType(_, retType) => retType
- case _ => abort("not a method type: " + msym.tpe)
- }
- val methodInfo: MethodInfo = getMethod(methodSym)
- val delegCtor = msilType(delegateType).GetConstructor(Array(MOBJECT, INT_PTR))
- if (methodSym.isStatic) {
- mcode.Emit(OpCodes.Ldftn, methodInfo)
- } else {
- mcode.Emit(OpCodes.Dup)
- mcode.Emit(OpCodes.Ldvirtftn, methodInfo)
- }
- mcode.Emit(OpCodes.Newobj, delegCtor)
- }
-
- if (doEmit) {
- val methodInfo: MethodInfo = getMethod(msym)
- (style: @unchecked) match {
- case SuperCall(_) =>
- mcode.Emit(OpCodes.Call, methodInfo)
- case Dynamic =>
- // methodInfo.DeclaringType is null for global methods
- val isValuetypeMethod = (methodInfo.DeclaringType ne null) && (methodInfo.DeclaringType.IsValueType)
- val isValuetypeVirtualMethod = isValuetypeMethod && (methodInfo.IsVirtual)
- if (dynToStatMapped(msym)) {
- mcode.Emit(OpCodes.Call, methodInfo)
- } else if (isValuetypeVirtualMethod) {
- mcode.Emit(OpCodes.Constrained, methodInfo.DeclaringType)
- mcode.Emit(OpCodes.Callvirt, methodInfo)
- } else if (isValuetypeMethod) {
- // otherwise error "Callvirt on a value type method" ensues
- mcode.Emit(OpCodes.Call, methodInfo)
- } else {
- mcode.Emit(OpCodes.Callvirt, methodInfo)
- }
- case Static(_) =>
- if(methodInfo.IsVirtual && !mcode.Ldarg0WasJustEmitted) {
- mcode.Emit(OpCodes.Callvirt, methodInfo)
- } else mcode.Emit(OpCodes.Call, methodInfo)
- }
- }
- }
-
- case BOX(boxType) =>
- emitBox(mcode, boxType)
-
- case UNBOX(boxType) =>
- emitUnbox(mcode, boxType)
-
- case CIL_UNBOX(boxType) =>
- mcode.Emit(OpCodes.Unbox, msilType(boxType))
-
- case CIL_INITOBJ(valueType) =>
- mcode.Emit(OpCodes.Initobj, msilType(valueType))
-
- case NEW(REFERENCE(cls)) =>
- // the next instruction must be a DUP, see comment on `var previousWasNEW`
- previousWasNEW = true
-
- // works also for arrays and reference-types
- case CREATE_ARRAY(elem, dims) =>
- // TODO: handle multi dimensional arrays
- assert(dims == 1, "Can't handle multi dimensional arrays")
- mcode.Emit(OpCodes.Newarr, msilType(elem))
-
- // works for arrays and reference-types
- case IS_INSTANCE(tpe) =>
- mcode.Emit(OpCodes.Isinst, msilType(tpe))
- mcode.Emit(OpCodes.Ldnull)
- mcode.Emit(OpCodes.Ceq)
- mcode.Emit(OpCodes.Ldc_I4_0)
- mcode.Emit(OpCodes.Ceq)
-
- // works for arrays and reference-types
- // part from the scala reference: "S <: T does not imply
- // Array[S] <: Array[T] in Scala. However, it is possible
- // to cast an array of S to an array of T if such a cast
- // is permitted in the host environment."
- case CHECK_CAST(tpknd) =>
- val tMSIL = msilType(tpknd)
- mcode.Emit(OpCodes.Castclass, tMSIL)
-
- // no SWITCH is generated when there's
- // - a default case ("case _ => ...") in the matching expr
- // - OR is used ("case 1 | 2 => ...")
- case SWITCH(tags, branches) =>
- // tags is List[List[Int]]; a list of integers for every label.
- // if the int on stack is 4, and 4 is in the second list => jump
- // to second label
- // branches is List[BasicBlock]
- // the labels to jump to (the last one is the default one)
-
- val switchLocal = mcode.DeclareLocal(MINT)
- // several switch variables will appear with the same name in the
- // assembly code, but this causes no trouble
- switchLocal.SetLocalSymInfo("$switch_var")
-
- mcode.Emit(OpCodes.Stloc, switchLocal)
- var i = 0
- for (l <- tags) {
- var targetLabel = labels(branches(i))
- for (i <- l) {
- mcode.Emit(OpCodes.Ldloc, switchLocal)
- loadI4(i, mcode)
- mcode.Emit(OpCodes.Beq, targetLabel)
- }
- i += 1
- }
- val defaultTarget = labels(branches(i))
- if (next != branches(i))
- mcode.Emit(OpCodes.Br, defaultTarget)
-
- case JUMP(whereto) =>
- val (leaveHandler, leaveFinally, lfTarget) = leavesHandler(block, whereto)
- if (leaveHandler) {
- if (leaveFinally) {
- if (lfTarget.isDefined) mcode.Emit(OpCodes.Leave, lfTarget.get)
- else mcode.Emit(OpCodes.Endfinally)
- } else
- mcode.Emit(OpCodes.Leave, labels(whereto))
- } else if (next != whereto)
- mcode.Emit(OpCodes.Br, labels(whereto))
-
- case CJUMP(success, failure, cond, kind) =>
- // cond is TestOp (see Primitives.scala), and can take
- // values EQ, NE, LT, GE LE, GT
- // kind is TypeKind
- val isFloat = kind == FLOAT || kind == DOUBLE
- val emit = (c: TestOp, l: Label) => emitBr(c, l, isFloat)
- emitCondBr(block, cond, success, failure, next, emit)
-
- case CZJUMP(success, failure, cond, kind) =>
- emitCondBr(block, cond, success, failure, next, emitBrBool(_, _))
-
- case RETURN(kind) =>
- if (currentHandlers.isEmpty)
- mcode.Emit(OpCodes.Ret)
- else {
- val (local, label) = returnFromHandler(kind)
- if (kind != UNIT)
- mcode.Emit(OpCodes.Stloc, local)
- mcode.Emit(OpCodes.Leave, label)
- }
-
- case THROW(_) =>
- mcode.Emit(OpCodes.Throw)
-
- case DROP(kind) =>
- mcode.Emit(OpCodes.Pop)
-
- case DUP(kind) =>
- // see comment on `var previousWasNEW`
- if (!previousWasNEW)
- mcode.Emit(OpCodes.Dup)
- else
- previousWasNEW = false
-
- case MONITOR_ENTER() =>
- mcode.Emit(OpCodes.Call, MMONITOR_ENTER)
-
- case MONITOR_EXIT() =>
- mcode.Emit(OpCodes.Call, MMONITOR_EXIT)
-
- case SCOPE_ENTER(_) | SCOPE_EXIT(_) | LOAD_EXCEPTION(_) =>
- ()
- }
-
- } // end for (instr <- b) { .. }
- } // end genBlock
-
- def genPrimitive(primitive: Primitive, pos: Position) {
- primitive match {
- case Negation(kind) =>
- kind match {
- // CHECK: is it possible to get this for BOOL? If so, verify.
- case BOOL | BYTE | CHAR | SHORT | INT | LONG | FLOAT | DOUBLE =>
- mcode.Emit(OpCodes.Neg)
-
- case _ => abort("Impossible to negate a " + kind)
- }
-
- case Arithmetic(op, kind) =>
- op match {
- case ADD => mcode.Emit(OpCodes.Add)
- case SUB => mcode.Emit(OpCodes.Sub)
- case MUL => mcode.Emit(OpCodes.Mul)
- case DIV => mcode.Emit(OpCodes.Div)
- case REM => mcode.Emit(OpCodes.Rem)
- case NOT => mcode.Emit(OpCodes.Not) //bitwise complement (one's complement)
- case _ => abort("Unknown arithmetic primitive " + primitive )
- }
-
- case Logical(op, kind) => op match {
- case AND => mcode.Emit(OpCodes.And)
- case OR => mcode.Emit(OpCodes.Or)
- case XOR => mcode.Emit(OpCodes.Xor)
- }
-
- case Shift(op, kind) => op match {
- case LSL => mcode.Emit(OpCodes.Shl)
- case ASR => mcode.Emit(OpCodes.Shr)
- case LSR => mcode.Emit(OpCodes.Shr_Un)
- }
-
- case Conversion(src, dst) =>
- debuglog("Converting from: " + src + " to: " + dst)
-
- dst match {
- case BYTE => mcode.Emit(OpCodes.Conv_I1) // I1 for System.SByte, i.e. a scala.Byte
- case SHORT => mcode.Emit(OpCodes.Conv_I2)
- case CHAR => mcode.Emit(OpCodes.Conv_U2)
- case INT => mcode.Emit(OpCodes.Conv_I4)
- case LONG => mcode.Emit(OpCodes.Conv_I8)
- case FLOAT => mcode.Emit(OpCodes.Conv_R4)
- case DOUBLE => mcode.Emit(OpCodes.Conv_R8)
- case _ =>
- Console.println("Illegal conversion at: " + clasz +
- " at: " + pos.source + ":" + pos.line)
- }
-
- case ArrayLength(_) =>
- mcode.Emit(OpCodes.Ldlen)
-
- case StartConcat =>
- mcode.Emit(OpCodes.Newobj, MSTRING_BUILDER_CONSTR)
-
-
- case StringConcat(el) =>
- val elemType : MsilType = el match {
- case REFERENCE(_) | ARRAY(_) => MOBJECT
- case _ => msilType(el)
- }
-
- val argTypes:Array[MsilType] = Array(elemType)
- val stringBuilderAppend = MSTRING_BUILDER.GetMethod("Append", argTypes )
- mcode.Emit(OpCodes.Callvirt, stringBuilderAppend)
-
- case EndConcat =>
- mcode.Emit(OpCodes.Callvirt, MSTRING_BUILDER_TOSTRING)
-
- case _ =>
- abort("Unimplemented primitive " + primitive)
- }
- } // end genPrimitive
-
-
- ////////////////////// loading ///////////////////////
-
- def loadI4(value: Int, code: ILGenerator): Unit = value match {
- case -1 => code.Emit(OpCodes.Ldc_I4_M1)
- case 0 => code.Emit(OpCodes.Ldc_I4_0)
- case 1 => code.Emit(OpCodes.Ldc_I4_1)
- case 2 => code.Emit(OpCodes.Ldc_I4_2)
- case 3 => code.Emit(OpCodes.Ldc_I4_3)
- case 4 => code.Emit(OpCodes.Ldc_I4_4)
- case 5 => code.Emit(OpCodes.Ldc_I4_5)
- case 6 => code.Emit(OpCodes.Ldc_I4_6)
- case 7 => code.Emit(OpCodes.Ldc_I4_7)
- case 8 => code.Emit(OpCodes.Ldc_I4_8)
- case _ =>
- if (value >= -128 && value <= 127)
- code.Emit(OpCodes.Ldc_I4_S, value)
- else
- code.Emit(OpCodes.Ldc_I4, value)
- }
-
- def loadArg(code: ILGenerator, loadAddr: Boolean)(i: Int) =
- if (loadAddr) {
- if (i >= -128 && i <= 127)
- code.Emit(OpCodes.Ldarga_S, i)
- else
- code.Emit(OpCodes.Ldarga, i)
- } else {
- i match {
- case 0 => code.Emit(OpCodes.Ldarg_0)
- case 1 => code.Emit(OpCodes.Ldarg_1)
- case 2 => code.Emit(OpCodes.Ldarg_2)
- case 3 => code.Emit(OpCodes.Ldarg_3)
- case _ =>
- if (i >= -128 && i <= 127)
- code.Emit(OpCodes.Ldarg_S, i)
- else
- code.Emit(OpCodes.Ldarg, i)
- }
- }
-
- def loadLocal(i: Int, local: Local, code: ILGenerator, loadAddr: Boolean) =
- if (loadAddr) {
- if (i >= -128 && i <= 127)
- code.Emit(OpCodes.Ldloca_S, localBuilders(local))
- else
- code.Emit(OpCodes.Ldloca, localBuilders(local))
- } else {
- i match {
- case 0 => code.Emit(OpCodes.Ldloc_0)
- case 1 => code.Emit(OpCodes.Ldloc_1)
- case 2 => code.Emit(OpCodes.Ldloc_2)
- case 3 => code.Emit(OpCodes.Ldloc_3)
- case _ =>
- if (i >= -128 && i <= 127)
- code.Emit(OpCodes.Ldloc_S, localBuilders(local))
- else
- code.Emit(OpCodes.Ldloc, localBuilders(local))
- }
- }
-
- ////////////////////// branches ///////////////////////
-
- /** Returns a Triple (Boolean, Boolean, Option[Label])
- * - whether the jump leaves some exception block (try / catch / finally)
- * - whether it leaves a finally handler (finally block, but not its try / catch)
- * - a label where to jump for leaving the finally handler
- * . None to leave directly using `endfinally`
- * . Some(label) to emit `leave label` (for try / catch inside a finally handler)
- */
- def leavesHandler(from: BasicBlock, to: BasicBlock): (Boolean, Boolean, Option[Label]) =
- if (currentHandlers.isEmpty) (false, false, None)
- else {
- val h = currentHandlers.head
- val leaveHead = { h.covers(from) != h.covers(to) ||
- h.blocks.contains(from) != h.blocks.contains(to) }
- if (leaveHead) {
- // we leave the innermost exception block.
- // find out if we also leave some `finally` handler
- currentHandlers.find(e => {
- e.cls == NoSymbol && e.blocks.contains(from) != e.blocks.contains(to)
- }) match {
- case Some(finallyHandler) =>
- if (h == finallyHandler) {
- // the finally handler is the innermost, so we can emit `endfinally` directly
- (true, true, None)
- } else {
- // we need to `Leave` to the `endfinally` of the next outer finally handler
- val l = endFinallyLabels.getOrElseUpdate(finallyHandler, mcode.DefineLabel())
- (true, true, Some(l))
- }
- case None =>
- (true, false, None)
- }
- } else (false, false, None)
- }
-
- def emitCondBr(block: BasicBlock, cond: TestOp, success: BasicBlock, failure: BasicBlock,
- next: BasicBlock, emitBrFun: (TestOp, Label) => Unit) {
- val (sLeaveHandler, sLeaveFinally, slfTarget) = leavesHandler(block, success)
- val (fLeaveHandler, fLeaveFinally, flfTarget) = leavesHandler(block, failure)
-
- if (sLeaveHandler || fLeaveHandler) {
- val sLabelOpt = if (sLeaveHandler) {
- val leaveSLabel = mcode.DefineLabel()
- emitBrFun(cond, leaveSLabel)
- Some(leaveSLabel)
- } else {
- emitBrFun(cond, labels(success))
- None
- }
-
- if (fLeaveHandler) {
- if (fLeaveFinally) {
- if (flfTarget.isDefined) mcode.Emit(OpCodes.Leave, flfTarget.get)
- else mcode.Emit(OpCodes.Endfinally)
- } else
- mcode.Emit(OpCodes.Leave, labels(failure))
- } else
- mcode.Emit(OpCodes.Br, labels(failure))
-
- sLabelOpt.map(l => {
- mcode.MarkLabel(l)
- if (sLeaveFinally) {
- if (slfTarget.isDefined) mcode.Emit(OpCodes.Leave, slfTarget.get)
- else mcode.Emit(OpCodes.Endfinally)
- } else
- mcode.Emit(OpCodes.Leave, labels(success))
- })
- } else {
- if (next == success) {
- emitBrFun(cond.negate, labels(failure))
- } else {
- emitBrFun(cond, labels(success))
- if (next != failure) {
- mcode.Emit(OpCodes.Br, labels(failure))
- }
- }
- }
- }
-
- def emitBr(condition: TestOp, dest: Label, isFloat: Boolean) {
- condition match {
- case EQ => mcode.Emit(OpCodes.Beq, dest)
- case NE => mcode.Emit(OpCodes.Bne_Un, dest)
- case LT => mcode.Emit(if (isFloat) OpCodes.Blt_Un else OpCodes.Blt, dest)
- case GE => mcode.Emit(if (isFloat) OpCodes.Bge_Un else OpCodes.Bge, dest)
- case LE => mcode.Emit(if (isFloat) OpCodes.Ble_Un else OpCodes.Ble, dest)
- case GT => mcode.Emit(if (isFloat) OpCodes.Bgt_Un else OpCodes.Bgt, dest)
- }
- }
-
- def emitBrBool(cond: TestOp, dest: Label) {
- (cond: @unchecked) match {
- // EQ -> Brfalse, NE -> Brtrue; this is because we come from
- // a CZJUMP. If the value on the stack is 0 (e.g. a boolean
- // method returned false), and we are in the case EQ, then
- // we need to emit Brfalse (EQ Zero means false), and vice versa.
- case EQ => mcode.Emit(OpCodes.Brfalse, dest)
- case NE => mcode.Emit(OpCodes.Brtrue, dest)
- }
- }
-
- ////////////////////// local vars ///////////////////////
-
- /**
- * Compute the indexes of each local variable of the given
- * method.
- */
- def computeLocalVarsIndex(m: IMethod) {
- var idx = if (m.symbol.isStaticMember) 0 else 1
-
- val params = m.params
- for (l <- params) {
- debuglog("Index value for parameter " + l + ": " + idx)
- l.index = idx
- idx += 1 // sizeOf(l.kind)
- }
-
- val locvars = m.locals filterNot (params contains)
- idx = 0
-
- for (l <- locvars) {
- debuglog("Index value for local variable " + l + ": " + idx)
- l.index = idx
- idx += 1 // sizeOf(l.kind)
- }
-
- }
-
- ////////////////////// Utilities ////////////////////////
-
- /** Return a name for this symbol that can be used on the .NET
- * platform. It removes spaces from names.
- *
- * Special handling: scala.Nothing and scala.Null are 'erased' to
- * scala.runtime.Nothing$ and scala.runtime.Null$. This is needed because they are
- * not real classes, and they mean 'abrupt termination upon evaluation
- * of that expression' or 'null' respectively. This handling is
- * done already in GenICode, but here we need to remove references
- * from method signatures to these types, because such classes can
- * not exist in the classpath: the type checker will be very confused.
- */
- def msilName(sym: Symbol): String = {
- val suffix = sym.moduleSuffix
- // Flags.JAVA: "symbol was not defined by a scala-class" (java, or .net-class)
-
- if (sym == definitions.NothingClass)
- return "scala.runtime.Nothing$"
- else if (sym == definitions.NullClass)
- return "scala.runtime.Null$"
-
- (if (sym.isClass || (sym.isModule && !sym.isMethod)) {
- if (sym.isNestedClass) sym.simpleName
- else sym.fullName
- } else
- sym.simpleName.toString.trim()) + suffix
- }
-
-
- ////////////////////// flags ///////////////////////
-
- def msilTypeFlags(sym: Symbol): Int = {
- var mf: Int = TypeAttributes.AutoLayout | TypeAttributes.AnsiClass
-
- if(sym.isNestedClass) {
- mf = mf | (if (sym hasFlag Flags.PRIVATE) TypeAttributes.NestedPrivate else TypeAttributes.NestedPublic)
- } else {
- mf = mf | (if (sym hasFlag Flags.PRIVATE) TypeAttributes.NotPublic else TypeAttributes.Public)
- }
- mf = mf | (if (sym hasFlag Flags.ABSTRACT) TypeAttributes.Abstract else 0)
- mf = mf | (if (sym.isTrait && !sym.isImplClass) TypeAttributes.Interface else TypeAttributes.Class)
- mf = mf | (if (sym isFinal) TypeAttributes.Sealed else 0)
-
- sym.annotations foreach { a => a match {
- case AnnotationInfo(SerializableAttr, _, _) =>
- // TODO: add the Serializable TypeAttribute also if the annotation
- // System.SerializableAttribute is present (.net annotation, not scala)
- // Best way to do it: compare with
- // definitions.getClass("System.SerializableAttribute").tpe
- // when frontend available
- mf = mf | TypeAttributes.Serializable
- case _ => ()
- }}
-
- mf
- // static: not possible (or?)
- }
-
- def msilMethodFlags(sym: Symbol): Short = {
- var mf: Int = MethodAttributes.HideBySig |
- (if (sym hasFlag Flags.PRIVATE) MethodAttributes.Private
- else MethodAttributes.Public)
-
- if (!sym.isClassConstructor) {
- if (sym.isStaticMember)
- mf = mf | FieldAttributes.Static // coincidentally, same value as for MethodAttributes.Static ...
- else {
- mf = mf | MethodAttributes.Virtual
- if (sym.isFinal && !getType(sym.owner).IsInterface)
- mf = mf | MethodAttributes.Final
- if (sym.isDeferred || getType(sym.owner).IsInterface)
- mf = mf | MethodAttributes.Abstract
- }
- }
-
- if (sym.isStaticMember) {
- mf = mf | MethodAttributes.Static
- }
-
- // constructors of module classes should be private
- if (sym.isPrimaryConstructor && isTopLevelModule(sym.owner)) {
- mf |= MethodAttributes.Private
- mf &= ~(MethodAttributes.Public)
- }
-
- mf.toShort
- }
-
- def msilFieldFlags(sym: Symbol): Short = {
- var mf: Int =
- if (sym hasFlag Flags.PRIVATE) FieldAttributes.Private
- else if (sym hasFlag Flags.PROTECTED) FieldAttributes.FamORAssem
- else FieldAttributes.Public
-
- if (sym hasFlag Flags.FINAL)
- mf = mf | FieldAttributes.InitOnly
-
- if (sym.isStaticMember)
- mf = mf | FieldAttributes.Static
-
- // TRANSIENT: "not serialized", VOLATILE: doesn't exist on .net
- // TODO: add this annotation also if the class has the custom attribute
- // System.NotSerializedAttribute
- sym.annotations.foreach( a => a match {
- case AnnotationInfo(TransientAtt, _, _) =>
- mf = mf | FieldAttributes.NotSerialized
- case _ => ()
- })
-
- mf.toShort
- }
-
- ////////////////////// builders, types ///////////////////////
-
- var entryPoint: Symbol = _
-
- val notInitializedModules = mutable.HashSet[Symbol]()
-
- // TODO: create fields also in def createType, and not in genClass,
- // add a getField method (it only works as it is because fields are never
- // accessed from outside their class)
-
- val localBuilders = mutable.HashMap[Local, LocalBuilder]()
-
- private[GenMSIL] def findEntryPoint(cls: IClass) {
-
- def isEntryPoint(sym: Symbol):Boolean = {
- if (isStaticModule(sym.owner) && msilName(sym) == "main")
- if (sym.tpe.paramTypes.length == 1) {
- toTypeKind(sym.tpe.paramTypes(0)) match {
- case ARRAY(elem) =>
- if (elem.toType.typeSymbol == definitions.StringClass) {
- return true
- }
- case _ => ()
- }
- }
- false
- }
-
- if((entryPoint == null) && opt.showClass.isDefined) { // TODO introduce dedicated setting instead
- val entryclass = opt.showClass.get.toString
- val cfn = cls.symbol.fullName
- if(cfn == entryclass) {
- for (m <- cls.methods; if isEntryPoint(m.symbol)) { entryPoint = m.symbol }
- if(entryPoint == null) { warning("Couldn't find main method in class " + cfn) }
- }
- }
-
- if (firstSourceName == "")
- if (cls.symbol.sourceFile != null) // is null for nested classes
- firstSourceName = cls.symbol.sourceFile.name
- }
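
The entry-point test above boils down to: a method named `main` on a static module, taking a single `Array[String]` parameter. A minimal sketch of that check over a toy method description (the MethodDesc names are illustrative only):

    object EntryPointSketch {
      final case class MethodDesc(owner: String, name: String,
                                  paramTypes: List[String], ownerIsStaticModule: Boolean)

      // Same shape as isEntryPoint: "main", on a static module,
      // with exactly one Array[String] parameter.
      def isEntryPoint(m: MethodDesc): Boolean =
        m.ownerIsStaticModule && m.name == "main" &&
          (m.paramTypes match {
            case List("Array[String]") => true
            case _                     => false
          })

      def main(args: Array[String]): Unit = {
        val ok  = MethodDesc("demo.Main$", "main", List("Array[String]"), ownerIsStaticModule = true)
        val bad = MethodDesc("demo.Main$", "main", List("Array[Int]"), ownerIsStaticModule = true)
        assert(isEntryPoint(ok) && !isEntryPoint(bad))
        println("entry point check ok")
      }
    }
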
-
- // #####################################################################
- // get and create types
-
- private def msilType(t: TypeKind): MsilType = (t: @unchecked) match {
- case UNIT => MVOID
- case BOOL => MBOOL
- case BYTE => MBYTE
- case SHORT => MSHORT
- case CHAR => MCHAR
- case INT => MINT
- case LONG => MLONG
- case FLOAT => MFLOAT
- case DOUBLE => MDOUBLE
- case REFERENCE(cls) => getType(cls)
- case ARRAY(elem) =>
- msilType(elem) match {
- // For type builders, cannot call "clrTypes.mkArrayType" because this looks up
- // the type "tp" in the assembly (not in the HashMap "types" of the backend).
- // This can fail for nested types because the builders are not complete yet.
- case tb: TypeBuilder => tb.MakeArrayType()
- case tp: MsilType => clrTypes.mkArrayType(tp)
- }
- }
-
- private def msilType(tpe: Type): MsilType = msilType(toTypeKind(tpe))
-
- private def msilParamTypes(sym: Symbol): Array[MsilType] = {
- sym.tpe.paramTypes.map(msilType).toArray
- }
-
- def getType(sym: Symbol) = getTypeOpt(sym).getOrElse(abort(showsym(sym)))
-
- /**
- * Get an MSIL type from a symbol. First look in the clrTypes.types map, then
- * look up the name using clrTypes.getType.
- */
- def getTypeOpt(sym: Symbol): Option[MsilType] = {
- val tmp = types.get(sym)
- tmp match {
- case typ @ Some(_) => typ
- case None =>
- def typeString(sym: Symbol): String = {
- val s = if (sym.isNestedClass) typeString(sym.owner) +"+"+ sym.simpleName
- else sym.fullName
- if (sym.isModuleClass && !sym.isTrait) s + "$" else s
- }
- val name = typeString(sym)
- val typ = clrTypes.getType(name)
- if (typ == null)
- None
- else {
- types(sym) = typ
- Some(typ)
- }
- }
- }
-
- def mapType(sym: Symbol, mType: MsilType) {
- assert(mType != null, showsym(sym))
- types(sym) = mType
- }
-
- def createTypeBuilder(iclass: IClass) {
- /**
- * First look in the clrTypes.types map; if that fails, check whether it's a class being compiled; otherwise
- * look up by name (clrTypes.getType calls the static method msil.Type.GetType(fullname)).
- */
- def msilTypeFromSym(sym: Symbol): MsilType = {
- types.get(sym).getOrElse {
- classes.get(sym) match {
- case Some(iclass) =>
- msilTypeBuilderFromSym(sym)
- case None =>
- getType(sym)
- }
- }
- }
-
- def msilTypeBuilderFromSym(sym: Symbol): TypeBuilder = {
- if(!(types.contains(sym) && types(sym).isInstanceOf[TypeBuilder])){
- val iclass = classes(sym)
- assert(iclass != null)
- createTypeBuilder(iclass)
- }
- types(sym).asInstanceOf[TypeBuilder]
- }
-
- val sym = iclass.symbol
- if (types.contains(sym) && types(sym).isInstanceOf[TypeBuilder])
- return
-
- def isInterface(s: Symbol) = s.isTrait && !s.isImplClass
- val parents: List[Type] =
- if (sym.info.parents.isEmpty) List(definitions.ObjectClass.tpe)
- else sym.info.parents.distinct
-
- val superType : MsilType = if (isInterface(sym)) null else msilTypeFromSym(parents.head.typeSymbol)
- debuglog("super type: " + parents(0).typeSymbol + ", msil type: " + superType)
-
- val interfaces: Array[MsilType] =
- parents.tail.map(p => msilTypeFromSym(p.typeSymbol)).toArray
- if (parents.length > 1) {
- if (settings.debug.value) {
- log("interfaces:")
- for (i <- 0.until(interfaces.length)) {
- log(" type: " + parents(i + 1).typeSymbol + ", msil type: " + interfaces(i))
- }
- }
- }
-
- val tBuilder = if (sym.isNestedClass) {
- val ownerT = msilTypeBuilderFromSym(sym.owner).asInstanceOf[TypeBuilder]
- ownerT.DefineNestedType(msilName(sym), msilTypeFlags(sym), superType, interfaces)
- } else {
- mmodule.DefineType(msilName(sym), msilTypeFlags(sym), superType, interfaces)
- }
- mapType(sym, tBuilder)
- } // createTypeBuilder
-
- def createClassMembers(iclass: IClass) {
- try {
- createClassMembers0(iclass)
- }
- catch {
- case e: Throwable =>
- java.lang.System.err.println(showsym(iclass.symbol))
- java.lang.System.err.println("with methods = " + iclass.methods)
- throw e
- }
- }
-
- def createClassMembers0(iclass: IClass) {
-
- val mtype = getType(iclass.symbol).asInstanceOf[TypeBuilder]
-
- for (ifield <- iclass.fields) {
- val sym = ifield.symbol
- debuglog("Adding field: " + sym.fullName)
-
- var attributes = msilFieldFlags(sym)
- val fieldTypeWithCustomMods =
- new PECustomMod(msilType(sym.tpe),
- customModifiers(sym.annotations))
- val fBuilder = mtype.DefineField(msilName(sym),
- fieldTypeWithCustomMods,
- attributes)
- fields(sym) = fBuilder
- addAttributes(fBuilder, sym.annotations)
- } // all iclass.fields iterated over
-
- if (isStaticModule(iclass.symbol)) {
- val sc = iclass.lookupStaticCtor
- if (sc.isDefined) {
- val m = sc.get
- val oldLastBlock = m.lastBlock
- val lastBlock = m.newBlock()
- oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock))
- // call object's private ctor from static ctor
- lastBlock.emit(CIL_NEWOBJ(iclass.symbol.primaryConstructor))
- lastBlock.emit(DROP(toTypeKind(iclass.symbol.tpe)))
- lastBlock emit RETURN(UNIT)
- lastBlock.close
- }
- }
-
- if (iclass.symbol != definitions.ArrayClass) {
- for (m: IMethod <- iclass.methods) {
- val sym = m.symbol
- debuglog("Creating MethodBuilder for " + Flags.flagsToString(sym.flags) + " " +
- sym.owner.fullName + "::" + sym.name)
-
- val ownerType = getType(sym.enclClass).asInstanceOf[TypeBuilder]
- assert(mtype == ownerType, "mtype = " + mtype + "; ownerType = " + ownerType)
- var paramTypes = msilParamTypes(sym)
- val attr = msilMethodFlags(sym)
-
- if (m.symbol.isClassConstructor) {
- val constr =
- ownerType.DefineConstructor(attr, CallingConventions.Standard, paramTypes)
- for (i <- 0.until(paramTypes.length)) {
- constr.DefineParameter(i, ParameterAttributes.None, msilName(m.params(i).sym))
- }
- mapConstructor(sym, constr)
- addAttributes(constr, sym.annotations)
- } else {
- var resType = msilType(m.returnType)
- val method =
- ownerType.DefineMethod(msilName(sym), attr, resType, paramTypes)
- for (i <- 0.until(paramTypes.length)) {
- method.DefineParameter(i, ParameterAttributes.None, msilName(m.params(i).sym))
- }
- if (!methods.contains(sym))
- mapMethod(sym, method)
- addAttributes(method, sym.annotations)
- debuglog("\t created MethodBuilder " + method)
- }
- }
- } // method builders created for non-array iclass
-
- if (isStaticModule(iclass.symbol)) {
- addModuleInstanceField(iclass.symbol)
- notInitializedModules += iclass.symbol
- if (iclass.lookupStaticCtor.isEmpty) {
- addStaticInit(iclass.symbol)
- }
- }
-
- } // createClassMembers0
-
- private def isTopLevelModule(sym: Symbol): Boolean =
- beforeRefchecks {
- sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass
- }
-
- // if the module is lifted it does not need to be initialized in
- // its static constructor, and the MODULE$ field is not required.
- // The outer class takes care of that.
- private def isStaticModule(sym: Symbol): Boolean = {
- // .net inner classes: removed '!sym.hasFlag(Flags.LIFTED)', added
- // 'sym.isStatic'. -> no longer compatible without skipping flatten!
- sym.isModuleClass && sym.isStatic && !sym.isImplClass
- }
-
- private def isCloneable(sym: Symbol): Boolean = {
- !sym.annotations.forall( a => a match {
- case AnnotationInfo(CloneableAttr, _, _) => false
- case _ => true
- })
- }
-
- private def addModuleInstanceField(sym: Symbol) {
- debuglog("Adding Module-Instance Field for " + showsym(sym))
- val tBuilder = getType(sym).asInstanceOf[TypeBuilder]
- val fb = tBuilder.DefineField(MODULE_INSTANCE_NAME,
- tBuilder,
- (FieldAttributes.Public |
- //FieldAttributes.InitOnly |
- FieldAttributes.Static).toShort)
- fields(sym) = fb
- }
-
-
- // the symbol may be an object symbol (module symbol) or a module-class symbol
- private def getModuleInstanceField(sym: Symbol): FieldInfo = {
- assert(sym.isModule || sym.isModuleClass, "Expected module: " + showsym(sym))
-
- // when called by LOAD_MODULE, the corresponding type may not
- // exist yet -> look it up by name
- val moduleClassSym = if (sym.isModule) sym.moduleClass else sym
-
- // TODO: get module field for modules not defined in the
- // source currently compiling (e.g. Console)
-
- fields get moduleClassSym match {
- case Some(sym) => sym
- case None =>
- //val mclass = types(moduleClassSym)
- val nameInMetadata = nestingAwareFullClassname(moduleClassSym)
- val mClass = clrTypes.getType(nameInMetadata)
- val mfield = mClass.GetField("MODULE$")
- assert(mfield ne null, "module not found " + showsym(moduleClassSym))
- fields(moduleClassSym) = mfield
- mfield
- }
-
- //fields(moduleClassSym)
- }
-
- def nestingAwareFullClassname(csym: Symbol) : String = {
- val suffix = csym.moduleSuffix
- val res = if (csym.isNestedClass)
- nestingAwareFullClassname(csym.owner) + "+" + csym.encodedName
- else
- csym.fullName
- res + suffix
- }
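
The CLR uses '+' rather than '.' or '$' to separate a nested type from its enclosing type, which is what the recursion above produces. A hedged, self-contained sketch over a toy class model (the Cls class is invented for illustration):

    object NestedNameSketch {
      final case class Cls(encodedName: String, owner: Option[Cls] = None,
                           moduleSuffix: String = "")

      // Walk the owner chain, joining nested names with '+', appending the
      // module suffix at each level, just like nestingAwareFullClassname.
      def nestingAwareFullClassname(c: Cls): String = {
        val base = c.owner match {
          case Some(outer) => nestingAwareFullClassname(outer) + "+" + c.encodedName
          case None        => c.encodedName
        }
        base + c.moduleSuffix
      }

      def main(args: Array[String]): Unit = {
        val outer = Cls("demo.Outer")
        val inner = Cls("Inner", owner = Some(outer), moduleSuffix = "$")
        assert(nestingAwareFullClassname(inner) == "demo.Outer+Inner$")
        println("nested naming ok")
      }
    }
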
-
- /** Adds a static initializer which creates an instance of the module
- * class (calls the primary constructor). A special primary constructor
- * will be generated (notInitializedModules) which stores the new instance
- * in the MODULE$ field right after the super call.
- */
- private def addStaticInit(sym: Symbol) {
- val tBuilder = getType(sym).asInstanceOf[TypeBuilder]
-
- val staticInit = tBuilder.DefineConstructor(
- (MethodAttributes.Static | MethodAttributes.Public).toShort,
- CallingConventions.Standard,
- MsilType.EmptyTypes)
-
- val sicode = staticInit.GetILGenerator()
-
- val instanceConstructor = constructors(sym.primaryConstructor)
-
- // there are no constructor parameters. Assuming the constructor takes no parameters
- // is fine: we call (in the static constructor) the constructor of the module class,
- // which takes no arguments - an object definition cannot take constructor arguments.
- sicode.Emit(OpCodes.Newobj, instanceConstructor)
- // the stsfld is done in the instance constructor, just after the super call.
- sicode.Emit(OpCodes.Pop)
-
- sicode.Emit(OpCodes.Ret)
- }
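
In other words, the emitted static constructor is just `newobj` on the module's primary constructor followed by `pop`; the instance constructor publishes the object into MODULE$ right after its super call. A rough Scala sketch of only that ordering (FakeModule and moduleField are invented names standing in for the module class and its MODULE$ field):

    object ModuleInitSketch {
      final class FakeModule private () {
        FakeModule.moduleField = this   // stands in for the stsfld right after the super call
        val greeting: String = "hello"
      }
      object FakeModule {
        var moduleField: FakeModule = _            // stands in for the static MODULE$ field
        def staticInit(): Unit = { new FakeModule(); () }  // newobj; pop
      }

      def main(args: Array[String]): Unit = {
        FakeModule.staticInit()
        assert(FakeModule.moduleField ne null)
        println(FakeModule.moduleField.greeting)
      }
    }
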
-
- private def generateMirrorClass(sym: Symbol) {
- val tBuilder = getType(sym)
- assert(sym.isModuleClass, "Can't generate Mirror-Class for the Non-Module class " + sym)
- debuglog("Dumping mirror class for object: " + sym)
- val moduleName = msilName(sym)
- val mirrorName = moduleName.substring(0, moduleName.length() - 1)
- val mirrorTypeBuilder = mmodule.DefineType(mirrorName,
- TypeAttributes.Class |
- TypeAttributes.Public |
- TypeAttributes.Sealed,
- MOBJECT,
- MsilType.EmptyTypes)
-
- val iclass = classes(sym)
-
- for (m <- sym.tpe.nonPrivateMembers
- if m.owner != definitions.ObjectClass && !m.isProtected &&
- m.isMethod && !m.isClassConstructor && !m.isStaticMember && !m.isCase &&
- !m.isDeferred)
- {
- debuglog(" Mirroring method: " + m)
- val paramTypes = msilParamTypes(m)
- val paramNames: Array[String] = new Array[String](paramTypes.length)
- for (i <- 0 until paramTypes.length)
- paramNames(i) = "x_" + i
-
- // CHECK: verify if getMethodName is better than msilName
- val mirrorMethod = mirrorTypeBuilder.DefineMethod(msilName(m),
- (MethodAttributes.Public |
- MethodAttributes.Static).toShort,
- msilType(m.tpe.resultType),
- paramTypes)
-
- var i = 0
- while (i < paramTypes.length) {
- mirrorMethod.DefineParameter(i, ParameterAttributes.None, paramNames(i))
- i += 1
- }
-
- val mirrorCode = mirrorMethod.GetILGenerator()
- mirrorCode.Emit(OpCodes.Ldsfld, getModuleInstanceField(sym))
- val mInfo = getMethod(m)
- for (paramidx <- 0.until(paramTypes.length)) {
- val mInfoParams = mInfo.GetParameters
- val loadAddr = mInfoParams(paramidx).ParameterType.IsByRef
- loadArg(mirrorCode, loadAddr)(paramidx)
- }
-
- mirrorCode.Emit(OpCodes.Callvirt, getMethod(m))
- mirrorCode.Emit(OpCodes.Ret)
- }
-
- addSymtabAttribute(sym.sourceModule, mirrorTypeBuilder)
-
- mirrorTypeBuilder.CreateType()
- mirrorTypeBuilder.setSourceFilepath(iclass.cunit.source.file.path)
- }
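
Conceptually the mirror class is a set of static forwarders: for each eligible instance method on the module it loads MODULE$, loads the arguments, and calls the real method. A loose, self-contained sketch of that forwarding shape (Greeter and GreeterMirror are invented names):

    object MirrorClassSketch {
      // The "module" with instance methods.
      class Greeter private () { def greet(name: String): String = s"hello, $name" }
      object Greeter { val instance = new Greeter }   // stands in for MODULE$

      // The "mirror": static forwarders that load the singleton and delegate,
      // the way the generated ldsfld / ldarg / callvirt sequence does.
      object GreeterMirror {
        def greet(x_0: String): String = Greeter.instance.greet(x_0)
      }

      def main(args: Array[String]): Unit = {
        assert(GreeterMirror.greet("world") == "hello, world")
        println(GreeterMirror.greet("world"))
      }
    }
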
-
-
- // #####################################################################
- // delegate callers
-
- var delegateCallers: TypeBuilder = _
- var nbDelegateCallers: Int = 0
-
- private def initDelegateCallers() = {
- delegateCallers = mmodule.DefineType("$DelegateCallers", TypeAttributes.Public |
- TypeAttributes.Sealed)
- }
-
- private def createDelegateCaller(functionType: Type, delegateType: Type) = {
- if (delegateCallers == null)
- initDelegateCallers()
- // create a field and store the function object in it
- val mFunctionType: MsilType = msilType(functionType)
- val anonfunField: FieldBuilder = delegateCallers.DefineField(
- "$anonfunField$$" + nbDelegateCallers, mFunctionType,
- (FieldAttributes.InitOnly | FieldAttributes.Public | FieldAttributes.Static).toShort)
- mcode.Emit(OpCodes.Stsfld, anonfunField)
-
-
- // create the static caller method and the delegate object
- val (params, returnType) = delegateType.member(nme.apply).tpe match {
- case MethodType(delParams, delReturn) => (delParams, delReturn)
- case _ => abort("not a delegate type: " + delegateType)
- }
- val caller: MethodBuilder = delegateCallers.DefineMethod(
- "$delegateCaller$$" + nbDelegateCallers,
- (MethodAttributes.Final | MethodAttributes.Public | MethodAttributes.Static).toShort,
- msilType(returnType), (params map (_.tpe)).map(msilType).toArray)
- for (i <- 0 until params.length)
- caller.DefineParameter(i, ParameterAttributes.None, "arg" + i) // FIXME: use name of parameter symbol
- val delegCtor = msilType(delegateType).GetConstructor(Array(MOBJECT, INT_PTR))
- mcode.Emit(OpCodes.Ldnull)
- mcode.Emit(OpCodes.Ldftn, caller)
- mcode.Emit(OpCodes.Newobj, delegCtor)
-
-
- // create the static caller method body
- val functionApply: MethodInfo = getMethod(functionType.member(nme.apply))
- val dcode: ILGenerator = caller.GetILGenerator()
- dcode.Emit(OpCodes.Ldsfld, anonfunField)
- for (i <- 0 until params.length) {
- loadArg(dcode, false /* TODO confirm whether passing actual as-is to formal is correct wrt the ByRef attribute of the param */)(i)
- emitBox(dcode, toTypeKind(params(i).tpe))
- }
- dcode.Emit(OpCodes.Callvirt, functionApply)
- emitUnbox(dcode, toTypeKind(returnType))
- dcode.Emit(OpCodes.Ret)
-
- nbDelegateCallers = nbDelegateCallers + 1
-
- } //def createDelegateCaller
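
The generated caller parks the function object in a static field and exposes a static method whose body forwards (with boxing/unboxing) to the function's apply; the delegate is then constructed over that static method. A very loose Scala analogue of just the forwarding part (no real delegates involved; all names here are illustrative):

    object DelegateCallerSketch {
      // The captured function object, parked in a "static field".
      private var anonfunField: Int => Int = _

      // The "static caller": forwards its argument to the stored function,
      // the way $delegateCaller$$N forwards to Function1.apply.
      def delegateCaller(arg0: Int): Int = anonfunField(arg0)

      def main(args: Array[String]): Unit = {
        anonfunField = (x: Int) => x * 2               // plays the role of the stsfld
        val asFunction: Int => Int = delegateCaller _  // stands in for newobj on the delegate ctor
        assert(asFunction(21) == 42)
        println(asFunction(21))
      }
    }
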
-
- def emitBox(code: ILGenerator, boxType: TypeKind) = (boxType: @unchecked) match {
- // a Unit parameter doesn't make much sense; just load the boxed unit instance
- case UNIT => code.Emit(OpCodes.Ldsfld, boxedUnit)
- case BOOL | BYTE | SHORT | CHAR | INT | LONG | FLOAT | DOUBLE =>
- code.Emit(OpCodes.Box, msilType(boxType))
- case REFERENCE(cls) if clrTypes.isValueType(cls) =>
- code.Emit(OpCodes.Box, (msilType(boxType)))
- case REFERENCE(_) | ARRAY(_) =>
- warning("Tried to BOX a non-valuetype.")
- ()
- }
-
- def emitUnbox(code: ILGenerator, boxType: TypeKind) = (boxType: @unchecked) match {
- case UNIT => code.Emit(OpCodes.Pop)
- /* (1) it's essential to keep the code emitted here (as of now plain calls to System.Convert.ToBlaBla methods)
- behaviorally equivalent w.r.t. BoxesRunTime.unboxToBlaBla methods
- (case null: that's easy, case boxed: track changes to unboxBlaBla)
- (2) See also: asInstanceOf to cast from Any to number,
- tracked in http://lampsvn.epfl.ch/trac/scala/ticket/4437 */
- case BOOL => code.Emit(OpCodes.Call, toBool)
- case BYTE => code.Emit(OpCodes.Call, toSByte)
- case SHORT => code.Emit(OpCodes.Call, toShort)
- case CHAR => code.Emit(OpCodes.Call, toChar)
- case INT => code.Emit(OpCodes.Call, toInt)
- case LONG => code.Emit(OpCodes.Call, toLong)
- case FLOAT => code.Emit(OpCodes.Call, toFloat)
- case DOUBLE => code.Emit(OpCodes.Call, toDouble)
- case REFERENCE(cls) if clrTypes.isValueType(cls) =>
- code.Emit(OpCodes.Unbox, msilType(boxType))
- code.Emit(OpCodes.Ldobj, msilType(boxType))
- case REFERENCE(_) | ARRAY(_) =>
- warning("Tried to UNBOX a non-valuetype.")
- ()
- }
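
The unboxing table above pairs each primitive kind with a conversion call and value classes with unbox/ldobj. A compact sketch of just that dispatch, using strings for the emitted instructions (TypeKind here is a toy enum, not the compiler's, and only a few kinds are modeled):

    object UnboxDispatchSketch {
      sealed trait TypeKind
      case object BOOL extends TypeKind
      case object INT extends TypeKind
      case object DOUBLE extends TypeKind
      final case class REFERENCE(isValueType: Boolean) extends TypeKind

      // Mirrors emitUnbox's shape: primitives call a converter,
      // value types unbox + ldobj, plain references are left alone (with a warning).
      def unboxInstructions(kind: TypeKind): List[String] = kind match {
        case BOOL             => List("call System.Convert.ToBoolean")
        case INT              => List("call System.Convert.ToInt32")
        case DOUBLE           => List("call System.Convert.ToDouble")
        case REFERENCE(true)  => List("unbox", "ldobj")
        case REFERENCE(false) => Nil // warning: tried to UNBOX a non-valuetype
      }

      def main(args: Array[String]): Unit = {
        assert(unboxInstructions(INT).head.startsWith("call"))
        assert(unboxInstructions(REFERENCE(true)) == List("unbox", "ldobj"))
        println("unbox dispatch ok")
      }
    }
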
-
- // #####################################################################
- // get and create methods / constructors
-
- def getConstructor(sym: Symbol): ConstructorInfo = constructors.get(sym) match {
- case Some(constr) => constr
- case None =>
- val mClass = getType(sym.owner)
- val constr = mClass.GetConstructor(msilParamTypes(sym))
- if (constr eq null) {
- java.lang.System.out.println("Cannot find constructor " + sym.owner + "::" + sym.name)
- java.lang.System.out.println("scope = " + sym.owner.tpe.decls)
- abort(sym.fullName)
- }
- else {
- mapConstructor(sym, constr)
- constr
- }
- }
-
- def mapConstructor(sym: Symbol, cInfo: ConstructorInfo) = {
- constructors(sym) = cInfo
- }
-
- private def getMethod(sym: Symbol): MethodInfo = {
-
- methods.get(sym) match {
- case Some(method) => method
- case None =>
- val mClass = getType(sym.owner)
- try {
- val method = mClass.GetMethod(msilName(sym), msilParamTypes(sym),
- msilType(sym.tpe.resultType))
- if (method eq null) {
- java.lang.System.out.println("Cannot find method " + sym.owner + "::" + msilName(sym))
- java.lang.System.out.println("scope = " + sym.owner.tpe.decls)
- abort(sym.fullName)
- }
- else {
- mapMethod(sym, method)
- method
- }
- }
- catch {
- case e: Exception =>
- Console.println("While looking up " + mClass + "::" + sym.nameString)
- Console.println("\t" + showsym(sym))
- throw e
- }
- }
- }
-
- /*
- * add a mapping between sym and mInfo
- */
- private def mapMethod(sym: Symbol, mInfo: MethodInfo) {
- assert (mInfo != null, mInfo)
- methods(sym) = mInfo
- }
-
- /*
- * add mapping between sym and method with newName, paramTypes of newClass
- */
- private def mapMethod(sym: Symbol, newClass: MsilType, newName: String, paramTypes: Array[MsilType]) {
- val methodInfo = newClass.GetMethod(newName, paramTypes)
- assert(methodInfo != null, "Can't find mapping for " + sym + " -> " +
- newName + "(" + paramTypes + ")")
- mapMethod(sym, methodInfo)
- if (methodInfo.IsStatic)
- dynToStatMapped += sym
- }
-
- /*
- * add mapping between method with name and paramTypes of clazz to
- * method with newName and newParamTypes of newClass (used for instance
- * for "wait")
- */
- private def mapMethod(
- clazz: Symbol, name: Name, paramTypes: Array[Type],
- newClass: MsilType, newName: String, newParamTypes: Array[MsilType]) {
- val methodSym = lookupMethod(clazz, name, paramTypes)
- assert(methodSym != null, "cannot find method " + name + "(" +
- paramTypes + ")" + " in class " + clazz)
- mapMethod(methodSym, newClass, newName, newParamTypes)
- }
-
- /*
- * add mapping for member with name and paramTypes to member
- * newName of newClass (same parameters)
- */
- private def mapMethod(
- clazz: Symbol, name: Name, paramTypes: Array[Type],
- newClass: MsilType, newName: String) {
- mapMethod(clazz, name, paramTypes, newClass, newName, paramTypes map msilType)
- }
-
- /*
- * add mapping for all methods with name of clazz to the corresponding
- * method (same parameters) with newName of newClass
- */
- private def mapMethod(
- clazz: Symbol, name: Name,
- newClass: MsilType, newName: String) {
- val memberSym: Symbol = clazz.tpe.member(name)
- memberSym.tpe match {
- // alternatives: List[Symbol]
- case OverloadedType(_, alternatives) =>
- alternatives.foreach(s => mapMethod(s, newClass, newName, msilParamTypes(s)))
-
- // paramTypes: List[Type], resType: Type
- case MethodType(params, resType) =>
- mapMethod(memberSym, newClass, newName, msilParamTypes(memberSym))
-
- case _ =>
- abort("member not found: " + clazz + ", " + name)
- }
- }
-
-
- /*
- * find the method in clazz with name and paramTypes
- */
- private def lookupMethod(clazz: Symbol, name: Name, paramTypes: Array[Type]): Symbol = {
- val memberSym = clazz.tpe.member(name)
- memberSym.tpe match {
- case OverloadedType(_, alternatives) =>
- alternatives.find(s => {
- var i: Int = 0
- var typesOK: Boolean = true
- if (paramTypes.length == s.tpe.paramTypes.length) {
- while(i < paramTypes.length) {
- if (paramTypes(i) != s.tpe.paramTypes(i))
- typesOK = false
- i += 1
- }
- } else {
- typesOK = false
- }
- typesOK
- }) match {
- case Some(sym) => sym
- case None => abort("member of " + clazz + ", " + name + "(" +
- paramTypes + ") not found")
- }
-
- case MethodType(_, _) => memberSym
-
- case _ => abort("member not found: " + name + " of " + clazz)
- }
- }
-
- private def showsym(sym: Symbol): String = (sym.toString +
- "\n symbol = " + Flags.flagsToString(sym.flags) + " " + sym +
- "\n owner = " + Flags.flagsToString(sym.owner.flags) + " " + sym.owner
- )
-
- } // class BytecodeGenerator
-
-} // class GenMSIL
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
index 23f932b5b4..8f439fc800 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
package backend.opt
import scala.tools.nsc.backend.icode.analysis.LubException
-import scala.tools.nsc.symtab._
/**
* @author Iulian Dragos
@@ -97,7 +96,7 @@ abstract class ClosureElimination extends SubComponent {
/* Some embryonic copy propagation. */
def analyzeMethod(m: IMethod): Unit = try {if (m.hasCode) {
cpp.init(m)
- cpp.run
+ cpp.run()
m.linearizedBlocks() foreach { bb =>
var info = cpp.in(bb)
@@ -109,7 +108,7 @@ abstract class ClosureElimination extends SubComponent {
val t = info.getBinding(l)
t match {
case Deref(This) | Const(_) =>
- bb.replaceInstruction(i, valueToInstruction(t));
+ bb.replaceInstruction(i, valueToInstruction(t))
debuglog(s"replaced $i with $t")
case _ =>
@@ -120,7 +119,7 @@ abstract class ClosureElimination extends SubComponent {
case LOAD_FIELD(f, false) /* if accessible(f, m.symbol) */ =>
def replaceFieldAccess(r: Record) {
- val Record(cls, bindings) = r
+ val Record(cls, _) = r
info.getFieldNonRecordValue(r, f) foreach { v =>
bb.replaceInstruction(i, DROP(REFERENCE(cls)) :: valueToInstruction(v) :: Nil)
debuglog(s"replaced $i with $v")
@@ -188,28 +187,20 @@ abstract class ClosureElimination extends SubComponent {
case Boxed(LocalVar(v)) =>
LOAD_LOCAL(v)
}
-
- /** is field 'f' accessible from method 'm'? */
- def accessible(f: Symbol, m: Symbol): Boolean =
- f.isPublic || (f.isProtected && (f.enclosingPackageClass == m.enclosingPackageClass))
} /* class ClosureElim */
/** Peephole optimization. */
abstract class PeepholeOpt {
-
- private var method: IMethod = NoIMethod
-
/** Concrete implementations will perform their optimizations here */
def peep(bb: BasicBlock, i1: Instruction, i2: Instruction): Option[List[Instruction]]
var liveness: global.icodes.liveness.LivenessAnalysis = null
def apply(m: IMethod): Unit = if (m.hasCode) {
- method = m
liveness = new global.icodes.liveness.LivenessAnalysis
liveness.init(m)
- liveness.run
+ liveness.run()
m foreachBlock transformBlock
}
@@ -235,7 +226,7 @@ abstract class ClosureElimination extends SubComponent {
h = t.head
t = t.tail
}
- } while (redo);
+ } while (redo)
b fromList newInstructions
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index 1beed3f420..a9c8104e79 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -8,7 +8,6 @@ package scala.tools.nsc
package backend.opt
import scala.collection.{ mutable, immutable }
-import symtab._
/**
*/
@@ -55,7 +54,7 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
- val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis;
+ val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis
/** Use-def chain: give the reaching definitions at the beginning of given instruction. */
var defs: immutable.Map[InstrLoc, immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty
@@ -83,7 +82,7 @@ abstract class DeadCodeElimination extends SubComponent {
def dieCodeDie(m: IMethod) {
if (m.hasCode) {
- debuglog("dead code elimination on " + m);
+ debuglog("dead code elimination on " + m)
dropOf.clear()
localStores.clear()
clobbers.clear()
@@ -105,17 +104,17 @@ abstract class DeadCodeElimination extends SubComponent {
/** collect reaching definitions and initial useful instructions for this method. */
def collectRDef(m: IMethod): Unit = if (m.hasCode) {
- defs = immutable.HashMap.empty; worklist.clear(); useful.clear();
- rdef.init(m);
- rdef.run;
+ defs = immutable.HashMap.empty; worklist.clear(); useful.clear()
+ rdef.init(m)
+ rdef.run()
m foreachBlock { bb =>
useful(bb) = new mutable.BitSet(bb.size)
- var rd = rdef.in(bb);
+ var rd = rdef.in(bb)
for (Pair(i, idx) <- bb.toList.zipWithIndex) {
// utility for adding to worklist
- def moveToWorkList() = moveToWorkListIf(true)
+ def moveToWorkList() = moveToWorkListIf(cond = true)
// utility for (conditionally) adding to worklist
def moveToWorkListIf(cond: Boolean) =
@@ -131,7 +130,7 @@ abstract class DeadCodeElimination extends SubComponent {
case LOAD_LOCAL(_) =>
defs = defs + Pair(((bb, idx)), rd.vars)
- moveToWorkListIf(false)
+ moveToWorkListIf(cond = false)
case STORE_LOCAL(l) =>
/* SI-4935 Check whether a module is stack top, if so mark the instruction that loaded it
@@ -182,8 +181,10 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
moveToWorkListIf(necessary)
+ case LOAD_MODULE(sym) if isLoadNeeded(sym) =>
+ moveToWorkList() // SI-4859 Module initialization might side-effect.
case _ => ()
- moveToWorkListIf(false)
+ moveToWorkListIf(cond = false)
}
rd = rdef.interpret(bb, idx, rd)
}
@@ -339,8 +340,8 @@ abstract class DeadCodeElimination extends SubComponent {
m foreachBlock { bb =>
debuglog(bb + ":")
val oldInstr = bb.toList
- bb.open
- bb.clear
+ bb.open()
+ bb.clear()
for (Pair(i, idx) <- oldInstr.zipWithIndex) {
if (useful(bb)(idx)) {
debuglog(" * " + i + " is useful")
@@ -373,7 +374,7 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
- if (bb.nonEmpty) bb.close
+ if (bb.nonEmpty) bb.close()
else log(s"empty block encountered in $m")
}
}
@@ -412,13 +413,6 @@ abstract class DeadCodeElimination extends SubComponent {
compensations
}
- private def withClosed[a](bb: BasicBlock)(f: => a): a = {
- if (bb.nonEmpty) bb.close
- val res = f
- if (bb.nonEmpty) bb.open
- res
- }
-
private def findInstruction(bb: BasicBlock, i: Instruction): InstrLoc = {
for (b <- linearizer.linearizeAt(method, bb)) {
val idx = b.toList indexWhere (_ eq i)
diff --git a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
index ab238af239..dcf0590951 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
@@ -4,7 +4,6 @@
package scala.tools.nsc
package backend.opt
-import scala.util.control.Breaks._
/**
* This optimization phase inlines the exception handlers so that further phases can optimize the code better
@@ -53,7 +52,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
import icodes._
import icodes.opcodes._
- val phaseName = "inlineExceptionHandlers"
+ val phaseName = "inlinehandlers"
/** Create a new phase */
override def newPhase(p: Phase) = new InlineExceptionHandlersPhase(p)
@@ -70,9 +69,9 @@ abstract class InlineExceptionHandlers extends SubComponent {
* -some exception handler duplicates expect the exception on the stack while others expect it in a local
* => Option[Local]
*/
- private val handlerCopies = perRunCaches.newMap[BasicBlock, Option[(Option[Local], BasicBlock)]]
+ private val handlerCopies = perRunCaches.newMap[BasicBlock, Option[(Option[Local], BasicBlock)]]()
/* This map is the inverse of handlerCopies, used to compute the stack of duplicate blocks */
- private val handlerCopiesInverted = perRunCaches.newMap[BasicBlock, (BasicBlock, TypeKind)]
+ private val handlerCopiesInverted = perRunCaches.newMap[BasicBlock, (BasicBlock, TypeKind)]()
private def handlerLocal(bb: BasicBlock): Option[Local] =
for (v <- handlerCopies get bb ; (local, block) <- v ; l <- local) yield l
@@ -263,7 +262,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
if (analyzedMethod eq NoIMethod) {
analyzedMethod = bblock.method
tfa.init(bblock.method)
- tfa.run
+ tfa.run()
log(" performed tfa on method: " + bblock.method)
for (block <- bblock.method.blocks.sortBy(_.label))
@@ -358,7 +357,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
}
val caughtException = toTypeKind(caughtClass.tpe)
// copy the exception handler code once again, dropping the LOAD_EXCEPTION
- val copy = handler.code.newBlock
+ val copy = handler.code.newBlock()
copy.emitOnly((handler.iterator drop dropCount).toSeq: _*)
// extend the handlers of the handler to the copy
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 521b6cc132..1a73764719 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -50,6 +50,7 @@ abstract class Inliners extends SubComponent {
val phaseName = "inliner"
/** Debug - for timing the inliner. */
+ /****
private def timed[T](s: String, body: => T): T = {
val t1 = System.currentTimeMillis()
val res = body
@@ -60,6 +61,7 @@ abstract class Inliners extends SubComponent {
res
}
+ ****/
/** Look up implementation of method 'sym in 'clazz'.
*/
@@ -193,7 +195,7 @@ abstract class Inliners extends SubComponent {
private var currentIClazz: IClass = _
private def warn(pos: Position, msg: String) = currentIClazz.cunit.inlinerWarning(pos, msg)
- private def ownedName(sym: Symbol): String = afterUncurry {
+ private def ownedName(sym: Symbol): String = exitingUncurry {
val count = (
if (!sym.isMethod) 1
else if (sym.owner.isAnonymousFunction) 3
@@ -230,7 +232,7 @@ abstract class Inliners extends SubComponent {
val hasRETURN = containsRETURN(incm.code.blocksList) || (incm.exh exists { eh => containsRETURN(eh.blocks) })
var a: analysis.MethodTFA = null
- if(hasRETURN) { a = new analysis.MethodTFA(incm); a.run }
+ if(hasRETURN) { a = new analysis.MethodTFA(incm); a.run() }
if(forceable) { recentTFAs.put(incm.symbol, (hasRETURN, a)) }
@@ -240,7 +242,7 @@ abstract class Inliners extends SubComponent {
def clearCaches() {
// methods
NonPublicRefs.usesNonPublics.clear()
- recentTFAs.clear
+ recentTFAs.clear()
tfa.knownUnsafe.clear()
tfa.knownSafe.clear()
tfa.knownNever.clear()
@@ -279,7 +281,7 @@ abstract class Inliners extends SubComponent {
}
val tfa = new analysis.MTFAGrowable()
- tfa.stat = global.opt.printStats
+ tfa.stat = global.settings.Ystatistics.value
val staleOut = new mutable.ListBuffer[BasicBlock]
val splicedBlocks = mutable.Set.empty[BasicBlock]
val staleIn = mutable.Set.empty[BasicBlock]
@@ -320,8 +322,8 @@ abstract class Inliners extends SubComponent {
if (settings.debug.value)
inlineLog("caller", ownedName(m.symbol), "in " + m.symbol.owner.fullName)
- var sizeBeforeInlining = m.code.blockCount
- var instrBeforeInlining = m.code.instructionCount
+ val sizeBeforeInlining = m.code.blockCount
+ val instrBeforeInlining = m.code.instructionCount
var retry = false
var count = 0
@@ -361,7 +363,7 @@ abstract class Inliners extends SubComponent {
assert(ocm.method.isEffectivelyFinal && ocm.method.owner.isEffectivelyFinal)
if(analyzeInc(ocm, x, ocm.method.owner, -1, ocm.method)) {
inlineCount += 1
- break
+ break()
}
}
}
@@ -477,9 +479,9 @@ abstract class Inliners extends SubComponent {
* As a whole, both `preInline()` invocations amount to priming the inlining process,
* so that the first TFA that is run afterwards is able to gain more information as compared to a cold-start.
*/
- val totalPreInlines = {
- val firstRound = preInline(true)
- if(firstRound == 0) 0 else (firstRound + preInline(false))
+ /*val totalPreInlines = */ { // Val name commented out to emphasize it is never used
+ val firstRound = preInline(isFirstRound = true)
+ if(firstRound == 0) 0 else (firstRound + preInline(isFirstRound = false))
}
staleOut.clear()
splicedBlocks.clear()
@@ -511,7 +513,7 @@ abstract class Inliners extends SubComponent {
for (cm <- cms; if tfa.remainingCALLs.isDefinedAt(cm)) {
val analysis.CallsiteInfo(_, receiver, stackLength, concreteMethod) = tfa.remainingCALLs(cm)
if (analyzeInc(cm, bb, receiver, stackLength, concreteMethod)) {
- break
+ break()
}
}
}
@@ -566,10 +568,9 @@ abstract class Inliners extends SubComponent {
warn(inlFail.pos, "At the end of the day, could not inline @inline-marked method " + inlFail.method.originalName.decode)
}
- m.normalize
+ m.normalize()
if (sizeBeforeInlining > 0) {
val instrAfterInlining = m.code.instructionCount
- val prefix = if ((instrAfterInlining > 2 * instrBeforeInlining) && (instrAfterInlining > 200)) "!!" else ""
val inlinings = caller.inlinedCalls
if (inlinings > 0) {
val s1 = s"instructions $instrBeforeInlining -> $instrAfterInlining"
@@ -584,7 +585,7 @@ abstract class Inliners extends SubComponent {
private def isHigherOrderMethod(sym: Symbol) = (
sym.isMethod
- && beforeExplicitOuter(sym.info.paramTypes exists isFunctionType) // was "at erasurePhase.prev"
+ && enteringExplicitOuter(sym.info.paramTypes exists isFunctionType) // was "at erasurePhase.prev"
)
/** Should method 'sym' being called in 'receiver' be loaded from disk? */
@@ -601,7 +602,6 @@ abstract class Inliners extends SubComponent {
override def toString = m.toString
val sym = m.symbol
- val name = sym.name
def owner = sym.owner
def paramTypes = sym.info.paramTypes
def minimumStack = paramTypes.length + 1
@@ -617,13 +617,11 @@ abstract class Inliners extends SubComponent {
def length = blocks.length
def openBlocks = blocks filterNot (_.closed)
def instructions = m.code.instructions
- // def linearized = linearizer linearize m
def isSmall = (length <= SMALL_METHOD_SIZE) && blocks(0).length < 10
def isLarge = length > MAX_INLINE_SIZE
def isRecursive = m.recursive
def hasHandlers = handlers.nonEmpty || m.bytecodeHasEHs
- def hasClosureParam = paramTypes exists (tp => isByNameParamType(tp) || isFunctionType(tp))
def isSynchronized = sym.hasFlag(Flags.SYNCHRONIZED)
def hasNonFinalizerHandler = handlers exists {
@@ -731,7 +729,6 @@ abstract class Inliners extends SubComponent {
*/
sealed abstract class InlineSafetyInfo {
def isSafe = false
- def isUnsafe = !isSafe
}
case object NeverSafeToInline extends InlineSafetyInfo
case object InlineableAtThisCaller extends InlineSafetyInfo { override def isSafe = true }
@@ -814,7 +811,7 @@ abstract class Inliners extends SubComponent {
/** Add a new block in the current context. */
def newBlock() = {
- val b = caller.m.code.newBlock
+ val b = caller.m.code.newBlock()
activeHandlers foreach (_ addCoveredBlock b)
if (retVal ne null) b.varsInScope += retVal
b.varsInScope += inlinedThis
@@ -872,7 +869,7 @@ abstract class Inliners extends SubComponent {
r
case CALL_METHOD(meth, Static(true)) if meth.isClassConstructor =>
- CALL_METHOD(meth, Static(true))
+ CALL_METHOD(meth, Static(onInstance = true))
case _ => i.clone()
}
@@ -893,8 +890,8 @@ abstract class Inliners extends SubComponent {
}
// re-emit the instructions before the call
- block.open
- block.clear
+ block.open()
+ block.clear()
block emit instrBefore
// store the arguments into special locals
@@ -903,7 +900,7 @@ abstract class Inliners extends SubComponent {
// jump to the start block of the callee
blockEmit(JUMP(inlinedBlock(inc.m.startBlock)))
- block.close
+ block.close()
// duplicate the other blocks in the callee
val calleeLin = inc.m.linearizedBlocks()
@@ -926,11 +923,11 @@ abstract class Inliners extends SubComponent {
emitInlined(map(i))
info = if(hasRETURN) a.interpret(info, i) else null
}
- inlinedBlock(bb).close
+ inlinedBlock(bb).close()
}
afterBlock emit instrAfter
- afterBlock.close
+ afterBlock.close()
staleIn += afterBlock
splicedBlocks ++= (calleeLin map inlinedBlock)
@@ -972,7 +969,7 @@ abstract class Inliners extends SubComponent {
}
if(sameSymbols) { // TODO but this also amounts to recursive, ie should lead to adding to tfa.knownNever, right?
- tfa.knownUnsafe += inc.sym;
+ tfa.knownUnsafe += inc.sym
return DontInlineHere("sameSymbols (ie caller == callee)")
}
@@ -1031,7 +1028,6 @@ abstract class Inliners extends SubComponent {
case Public => true
}
private def sameSymbols = caller.sym == inc.sym
- private def sameOwner = caller.owner == inc.owner
/** Gives green light for inlining (which may still be vetoed later). Heuristics:
* - it's bad to make the caller larger (> SMALL_METHOD_SIZE) if it was small
@@ -1047,9 +1043,9 @@ abstract class Inliners extends SubComponent {
if (caller.isInClosure) score -= 2
else if (caller.inlinedCalls < 1) score -= 1 // only monadic methods can trigger the first inline
- if (inc.isSmall) score += 1;
+ if (inc.isSmall) score += 1
// if (inc.hasClosureParam) score += 2
- if (inc.isLarge) score -= 1;
+ if (inc.isLarge) score -= 1
if (caller.isSmall && isLargeSum) {
score -= 1
debuglog(s"inliner score decreased to $score because small caller $caller would become large")
diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
index 7f5f412a20..c341d33a62 100644
--- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
@@ -52,7 +52,7 @@ abstract class Changes {
private val changedTypeParams = new mutable.HashSet[String]
private def sameParameterSymbolNames(sym1: Symbol, sym2: Symbol): Boolean =
- sameSymbol(sym1, sym2, true) || sym2.encodedName.startsWith(sym1.encodedName + nme.NAME_JOIN_STRING) // see #3140
+ sameSymbol(sym1, sym2, simple = true) || sym2.encodedName.startsWith(sym1.encodedName + nme.NAME_JOIN_STRING) // see #3140
private def sameSymbol(sym1: Symbol, sym2: Symbol, simple: Boolean = false): Boolean =
if (simple) sym1.encodedName == sym2.encodedName else sym1.fullName == sym2.fullName
private def sameFlags(sym1: Symbol, sym2: Symbol): Boolean =
@@ -61,12 +61,7 @@ abstract class Changes {
annotationsChecked.forall(a =>
(sym1.hasAnnotation(a) == sym2.hasAnnotation(a)))
- private def sameType(tp1: Type, tp2: Type)(implicit strict: Boolean) = {
- def typeOf(tp: Type): String = tp.toString + "[" + tp.getClass + "]"
- val res = sameType0(tp1, tp2)
- //if (!res) println("\t different types: " + typeOf(tp1) + " : " + typeOf(tp2))
- res
- }
+ private def sameType(tp1: Type, tp2: Type)(implicit strict: Boolean) = sameType0(tp1, tp2)
private def sameType0(tp1: Type, tp2: Type)(implicit strict: Boolean): Boolean = ((tp1, tp2) match {
/*case (ErrorType, _) => false
@@ -95,11 +90,11 @@ abstract class Changes {
} else
!sym1.isTypeParameter || !changedTypeParams.contains(sym1.fullName)
+ // @M! normalize reduces higher-kinded case to PolyType's
testSymbols && sameType(pre1, pre2) &&
(sym1.variance == sym2.variance) &&
((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
sameTypes(args1, args2))
- // @M! normalize reduces higher-kinded case to PolyType's
case (RefinedType(parents1, ref1), RefinedType(parents2, ref2)) =>
def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
@@ -126,7 +121,7 @@ abstract class Changes {
case (NullaryMethodType(res1), NullaryMethodType(res2)) =>
sameType(res1, res2)
case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) =>
- sameTypeParams(tparams1, tparams2)(false) && sameType(res1, res2)(false)
+ sameTypeParams(tparams1, tparams2)(strict = false) && sameType(res1, res2)(strict = false)
case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) =>
sameType(lo1, lo2) && sameType(hi1, hi2)
case (BoundedWildcardType(bounds), _) =>
@@ -170,17 +165,16 @@ abstract class Changes {
/** Return the list of changes between 'from' and 'toSym.info'.
*/
def changeSet(from: Type, toSym: Symbol): List[Change] = {
- implicit val defaultReason = "types"
implicit val defaultStrictTypeRefTest = true
val to = toSym.info
- changedTypeParams.clear
+ changedTypeParams.clear()
def omitSymbols(s: Symbol): Boolean = !s.hasFlag(LOCAL | LIFTED | PRIVATE | SYNTHETIC)
val cs = new mutable.ListBuffer[Change]
if ((from.parents zip to.parents) exists { case (t1, t2) => !sameType(t1, t2) })
cs += Changed(toEntity(toSym))(from.parents.zip(to.parents).toString)
- if (!sameTypeParams(from.typeParams, to.typeParams)(false))
+ if (!sameTypeParams(from.typeParams, to.typeParams)(strict = false))
cs += Changed(toEntity(toSym))(" tparams: " + from.typeParams.zip(to.typeParams))
// new members not yet visited
diff --git a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
deleted file mode 100644
index cdde768274..0000000000
--- a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
+++ /dev/null
@@ -1,254 +0,0 @@
-package scala.tools.nsc
-package dependencies
-
-import io.Path
-import scala.collection._
-import symtab.Flags
-import scala.tools.nsc.io.AbstractFile
-import scala.reflect.internal.util.SourceFile
-
-trait DependencyAnalysis extends SubComponent with Files {
- import global._
-
- val phaseName = "dependencyAnalysis"
-
- def off = settings.make.isDefault || settings.make.value == "all"
- def shouldCheckClasspath = settings.make.value != "transitivenocp"
-
- def newPhase(prev: Phase) = new AnalysisPhase(prev)
-
- private def depPath = Path(settings.dependenciesFile.value)
- def loadDependencyAnalysis(): Boolean = (
- depPath.path != "none" && depPath.isFile && loadFrom(
- AbstractFile.getFile(depPath),
- path => AbstractFile.getFile(depPath.parent resolve Path(path))
- )
- )
- def saveDependencyAnalysis(): Unit = {
- if (!depPath.exists)
- dependenciesFile = AbstractFile.getFile(depPath.createFile())
-
- /** The directory where file lookup should start */
- val rootPath = depPath.parent.normalize
- saveDependencies(
- file => rootPath.relativize(Path(file.file).normalize).path
- )
- }
-
- lazy val maxDepth = settings.make.value match {
- case "changed" => 0
- case "immediate" => 1
- case _ => Int.MaxValue
- }
-
- // todo: order-insensitive checking; also check timestamps?
- def validateClasspath(cp1: String, cp2: String): Boolean = cp1 == cp2
-
- def nameToFile(src: AbstractFile, name: String) =
- settings.outputDirs.outputDirFor(src)
- .lookupPathUnchecked(name.toString.replace(".", java.io.File.separator) + ".class", false)
-
- private var depFile: Option[AbstractFile] = None
-
- def dependenciesFile_=(file: AbstractFile) {
- assert(file ne null)
- depFile = Some(file)
- }
-
- def dependenciesFile: Option[AbstractFile] = depFile
-
- def classpath = settings.classpath.value
- def newDeps = new FileDependencies(classpath)
-
- var dependencies = newDeps
-
- def managedFiles = dependencies.dependencies.keySet
-
- /** Top level definitions per source file. */
- val definitions: mutable.Map[AbstractFile, List[Symbol]] =
- new mutable.HashMap[AbstractFile, List[Symbol]] {
- override def default(f: AbstractFile) = Nil
- }
-
- /** External references used by source file. */
- val references: mutable.Map[AbstractFile, immutable.Set[String]] =
- new mutable.HashMap[AbstractFile, immutable.Set[String]] {
- override def default(f: AbstractFile) = immutable.Set()
- }
-
- /** External references for inherited members used in the source file */
- val inherited: mutable.Map[AbstractFile, immutable.Set[Inherited]] =
- new mutable.HashMap[AbstractFile, immutable.Set[Inherited]] {
- override def default(f: AbstractFile) = immutable.Set()
- }
-
- /** Write dependencies to the current file. */
- def saveDependencies(fromFile: AbstractFile => String) =
- if(dependenciesFile.isDefined)
- dependencies.writeTo(dependenciesFile.get, fromFile)
-
- /** Load dependencies from the given file and save the file reference for
- * future saves.
- */
- def loadFrom(f: AbstractFile, toFile: String => AbstractFile): Boolean = {
- dependenciesFile = f
- FileDependencies.readFrom(f, toFile) match {
- case Some(fd) =>
- val success = if (shouldCheckClasspath) validateClasspath(fd.classpath, classpath) else true
- dependencies = if (success) fd else {
- if (settings.debug.value)
- println("Classpath has changed. Nuking dependencies")
- newDeps
- }
-
- success
- case None => false
- }
- }
-
- def calculateFiles(files: List[SourceFile]): List[SourceFile] =
- if (off) files
- else if (dependencies.isEmpty) {
- println("No known dependencies. Compiling " +
- (if (settings.debug.value) files.mkString(", ") else "everything"))
- files
- } else {
- val (direct, indirect) = dependencies.invalidatedFiles(maxDepth);
- val filtered = files.filter(x => {
- val f = x.file.absolute
- direct(f) || indirect(f) || !dependencies.containsFile(f);
- })
- filtered match {
- case Nil => println("No changes to recompile");
- case x => println("Recompiling " + (
- if(settings.debug.value) x.mkString(", ") else x.length + " files")
- )
- }
- filtered
- }
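
calculateFiles narrows the source list to files that were directly invalidated, indirectly invalidated, or not tracked by the dependency data at all. A tiny sketch of that filter over plain strings (the file names are invented):

    object RecompileFilterSketch {
      // Keep a file when it is directly or indirectly invalidated,
      // or when the dependency store knows nothing about it.
      def filesToRecompile(all: List[String], direct: Set[String],
                           indirect: Set[String], tracked: Set[String]): List[String] =
        all.filter(f => direct(f) || indirect(f) || !tracked(f))

      def main(args: Array[String]): Unit = {
        val all = List("A.scala", "B.scala", "C.scala", "New.scala")
        val picked = filesToRecompile(all,
          direct = Set("A.scala"), indirect = Set("B.scala"),
          tracked = Set("A.scala", "B.scala", "C.scala"))
        assert(picked == List("A.scala", "B.scala", "New.scala"))
        println(picked)
      }
    }
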
-
- case class Inherited(qualifier: String, member: Name)
-
- class AnalysisPhase(prev: Phase) extends StdPhase(prev) {
-
- override def cancelled(unit: CompilationUnit) =
- super.cancelled(unit) && !unit.isJava
-
- def apply(unit : global.CompilationUnit) {
- val f = unit.source.file.file
- // When we're passed strings by the interpreter
- // they have no source file. We simply ignore this case
- // as irrelevant to dependency analysis.
- if (f != null){
- val source: AbstractFile = unit.source.file;
- for (d <- unit.icode){
- val name = d.toString
- d.symbol match {
- case s : ModuleClassSymbol =>
- val isTopLevelModule = afterPickler { !s.isImplClass && !s.isNestedClass }
-
- if (isTopLevelModule && (s.companionModule != NoSymbol)) {
- dependencies.emits(source, nameToFile(unit.source.file, name))
- }
- dependencies.emits(source, nameToFile(unit.source.file, name + "$"))
- case _ =>
- dependencies.emits(source, nameToFile(unit.source.file, name))
- }
- }
-
- dependencies.reset(source)
- for (d <- unit.depends; if (d.sourceFile != null)){
- dependencies.depends(source, d.sourceFile)
- }
- }
-
- // find all external references in this compilation unit
- val file = unit.source.file
- references += file -> immutable.Set.empty[String]
- inherited += file -> immutable.Set.empty[Inherited]
-
- val buf = new mutable.ListBuffer[Symbol]
-
- (new Traverser {
- override def traverse(tree: Tree) {
- if ((tree.symbol ne null)
- && (tree.symbol != NoSymbol)
- && (!tree.symbol.isPackage)
- && (!tree.symbol.isJavaDefined)
- && (!tree.symbol.tpe.isError)
- && ((tree.symbol.sourceFile eq null)
- || (tree.symbol.sourceFile.path != file.path))
- && (!tree.symbol.isClassConstructor)) {
- updateReferences(tree.symbol.fullName)
- // was "at uncurryPhase.prev", which is actually non-deterministic
- // because the continuations plugin may or may not supply uncurry's
- // immediately preceding phase.
- beforeRefchecks(checkType(tree.symbol.tpe))
- }
-
- tree match {
- case cdef: ClassDef if !cdef.symbol.hasPackageFlag &&
- !cdef.symbol.isAnonymousFunction =>
- if (cdef.symbol != NoSymbol) buf += cdef.symbol
- // was "at erasurePhase.prev"
- beforeExplicitOuter {
- for (s <- cdef.symbol.info.decls)
- s match {
- case ts: TypeSymbol if !ts.isClass =>
- checkType(s.tpe)
- case _ =>
- }
- }
- super.traverse(tree)
-
- case ddef: DefDef =>
- // was "at typer.prev"
- beforeTyper { checkType(ddef.symbol.tpe) }
- super.traverse(tree)
- case a @ Select(q, n) if ((a.symbol != NoSymbol) && (q.symbol != null)) => // #2556
- if (!a.symbol.isConstructor &&
- !a.symbol.owner.isPackageClass &&
- !isSameType(q.tpe, a.symbol.owner.tpe))
- inherited += file ->
- (inherited(file) + Inherited(q.symbol.tpe.resultType.safeToString, n))
- super.traverse(tree)
- case _ =>
- super.traverse(tree)
- }
- }
-
- def checkType(tpe: Type): Unit =
- tpe match {
- case t: MethodType =>
- checkType(t.resultType)
- for (s <- t.params) checkType(s.tpe)
-
- case t: TypeRef =>
- if (t.sym.isAliasType) {
- updateReferences(t.typeSymbolDirect.fullName)
- checkType(t.typeSymbolDirect.info)
- }
- updateReferences(t.typeSymbol.fullName)
- for (tp <- t.args) checkType(tp)
-
- case t: PolyType =>
- checkType(t.resultType)
- updateReferences(t.typeSymbol.fullName)
-
- case t: NullaryMethodType =>
- checkType(t.resultType)
- updateReferences(t.typeSymbol.fullName)
-
- case t =>
- updateReferences(t.typeSymbol.fullName)
- }
-
- def updateReferences(s: String): Unit =
- references += file -> (references(file) + s)
-
- }).apply(unit.body)
-
- definitions(unit.source.file) = buf.toList
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/dependencies/Files.scala b/src/compiler/scala/tools/nsc/dependencies/Files.scala
deleted file mode 100644
index 194351a13f..0000000000
--- a/src/compiler/scala/tools/nsc/dependencies/Files.scala
+++ /dev/null
@@ -1,177 +0,0 @@
-package scala.tools.nsc
-package dependencies
-
-import java.io.{InputStream, OutputStream, PrintStream, InputStreamReader, BufferedReader}
-import io.{AbstractFile, PlainFile, VirtualFile}
-
-import scala.collection._
-
-
-trait Files { self : SubComponent =>
-
- class FileDependencies(val classpath: String) {
- import FileDependencies._
-
- class Tracker extends mutable.OpenHashMap[AbstractFile, mutable.Set[AbstractFile]] {
- override def default(key: AbstractFile) = {
- this(key) = new mutable.HashSet[AbstractFile]
- this(key)
- }
- }
-
- val dependencies = new Tracker
- val targets = new Tracker
-
- def isEmpty = dependencies.isEmpty && targets.isEmpty
-
- def emits(source: AbstractFile, result: AbstractFile) =
- targets(source) += result
- def depends(from: AbstractFile, on: AbstractFile) =
- dependencies(from) += on
-
- def reset(file: AbstractFile) = dependencies -= file
-
- def cleanEmpty = {
- dependencies foreach {case (_, value) =>
- value retain (x => x.exists && (x ne removedFile))}
- dependencies retain ((key, value) => key.exists && !value.isEmpty)
- targets foreach {case (_, value) => value retain (_.exists)}
- targets retain ((key, value) => key.exists && !value.isEmpty)
- }
-
- def containsFile(f: AbstractFile) = targets.contains(f.absolute)
-
- def invalidatedFiles(maxDepth: Int) = {
- val direct = new mutable.HashSet[AbstractFile]
-
- for ((file, products) <- targets) {
- // This looks a bit odd. It may seem like one should invalidate a file
- // if *any* of its products are older than it. The forall is there
- // to deal with the fact that a) Some results might have been orphaned
- // and b) Some files might not need changing.
- direct(file) ||= products.forall(d => d.lastModified < file.lastModified)
- }
-
- val indirect = dependentFiles(maxDepth, direct)
-
- for ((source, targets) <- targets
- if direct(source) || indirect(source) || (source eq removedFile)) {
- targets foreach (_.delete)
- targets -= source
- }
-
- (direct, indirect)
- }
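
The invalidation rule in the loop above is subtle: a source is marked dirty only when *all* of its recorded products are older than it, so orphaned or intentionally unchanged outputs don't force a rebuild. A small sketch of that predicate on bare timestamps (the values are invented):

    object InvalidationSketch {
      // A source is considered changed when every product it emitted is
      // older than the source itself (the forall discussed in the comment).
      def needsRebuild(sourceModified: Long, productModified: List[Long]): Boolean =
        productModified.forall(_ < sourceModified)

      def main(args: Array[String]): Unit = {
        assert(needsRebuild(sourceModified = 100L, productModified = List(50L, 60L)))   // all stale
        assert(!needsRebuild(sourceModified = 100L, productModified = List(50L, 150L))) // one is fresh
        println("invalidation predicate ok")
      }
    }
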
-
- /** Return the set of files that depend on the given changed files.
- * It computes the transitive closure up to the given depth.
- */
- def dependentFiles(depth: Int, changed: Set[AbstractFile]): Set[AbstractFile] = {
- val indirect = new mutable.HashSet[AbstractFile]
- val newInvalidations = new mutable.HashSet[AbstractFile]
-
- def invalid(file: AbstractFile) =
- indirect(file) || changed(file) || (file eq removedFile)
-
- def go(i: Int) : Unit = if(i > 0) {
- newInvalidations.clear
- for((target, depends) <- dependencies if !invalid(target);
- d <- depends)
- newInvalidations(target) ||= invalid(d)
-
- indirect ++= newInvalidations
- if (!newInvalidations.isEmpty) go(i - 1)
- }
-
- go(depth)
-
- indirect --= changed
- }
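
dependentFiles is a bounded transitive closure: starting from the directly changed files, each round marks anything that depends on an already-invalid file, for at most `depth` rounds. A minimal, self-contained version over a plain dependency map (String keys stand in for AbstractFile):

    object DependentFilesSketch {
      // deps(a) = the set of files `a` depends on.
      def dependentFiles(depth: Int, changed: Set[String],
                         deps: Map[String, Set[String]]): Set[String] = {
        var indirect = Set.empty[String]
        def invalid(f: String) = indirect(f) || changed(f)
        var i = depth
        var progress = true
        while (i > 0 && progress) {
          val newlyInvalid = deps.collect {
            case (target, on) if !invalid(target) && on.exists(invalid) => target
          }.toSet
          progress = newlyInvalid.nonEmpty
          indirect ++= newlyInvalid
          i -= 1
        }
        indirect -- changed
      }

      def main(args: Array[String]): Unit = {
        val deps = Map("B.scala" -> Set("A.scala"), "C.scala" -> Set("B.scala"))
        assert(dependentFiles(1, Set("A.scala"), deps) == Set("B.scala"))
        assert(dependentFiles(2, Set("A.scala"), deps) == Set("B.scala", "C.scala"))
        println("transitive closure ok")
      }
    }
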
-
- def writeTo(file: AbstractFile, fromFile: AbstractFile => String): Unit =
- writeToFile(file)(out => writeTo(new PrintStream(out), fromFile))
-
- def writeTo(print: PrintStream, fromFile: AbstractFile => String): Unit = {
- def emit(tracker: Tracker) =
- for ((f, ds) <- tracker; d <- ds) print.println(fromFile(f) + arrow + fromFile(d))
-
- cleanEmpty
- print.println(classpath)
- print.println(separator)
- emit(dependencies)
- print.println(separator)
- emit(targets)
- }
- }
-
- object FileDependencies {
- private val separator:String = "-------"
- private val arrow = " -> "
- private val removedFile = new VirtualFile("removed")
-
- private def validLine(l: String) = (l != null) && (l != separator)
-
- def readFrom(file: AbstractFile, toFile: String => AbstractFile): Option[FileDependencies] =
- readFromFile(file) { in =>
- val reader = new BufferedReader(new InputStreamReader(in))
- val it = new FileDependencies(reader.readLine)
-
- def readLines(valid: Boolean)(f: (AbstractFile, AbstractFile) => Unit): Boolean = {
- var continue = valid
- var line: String = null
- while (continue && {line = reader.readLine; validLine(line)}) {
- line.split(arrow) match {
- case Array(from, on) => f(toFile(from), toFile(on))
- case _ =>
- global.inform("Parse error: Unrecognised string " + line)
- continue = false
- }
- }
- continue
- }
-
- reader.readLine
-
- val dResult = readLines(true)(
- (_, _) match {
- case (null, _) => // fromFile is removed, it's ok
- case (fromFile, null) =>
- // onFile is removed, should recompile fromFile
- it.depends(fromFile, removedFile)
- case (fromFile, onFile) => it.depends(fromFile, onFile)
- })
-
- readLines(dResult)(
- (_, _) match {
- case (null, null) =>
- // both source and target were removed, it's ok
- case (null, targetFile) =>
- // source is removed, should remove relative target later
- it.emits(removedFile, targetFile)
- case (_, null) =>
- // it may have been cleaned outside, or removed during the last phase
- case (sourceFile, targetFile) => it.emits(sourceFile, targetFile)
- })
-
- Some(it)
- }
- }
-
- def writeToFile[T](file: AbstractFile)(f: OutputStream => T) : T = {
- val out = file.bufferedOutput
- try {
- f(out)
- } finally {
- out.close
- }
- }
-
- def readFromFile[T](file: AbstractFile)(f: InputStream => T) : T = {
- val in = file.input
- try{
- f(in)
- } finally {
- in.close
- }
- }
-}
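
A minimal sketch, assuming made-up file names, of the on-disk layout that writeTo emits
and readFrom parses above: the classpath on the first line, a "-------" separator, one
"from -> on" line per dependency, another separator, then one "source -> target" line
per generated classfile.

object DependencyFileFormatSketch {
  // Shape of the file written by FileDependencies.writeTo (all names hypothetical).
  val sample: String =
    """|/some/classpath
       |-------
       |A.scala -> B.scala
       |C.scala -> A.scala
       |-------
       |A.scala -> A.class
       |B.scala -> B.class
       |""".stripMargin

  def main(args: Array[String]): Unit = print(sample)
}
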
diff --git a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
deleted file mode 100644
index 3e7ac573e9..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Iulian Dragos
- * @author Hubert Plocinicak
- */
-package scala.tools.nsc
-package interactive
-
-import scala.collection._
-
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import scala.reflect.internal.util.FakePos
-
-import dependencies._
-import io.AbstractFile
-import scala.language.implicitConversions
-
-trait BuildManager {
-
- /** Add the given source files to the managed build process. */
- def addSourceFiles(files: Set[AbstractFile])
-
- /** Remove the given files from the managed build process. */
- def removeFiles(files: Set[AbstractFile])
-
- /** The given files have been modified by the user. Recompile
- * them and their dependent files.
- */
- def update(added: Set[AbstractFile], removed: Set[AbstractFile])
-
- /** Notification that the supplied set of files is being built */
- def buildingFiles(included: Set[AbstractFile]) {}
-
- /** Load saved dependency information. */
- def loadFrom(file: AbstractFile, toFile: String => AbstractFile) : Boolean
-
- /** Save dependency information to `file`. */
- def saveTo(file: AbstractFile, fromFile: AbstractFile => String)
-
- def compiler: scala.tools.nsc.Global
-
- /** Delete classfiles derived from the supplied set of sources */
- def deleteClassfiles(sources : Set[AbstractFile]) {
- val targets = compiler.dependencyAnalysis.dependencies.targets
- for(source <- sources; cf <- targets(source))
- cf.delete
- }
-}
-
-
-/** Simple driver for testing the build manager. It presents
- * the user with a 'resident compiler' prompt. Each line is
- * interpreted as a set of files that have changed. The builder
- * then derives the dependent files and recompiles them.
- */
-object BuildManagerTest extends EvalLoop {
-
- def prompt = "builder > "
-
- private def buildError(msg: String) {
- println(msg + "\n scalac -help gives more information")
- }
-
- def main(args: Array[String]) {
- implicit def filesToSet(fs: List[String]): Set[AbstractFile] = {
- def partition(s: String, r: Tuple2[List[AbstractFile], List[String]])= {
- val v = AbstractFile.getFile(s)
- if (v == null) (r._1, s::r._2) else (v::r._1, r._2)
- }
- val result = fs.foldRight((List[AbstractFile](), List[String]()))(partition)
- if (!result._2.isEmpty)
- Console.err.println("No such file(s): " + result._2.mkString(","))
- Set.empty ++ result._1
- }
-
- val settings = new Settings(buildError)
- settings.Ybuildmanagerdebug.value = true
- val command = new CompilerCommand(args.toList, settings)
-// settings.make.value = "off"
-// val buildManager: BuildManager = new SimpleBuildManager(settings)
- val buildManager: BuildManager = new RefinedBuildManager(settings)
-
- buildManager.addSourceFiles(command.files)
-
- // enter resident mode
- loop { line =>
- val args = line.split(' ').toList
- val command = new CompilerCommand(args, settings)
- buildManager.update(command.files, Set.empty)
- }
-
- }
-}
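
A rough, self-contained sketch of the resident "builder > " loop described above: each
input line is split into file names and treated as the set of changed files. It does not
drive a real compiler; the recompilation step is only printed.

import scala.io.StdIn

object BuilderPromptSketch {
  def main(args: Array[String]): Unit = {
    var line = StdIn.readLine("builder > ")
    while (line != null && line.nonEmpty) {
      val changed = line.split(' ').filter(_.nonEmpty).toSet
      println("would recompile: " + changed.mkString(", "))  // stand-in for buildManager.update
      line = StdIn.readLine("builder > ")
    }
  }
}
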
diff --git a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
deleted file mode 100644
index b95f1fa7ca..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
+++ /dev/null
@@ -1,285 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-import ast.Trees
-import ast.Positions
-import scala.reflect.internal.util.{SourceFile, Position, RangePosition, NoPosition}
-import scala.tools.nsc.util.WorkScheduler
-import scala.collection.mutable.ListBuffer
-
-/** Handling range positions
- * atPos, the main method in this trait, will add positions to a tree,
- * and will ensure the following properties:
- *
- * 1. All nodes between the root of the tree and nodes that already have positions
- * will be assigned positions.
- * 2. No node which already has a position will be assigned a different range; however
- * a RangePosition might become a TransparentPosition.
- * 3. The position of each assigned node includes the positions of each of its children.
- * 4. The positions of all solid descendants of children of an assigned node
- * are mutually non-overlapping.
- *
- * Here, the solid descendants of a node are:
- *
- * If the node has a TransparentPosition, the solid descendants of all its children
- * Otherwise, the singleton consisting of the node itself.
- */
-trait RangePositions extends Trees with Positions {
-self: scala.tools.nsc.Global =>
-
- case class Range(pos: Position, tree: Tree) {
- def isFree = tree == EmptyTree
- }
-
- override def rangePos(source: SourceFile, start: Int, point: Int, end: Int) =
- new RangePosition(source, start, point, end)
-
- /** A position that wraps a set of trees.
- * The point of the wrapping position is the point of the default position.
- * If some of the trees are ranges, returns a range position enclosing all ranges;
- * otherwise returns the default position, either focused or not.
- */
- override def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = {
- val ranged = trees filter (_.pos.isRange)
- if (ranged.isEmpty) if (focus) default.focus else default
- else new RangePosition(default.source, (ranged map (_.pos.start)).min, default.point, (ranged map (_.pos.end)).max)
- }
-
- /** A position that wraps a non-empty set of trees.
- * The point of the wrapping position is the point of the first tree's position.
- * If some of the trees are ranges, returns a range position enclosing all ranges;
- * otherwise returns the first tree's position.
- */
- override def wrappingPos(trees: List[Tree]): Position = {
- val headpos = trees.head.pos
- if (headpos.isDefined) wrappingPos(headpos, trees) else headpos
- }
-
- // -------------- ensuring no overlaps -------------------------------
-
- /** Ensure that given tree has no positions that overlap with
- * any of the positions of `others`. This is done by
- * shortening the range, assigning TransparentPositions
- * to some of the nodes in `tree` or focusing on the position.
- */
- override def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) {
- def isOverlapping(pos: Position) =
- pos.isRange && (others exists (pos overlaps _.pos))
- if (isOverlapping(tree.pos)) {
- val children = tree.children
- children foreach (ensureNonOverlapping(_, others, focus))
- if (tree.pos.isOpaqueRange) {
- val wpos = wrappingPos(tree.pos, children, focus)
- tree setPos (if (isOverlapping(wpos)) tree.pos.makeTransparent else wpos)
- }
- }
- }
-
- def solidDescendants(tree: Tree): List[Tree] =
- if (tree.pos.isTransparent) tree.children flatMap solidDescendants
- else List(tree)
-
- /** A free range from `lo` to `hi` */
- private def free(lo: Int, hi: Int): Range =
- Range(new RangePosition(null, lo, lo, hi), EmptyTree)
-
- /** The maximal free range */
- private lazy val maxFree: Range = free(0, Int.MaxValue)
-
- /** A singleton list of a non-empty range from `lo` to `hi`, or else the empty List */
- private def maybeFree(lo: Int, hi: Int) =
- if (lo < hi) List(free(lo, hi))
- else List()
-
- /** Insert `pos` into ranges `rs` if possible;
- * otherwise add conflicting trees to `conflicting`.
- */
- private def insert(rs: List[Range], t: Tree, conflicting: ListBuffer[Tree]): List[Range] = rs match {
- case List() =>
- assert(conflicting.nonEmpty)
- rs
- case r :: rs1 =>
- assert(!t.pos.isTransparent)
- if (r.isFree && (r.pos includes t.pos)) {
-// println("subdividing "+r+"/"+t.pos)
- maybeFree(t.pos.end, r.pos.end) ::: List(Range(t.pos, t)) ::: maybeFree(r.pos.start, t.pos.start) ::: rs1
- } else {
- if (!r.isFree && (r.pos overlaps t.pos)) conflicting += r.tree
- r :: insert(rs1, t, conflicting)
- }
- }
-
- /** Replace elem `t` of `ts` by `replacement` list. */
- private def replace(ts: List[Tree], t: Tree, replacement: List[Tree]): List[Tree] =
- if (ts.head == t) replacement ::: ts.tail
- else ts.head :: replace(ts.tail, t, replacement)
-
- /** Does given list of trees have mutually non-overlapping positions?
- * pre: None of the trees is transparent
- */
- def findOverlapping(cts: List[Tree]): List[(Tree, Tree)] = {
- var ranges = List(maxFree)
- for (ct <- cts) {
- if (ct.pos.isOpaqueRange) {
- val conflicting = new ListBuffer[Tree]
- ranges = insert(ranges, ct, conflicting)
- if (conflicting.nonEmpty) return conflicting.toList map (t => (t, ct))
- }
- }
- List()
- }
-
- // -------------- setting positions -------------------------------
-
- /** Set position of all children of a node
- * @param pos A target position.
- * Uses the point of the position as the point of all positions it assigns.
- * Uses the start of this position as an Offset position for unpositioned trees
- * without children.
- * @param trees The children to position. All children must be positionable.
- */
- private def setChildrenPos(pos: Position, trees: List[Tree]): Unit = try {
- for (tree <- trees) {
- if (!tree.isEmpty && tree.pos == NoPosition) {
- val children = tree.children
- if (children.isEmpty) {
- tree setPos pos.focus
- } else {
- setChildrenPos(pos, children)
- tree setPos wrappingPos(pos, children)
- }
- }
- }
- } catch {
- case ex: Exception =>
- println("error while set children pos "+pos+" of "+trees)
- throw ex
- }
-
- /** Position a tree.
- * This means: Set position of a node and position all its unpositioned children.
- */
- override def atPos[T <: Tree](pos: Position)(tree: T): T = {
- if (pos.isOpaqueRange) {
- if (!tree.isEmpty && tree.pos == NoPosition) {
- tree.setPos(pos)
- val children = tree.children
- if (children.nonEmpty) {
- if (children.tail.isEmpty) atPos(pos)(children.head)
- else setChildrenPos(pos, children)
- }
- }
- tree
- } else {
- super.atPos(pos)(tree)
- }
- }
-
- // ---------------- Validating positions ----------------------------------
-
- override def validatePositions(tree: Tree) {
- def reportTree(prefix : String, tree : Tree) {
- val source = if (tree.pos.isDefined) tree.pos.source else ""
- inform("== "+prefix+" tree ["+tree.id+"] of type "+tree.productPrefix+" at "+tree.pos.show+source)
- inform("")
- inform(treeStatus(tree))
- inform("")
- }
-
- def positionError(msg: String)(body : => Unit) {
- inform("======= Position error\n" + msg)
- body
- inform("\nWhile validating #" + tree.id)
- inform(treeStatus(tree))
- inform("\nChildren:")
- tree.children map (t => " " + treeStatus(t, tree)) foreach inform
- inform("=======")
- throw new ValidateException(msg)
- }
-
- def validate(tree: Tree, encltree: Tree): Unit = {
-
- if (!tree.isEmpty) {
- if (settings.Yposdebug.value && (settings.verbose.value || settings.Yrangepos.value))
- println("[%10s] %s".format("validate", treeStatus(tree, encltree)))
-
- if (!tree.pos.isDefined)
- positionError("Unpositioned tree #"+tree.id) {
- inform("%15s %s".format("unpositioned", treeStatus(tree, encltree)))
- inform("%15s %s".format("enclosing", treeStatus(encltree)))
- encltree.children foreach (t => inform("%15s %s".format("sibling", treeStatus(t, encltree))))
- }
- if (tree.pos.isRange) {
- if (!encltree.pos.isRange)
- positionError("Synthetic tree ["+encltree.id+"] contains nonsynthetic tree ["+tree.id+"]") {
- reportTree("Enclosing", encltree)
- reportTree("Enclosed", tree)
- }
- if (!(encltree.pos includes tree.pos))
- positionError("Enclosing tree ["+encltree.id+"] does not include tree ["+tree.id+"]") {
- reportTree("Enclosing", encltree)
- reportTree("Enclosed", tree)
- }
-
- findOverlapping(tree.children flatMap solidDescendants) match {
- case List() => ;
- case xs => {
- positionError("Overlapping trees "+xs.map { case (x, y) => (x.id, y.id) }.mkString("", ", ", "")) {
- reportTree("Ancestor", tree)
- for((x, y) <- xs) {
- reportTree("First overlapping", x)
- reportTree("Second overlapping", y)
- }
- }
- }
- }
- }
- for (ct <- tree.children flatMap solidDescendants) validate(ct, tree)
- }
- }
-
- if (phase.id <= currentRun.typerPhase.id)
- validate(tree, tree)
- }
-
- class ValidateException(msg : String) extends Exception(msg)
-
- // ---------------- Locating trees ----------------------------------
-
- /** A locator for trees with given positions.
- * Given a position `pos`, locator.apply returns
- * the smallest tree that encloses `pos`.
- */
- class Locator(pos: Position) extends Traverser {
- var last: Tree = _
- def locateIn(root: Tree): Tree = {
- this.last = EmptyTree
- traverse(root)
- this.last
- }
- protected def isEligible(t: Tree) = !t.pos.isTransparent
- override def traverse(t: Tree) {
- t match {
- case tt : TypeTree if tt.original != null && (tt.pos includes tt.original.pos) =>
- traverse(tt.original)
- case _ =>
- if (t.pos includes pos) {
- if (isEligible(t)) last = t
- super.traverse(t)
- } else t match {
- case mdef: MemberDef =>
- traverseTrees(mdef.mods.annotations)
- case _ =>
- }
- }
- }
- }
-
- class TypedLocator(pos: Position) extends Locator(pos) {
- override protected def isEligible(t: Tree) = super.isEligible(t) && t.tpe != null
- }
-}
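
A simplified, standalone sketch of the overlap check that validatePositions relies on:
positions are modelled as plain integer spans, and every pair of spans that intersect is
reported, mirroring what findOverlapping does for opaque range positions. The Span type
and the sample spans are hypothetical.

object OverlapSketch {
  final case class Span(start: Int, end: Int) {
    def overlaps(that: Span): Boolean = start < that.end && that.start < end
  }

  // All pairs of spans that overlap, in the order they appear.
  def findOverlapping(spans: List[Span]): List[(Span, Span)] =
    for {
      (a, i) <- spans.zipWithIndex
      b      <- spans.drop(i + 1)
      if a overlaps b
    } yield (a, b)

  def main(args: Array[String]): Unit =
    // Span(0,5) and Span(3,8) intersect; Span(8,10) only touches Span(3,8), so it is not reported.
    println(findOverlapping(List(Span(0, 5), Span(3, 8), Span(8, 10))))
}
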
diff --git a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
deleted file mode 100644
index b2ef45a7d8..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
+++ /dev/null
@@ -1,355 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Iulian Dragos
- * @author Hubert Plocinicak
- */
-package scala.tools.nsc
-package interactive
-
-import scala.collection._
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import scala.util.control.Breaks._
-import scala.tools.nsc.symtab.Flags
-
-import dependencies._
-import scala.reflect.internal.util.FakePos
-import util.ClassPath
-import io.AbstractFile
-import scala.tools.util.PathResolver
-
- /** A more refined build manager, based on change sets. For each
- * updated source file, it computes the set of changes to its
- * definitions, then checks all dependent units to see if the
- * changes require a compilation. It repeats this process until
- * a fixpoint is reached.
- */
-@deprecated("Use sbt incremental compilation mechanism", "2.10.0")
-class RefinedBuildManager(val settings: Settings) extends Changes with BuildManager {
-
- class BuilderGlobal(settings: Settings, reporter : Reporter) extends scala.tools.nsc.Global(settings, reporter) {
-
- def this(settings: Settings) =
- this(settings, new ConsoleReporter(settings))
-
- override def computeInternalPhases() {
- super.computeInternalPhases
- phasesSet += dependencyAnalysis
- }
- lazy val _classpath = new NoSourcePathPathResolver(settings).result
- override def classPath = _classpath.asInstanceOf[ClassPath[platform.BinaryRepr]]
- // See discussion in JavaPlatform for why we need a cast here.
-
- def newRun() = new Run()
- }
-
- class NoSourcePathPathResolver(settings: Settings) extends PathResolver(settings) {
- override def containers = Calculated.basis.dropRight(1).flatten.distinct
- }
-
- protected def newCompiler(settings: Settings) = new BuilderGlobal(settings)
-
- val compiler = newCompiler(settings)
- import compiler.{ Symbol, Type, beforeErasure }
- import compiler.dependencyAnalysis.Inherited
-
- private case class SymWithHistory(sym: Symbol, befErasure: Type)
-
- /** Managed source files. */
- private val sources: mutable.Set[AbstractFile] = new mutable.HashSet[AbstractFile]
-
- private val definitions: mutable.Map[AbstractFile, List[SymWithHistory]] =
- new mutable.HashMap[AbstractFile, List[SymWithHistory]] {
- override def default(key: AbstractFile) = Nil
- }
-
- /** External references used by source file. */
- private var references: mutable.Map[AbstractFile, immutable.Set[String]] = _
-
- /** External references for inherited members */
- private var inherited: mutable.Map[AbstractFile, immutable.Set[Inherited]] = _
-
- /** Reverse of definitions, used for caching */
- private var classes: mutable.Map[String, AbstractFile] =
- new mutable.HashMap[String, AbstractFile] {
- override def default(key: String) = null
- }
-
- /** Add the given source files to the managed build process. */
- def addSourceFiles(files: Set[AbstractFile]) {
- sources ++= files
- update(files)
- }
-
- /** Remove the given files from the managed build process. */
- def removeFiles(files: Set[AbstractFile]) {
- sources --= files
- deleteClassfiles(files)
- update(invalidatedByRemove(files))
- }
-
- /** Return the set of invalidated files caused by removing the given files.
- */
- private def invalidatedByRemove(files: Set[AbstractFile]): Set[AbstractFile] = {
- val changes = new mutable.HashMap[Symbol, List[Change]]
- for (f <- files; SymWithHistory(sym, _) <- definitions(f))
- changes += sym -> List(Removed(Class(sym.fullName)))
- invalidated(files, changes)
- }
-
- def update(added: Set[AbstractFile], removed: Set[AbstractFile]) {
- sources --= removed
- deleteClassfiles(removed)
- update(added ++ invalidatedByRemove(removed))
- }
-
- /** The given files have been modified by the user. Recompile
- * them and all files that depend on them. Only files that
- * have been previously added as source files are recompiled.
- * Files that were already compiled are taken out from the result
- * of the dependency analysis.
- */
- private def update(files: Set[AbstractFile]) = {
- val coll: mutable.Map[AbstractFile, immutable.Set[AbstractFile]] =
- mutable.HashMap[AbstractFile, immutable.Set[AbstractFile]]()
- compiler.reporter.reset()
-
- // See if we really have corresponding symbols, not just those
- // which share the name
- def isCorrespondingSym(from: Symbol, to: Symbol): Boolean =
- (from.hasFlag(Flags.TRAIT) == to.hasFlag(Flags.TRAIT)) && // has to run in 2.8, so no hasTraitFlag
- (from.hasFlag(Flags.MODULE) == to.hasFlag(Flags.MODULE))
-
- // For testing purposes only, order irrelevant for compilation
- def toStringSet(set: Set[AbstractFile]): String =
- set.toList sortBy (_.name) mkString("Set(", ", ", ")")
-
- def update0(files: Set[AbstractFile]): Unit = if (!files.isEmpty) {
- deleteClassfiles(files)
- val run = compiler.newRun()
- if (settings.Ybuildmanagerdebug.value)
- compiler.inform("compiling " + toStringSet(files))
- buildingFiles(files)
-
- run.compileFiles(files.toList)
- if (compiler.reporter.hasErrors) {
- return
- }
-
- // Deterministic behaviour required by partest
- val changesOf = new mutable.HashMap[Symbol, List[Change]] {
- override def toString: String = {
- val changesOrdered =
- toList.map(e => {
- e._1.toString + " -> " +
- e._2.sortBy(_.toString).mkString("List(", ", ", ")")
- })
- changesOrdered.sorted.mkString("Map(", ", ", ")")
- }
- }
- val additionalDefs: mutable.HashSet[AbstractFile] = mutable.HashSet.empty
-
- val defs = compiler.dependencyAnalysis.definitions
- for (src <- files) {
- if (definitions(src).isEmpty)
- additionalDefs ++= compiler.dependencyAnalysis.
- dependencies.dependentFiles(1, mutable.Set(src))
- else {
- val syms = defs(src)
- for (sym <- syms) {
- definitions(src).find(
- s => (s.sym.fullName == sym.fullName) &&
- isCorrespondingSym(s.sym, sym)) match {
- case Some(SymWithHistory(oldSym, info)) =>
- val changes = changeSet(oldSym.info, sym)
- val changesErasure = beforeErasure(changeSet(info, sym))
-
- changesOf(oldSym) = (changes ++ changesErasure).distinct
- case _ =>
- // a new top level definition
- changesOf(sym) = sym.parentSymbols filter (_.isSealed) map (p =>
- changeChangeSet(p, sym+" extends a sealed "+p))
- }
- }
- // Create a change for the top level classes that were removed
- val removed = definitions(src) filterNot ((s:SymWithHistory) =>
- syms.find(_.fullName == (s.sym.fullName)) != None)
- for (s <- removed) {
- changesOf(s.sym) = List(removeChangeSet(s.sym))
- }
- }
- }
- if (settings.Ybuildmanagerdebug.value)
- compiler.inform("Changes: " + changesOf)
- updateDefinitions(files)
- val invalid = invalidated(files, changesOf, additionalDefs)
- update0(checkCycles(invalid, files, coll))
- }
-
- update0(files)
- // remove the current run in order to save some memory
- compiler.dropRun()
- }
-
- // Attempt to break cyclic reference dependencies as soon as possible and reduce
- // the number of compilations to a minimum without overly coarse-grained rules
- private def checkCycles(files: Set[AbstractFile], initial: Set[AbstractFile],
- collect: mutable.Map[AbstractFile, immutable.Set[AbstractFile]]):
- Set[AbstractFile] = {
- def followChain(set: Set[AbstractFile], rest: immutable.Set[AbstractFile]):
- immutable.Set[AbstractFile] = {
- val deps:Set[AbstractFile] = set.flatMap(
- s => collect.get(s) match {
- case Some(x) => x
- case _ => Set[AbstractFile]()
- })
- val newDeps = deps -- rest
- if (newDeps.isEmpty) rest else followChain(newDeps, rest ++ newDeps)
- }
- var res:Set[AbstractFile] = mutable.Set()
- files.foreach( f =>
- if (collect contains f) {
- val chain = followChain(Set(f), immutable.Set()) ++ files
- chain.foreach((fc: AbstractFile) => collect += fc -> chain)
- res ++= chain
- } else
- res += f
- )
-
- initial.foreach((f: AbstractFile) => collect += (f -> (collect.getOrElse(f, immutable.Set()) ++ res)))
- if (res.subsetOf(initial)) Set() else res
- }
-
- /** Return the set of source files that are invalidated by the given changes. */
- def invalidated(files: Set[AbstractFile], changesOf: scala.collection.Map[Symbol, List[Change]],
- processed: Set[AbstractFile] = Set.empty):
- Set[AbstractFile] = {
- val buf = new mutable.HashSet[AbstractFile]
- val newChangesOf = new mutable.HashMap[Symbol, List[Change]]
- var directDeps =
- compiler.dependencyAnalysis.dependencies.dependentFiles(1, files)
-
- def invalidate(file: AbstractFile, reason: String, change: Change) = {
- if (settings.Ybuildmanagerdebug.value)
- compiler.inform("invalidate " + file + " because " + reason + " [" + change + "]")
- buf += file
- directDeps -= file
- for (syms <- definitions(file)) // fixes #2557
- newChangesOf(syms.sym) = List(change, parentChangeSet(syms.sym))
- break
- }
-
- for ((oldSym, changes) <- changesOf; change <- changes) {
- def checkParents(cls: Symbol, file: AbstractFile) {
- val parentChange = cls.parentSymbols exists (_.fullName == oldSym.fullName)
- // if (settings.buildmanagerdebug.value)
- // compiler.inform("checkParents " + cls + " oldSym: " + oldSym + " parentChange: " + parentChange + " " + cls.info.parents)
- change match {
- case Changed(Class(_)) if parentChange =>
- invalidate(file, "parents have changed", change)
-
- case Changed(Definition(_)) if parentChange =>
- invalidate(file, "inherited method changed", change)
-
- case Added(Definition(_)) if parentChange =>
- invalidate(file, "inherited new method", change)
-
- case Removed(Definition(_)) if parentChange =>
- invalidate(file, "inherited method removed", change)
-
- case _ => ()
- }
- }
-
- def checkInterface(cls: Symbol, file: AbstractFile) {
- change match {
- case Added(Definition(name)) =>
- if (cls.info.decls.iterator.exists(_.fullName == name))
- invalidate(file, "of new method with existing name", change)
- case Changed(Class(name)) =>
- if (cls.info.typeSymbol.fullName == name)
- invalidate(file, "self type changed", change)
- case _ =>
- ()
- }
- }
-
- def checkReferences(file: AbstractFile) {
- //if (settings.buildmanagerdebug.value)
- // compiler.inform(file + ":" + references(file))
- val refs = references(file)
- if (refs.isEmpty)
- invalidate(file, "it is a direct dependency and we don't yet have finer-grained dependency information", change)
- else {
- change match {
- case Removed(Definition(name)) if refs(name) =>
- invalidate(file, "it references deleted definition", change)
- case Removed(Class(name)) if (refs(name)) =>
- invalidate(file, "it references deleted class", change)
- case Changed(Class(name)) if (refs(name)) =>
- invalidate(file, "it references changed class", change)
- case Changed(Definition(name)) if (refs(name)) =>
- invalidate(file, "it references changed definition", change)
- case Added(Definition(name)) if (refs(name)) =>
- invalidate(file, "it references added definition", change)
- case _ => ()
- }
- }
- }
-
- def checkInheritedReferences(file: AbstractFile) {
- val refs = inherited(file)
- if (!refs.isEmpty)
- change match {
- case ParentChanged(Class(name)) =>
- for (Inherited(q, member) <- refs.find(p => (p != null && p.qualifier == name));
- classFile <- classes.get(q);
- defs <- definitions.get(classFile);
- s <- defs.find(p => p.sym.fullName == q)
- if ((s.sym).tpe.nonPrivateMember(member) == compiler.NoSymbol))
- invalidate(file, "it references invalid (no longer inherited) definition", change)
- ()
- case _ => ()
- }
- }
-
- for (file <- directDeps) {
- breakable {
- for (cls <- definitions(file)) checkParents(cls.sym, file)
- for (cls <- definitions(file)) checkInterface(cls.sym, file)
- checkReferences(file)
- checkInheritedReferences(file)
- }
- }
- }
- if (buf.isEmpty)
- processed
- else
- invalidated(buf.clone() --= processed, newChangesOf, processed ++ buf)
- }
-
- /** Update the map of definitions per source file */
- private def updateDefinitions(files: Set[AbstractFile]) {
- for (src <- files; localDefs = compiler.dependencyAnalysis.definitions(src)) {
- definitions(src) = (localDefs map (s => {
- this.classes += s.fullName -> src
- SymWithHistory(s.cloneSymbol, beforeErasure(s.info.cloneInfo(s)))
- }))
- }
- this.references = compiler.dependencyAnalysis.references
- this.inherited = compiler.dependencyAnalysis.inherited
- }
-
- /** Load saved dependency information. */
- def loadFrom(file: AbstractFile, toFile: String => AbstractFile) : Boolean = {
- val success = compiler.dependencyAnalysis.loadFrom(file, toFile)
- if (success)
- sources ++= compiler.dependencyAnalysis.managedFiles
- success
- }
-
- /** Save dependency information to `file`. */
- def saveTo(file: AbstractFile, fromFile: AbstractFile => String) {
- compiler.dependencyAnalysis.dependenciesFile = file
- compiler.dependencyAnalysis.saveDependencies(fromFile)
- }
-}
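
A toy, self-contained sketch of the fixpoint iteration performed above: starting from a
set of changed definitions, every file referencing one of them is invalidated, the
definitions of those files are treated as changed in turn, and the process repeats until
no new file is invalidated. The reference and definition maps are made up.

object FixpointInvalidationSketch {
  // Which (hypothetical) definitions each source file references.
  val references: Map[String, Set[String]] = Map(
    "A.scala" -> Set.empty[String],
    "B.scala" -> Set("A"),
    "C.scala" -> Set("B")
  )
  // Which definitions each source file provides.
  val defines: Map[String, Set[String]] = Map(
    "A.scala" -> Set("A"),
    "B.scala" -> Set("B"),
    "C.scala" -> Set("C")
  )

  /** Files invalidated by the given changed definitions, iterated to a fixpoint. */
  def invalidated(changedDefs: Set[String], processed: Set[String] = Set.empty): Set[String] = {
    val hit = references.collect {
      case (file, refs) if !processed(file) && refs.exists(changedDefs) => file
    }.toSet
    if (hit.isEmpty) processed
    else invalidated(hit.flatMap(defines), processed ++ hit)
  }

  def main(args: Array[String]): Unit =
    println(invalidated(Set("A")))  // Set(B.scala, C.scala): B references A, C references B
}
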
diff --git a/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala
deleted file mode 100644
index 465dcaaf1c..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala
+++ /dev/null
@@ -1,103 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-import scala.collection._
-
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import dependencies._
-
-import scala.reflect.internal.util.FakePos
-import io.AbstractFile
-
-/** A simple build manager, using the default scalac dependency tracker.
- * The transitive closure of all files that depend on a modified file
- * is recompiled at once.
- *
- * It is equivalent to using a resident compiler mode with the
- * '-make:transitive' option.
- */
-class SimpleBuildManager(val settings: Settings) extends BuildManager {
-
- class BuilderGlobal(settings: Settings, reporter : Reporter) extends scala.tools.nsc.Global(settings, reporter) {
-
- def this(settings: Settings) =
- this(settings, new ConsoleReporter(settings))
-
- def newRun() = new Run()
- }
-
- protected def newCompiler(settings: Settings) = new BuilderGlobal(settings)
-
- val compiler = newCompiler(settings)
-
- /** Managed source files. */
- private val sources: mutable.Set[AbstractFile] = new mutable.HashSet[AbstractFile]
-
- /** Add the given source files to the managed build process. */
- def addSourceFiles(files: Set[AbstractFile]) {
- sources ++= files
- update(files)
- }
-
- /** Remove the given files from the managed build process. */
- def removeFiles(files: Set[AbstractFile]) {
- sources --= files
- deleteClassfiles(files)
- update(invalidatedByRemove(files))
- }
-
-
- /** Return the set of invalidated files caused by removing the given files. */
- private def invalidatedByRemove(files: Set[AbstractFile]): Set[AbstractFile] = {
- val deps = compiler.dependencyAnalysis.dependencies
- deps.dependentFiles(Int.MaxValue, files)
- }
-
- def update(added: Set[AbstractFile], removed: Set[AbstractFile]) {
- sources --= removed
- deleteClassfiles(removed)
- update(added ++ invalidatedByRemove(removed))
- }
-
- /** The given files have been modified by the user. Recompile
- * them and all files that depend on them. Only files that
- * have been previously added as source files are recompiled.
- */
- def update(files: Set[AbstractFile]) {
- deleteClassfiles(files)
-
- val deps = compiler.dependencyAnalysis.dependencies
- val run = compiler.newRun()
- compiler.inform("compiling " + files)
-
- val toCompile =
- (files ++ deps.dependentFiles(Int.MaxValue, files)) intersect sources
-
-
- compiler.inform("Recompiling " +
- (if(settings.debug.value) toCompile.mkString(", ")
- else toCompile.size + " files"))
-
- buildingFiles(toCompile)
-
- run.compileFiles(files.toList)
- }
-
- /** Load saved dependency information. */
- def loadFrom(file: AbstractFile, toFile: String => AbstractFile) : Boolean = {
- val success = compiler.dependencyAnalysis.loadFrom(file, toFile)
- if (success)
- sources ++= compiler.dependencyAnalysis.managedFiles
- success
- }
-
- /** Save dependency information to `file`. */
- def saveTo(file: AbstractFile, fromFile: AbstractFile => String) {
- compiler.dependencyAnalysis.dependenciesFile = file
- compiler.dependencyAnalysis.saveDependencies(fromFile)
- }
-}
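
A standalone sketch of the depth-bounded transitive closure of dependent files that this
manager requests from the dependency tracker (dependentFiles(Int.MaxValue, files) above).
The dependency map and file names are hypothetical.

object TransitiveDepsSketch {
  // target -> the files it depends on
  val dependencies: Map[String, Set[String]] = Map(
    "B.scala" -> Set("A.scala"),
    "C.scala" -> Set("B.scala"),
    "D.scala" -> Set("X.scala")
  )

  /** Files that transitively depend on any of `changed`, in at most `depth` rounds. */
  def dependentFiles(depth: Int, changed: Set[String]): Set[String] = {
    def go(i: Int, acc: Set[String]): Set[String] =
      if (i <= 0) acc
      else {
        val invalid = changed ++ acc
        val newly = dependencies.collect {
          case (target, deps) if !invalid(target) && deps.exists(invalid) => target
        }.toSet
        if (newly.isEmpty) acc else go(i - 1, acc ++ newly)
      }
    go(depth, Set.empty)
  }

  def main(args: Array[String]): Unit =
    println(dependentFiles(Int.MaxValue, Set("A.scala")))  // Set(B.scala, C.scala)
}
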
diff --git a/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala
deleted file mode 100644
index 1741a82775..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import CodeHandlers.NoSuccess
-import scala.util.control.ControlThrowable
-
-/**
- * The start of a simpler interface for utilizing the compiler with piecemeal
- * code strings. The "T" here could potentially be a Tree, a Type, a Symbol,
- * a Boolean, or something even more exotic.
- */
-trait CodeHandlers[T] {
- self =>
-
- // Expressions are composed of operators and operands.
- def expr(code: String): T
-
- // Statements occur as parts of blocks and templates.
- // A statement can be an import, a definition or an expression, or it can be empty.
- // Statements used in the template of a class definition can also be declarations.
- def stmt(code: String): T
- def stmts(code: String): Seq[T]
-
- object opt extends CodeHandlers[Option[T]] {
- val handler: PartialFunction[Throwable, Option[T]] = {
- case _: NoSuccess => None
- }
- val handlerSeq: PartialFunction[Throwable, Seq[Option[T]]] = {
- case _: NoSuccess => Nil
- }
-
- def expr(code: String) = try Some(self.expr(code)) catch handler
- def stmt(code: String) = try Some(self.stmt(code)) catch handler
- def stmts(code: String) = try (self.stmts(code) map (x => Some(x))) catch handlerSeq
- }
-}
-
-object CodeHandlers {
- def incomplete() = throw CodeIncomplete
- def fail(msg: String) = throw new CodeException(msg)
-
- trait NoSuccess extends ControlThrowable
- class CodeException(msg: String) extends RuntimeException(msg) with NoSuccess { }
- object CodeIncomplete extends CodeException("CodeIncomplete")
-}
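
A small standalone sketch of the pattern the opt wrapper above relies on: failures are
signalled with a ControlThrowable and turned into None by a partial-function handler.
Incomplete and parse are stand-ins here, not the real CodeHandlers API.

import scala.util.control.ControlThrowable

object OptWrapperSketch {
  class Incomplete extends ControlThrowable

  // Stand-in for CodeHandlers.expr/stmt: "fails" on input that looks unfinished.
  def parse(code: String): String =
    if (code.endsWith("}")) code else throw new Incomplete

  val handler: PartialFunction[Throwable, Option[String]] = {
    case _: Incomplete => None
  }

  def parseOpt(code: String): Option[String] = try Some(parse(code)) catch handler

  def main(args: Array[String]): Unit = {
    println(parseOpt("class C {}"))  // Some(class C {})
    println(parseOpt("class C {"))   // None
  }
}
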
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala b/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala
deleted file mode 100644
index e3c0494fa3..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala
+++ /dev/null
@@ -1,125 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.reflect.internal.util.Position
-import scala.util.control.Exception.ignoring
-import scala.tools.nsc.util.stackTraceString
-
-/**
- * Machinery for the asynchronous initialization of the repl.
- */
-trait ILoopInit {
- self: ILoop =>
-
- /** Print a welcome message */
- def printWelcome() {
- import Properties._
- val welcomeMsg =
- """|Welcome to Scala %s (%s, Java %s).
- |Type in expressions to have them evaluated.
- |Type :help for more information.""" .
- stripMargin.format(versionString, javaVmName, javaVersion)
- echo(welcomeMsg)
- replinfo("[info] started at " + new java.util.Date)
- }
-
- protected def asyncMessage(msg: String) {
- if (isReplInfo || isReplPower)
- echoAndRefresh(msg)
- }
-
- private val initLock = new java.util.concurrent.locks.ReentrantLock()
- private val initCompilerCondition = initLock.newCondition() // signal the compiler is initialized
- private val initLoopCondition = initLock.newCondition() // signal the whole repl is initialized
- private val initStart = System.nanoTime
-
- private def withLock[T](body: => T): T = {
- initLock.lock()
- try body
- finally initLock.unlock()
- }
- // a condition used to ensure serial access to the compiler.
- @volatile private var initIsComplete = false
- @volatile private var initError: String = null
- private def elapsed() = "%.3f".format((System.nanoTime - initStart).toDouble / 1000000000L)
-
- // the method to be called when the interpreter is initialized.
- // It is very important that this method does nothing synchronous (i.e. do
- // not try to use the interpreter) because until it returns, the
- // repl's lazy val `global` is still locked.
- protected def initializedCallback() = withLock(initCompilerCondition.signal())
-
- // Spins off a thread which awaits a single message once the interpreter
- // has been initialized.
- protected def createAsyncListener() = {
- io.spawn {
- withLock(initCompilerCondition.await())
- asyncMessage("[info] compiler init time: " + elapsed() + " s.")
- postInitialization()
- }
- }
-
- // called from main repl loop
- protected def awaitInitialized(): Boolean = {
- if (!initIsComplete)
- withLock { while (!initIsComplete) initLoopCondition.await() }
- if (initError != null) {
- println("""
- |Failed to initialize the REPL due to an unexpected error.
- |This is a bug; please report it along with the error diagnostics printed below.
- |%s.""".stripMargin.format(initError)
- )
- false
- } else true
- }
- // private def warningsThunks = List(
- // () => intp.bind("lastWarnings", "" + typeTag[List[(Position, String)]], intp.lastWarnings _),
- // )
-
- protected def postInitThunks = List[Option[() => Unit]](
- Some(intp.setContextClassLoader _),
- if (isReplPower) Some(() => enablePowerMode(true)) else None
- ).flatten
- // ++ (
- // warningsThunks
- // )
- // called once after init condition is signalled
- protected def postInitialization() {
- try {
- postInitThunks foreach (f => addThunk(f()))
- runThunks()
- } catch {
- case ex: Throwable =>
- initError = stackTraceString(ex)
- throw ex
- } finally {
- initIsComplete = true
-
- if (isAsync) {
- asyncMessage("[info] total init time: " + elapsed() + " s.")
- withLock(initLoopCondition.signal())
- }
- }
- }
- // code to be executed only after the interpreter is initialized
- // and the lazy val `global` can be accessed without risk of deadlock.
- private var pendingThunks: List[() => Unit] = Nil
- protected def addThunk(body: => Unit) = synchronized {
- pendingThunks :+= (() => body)
- }
- protected def runThunks(): Unit = synchronized {
- if (pendingThunks.nonEmpty)
- repldbg("Clearing " + pendingThunks.size + " thunks.")
-
- while (pendingThunks.nonEmpty) {
- val thunk = pendingThunks.head
- pendingThunks = pendingThunks.tail
- thunk()
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/package.scala b/src/compiler/scala/tools/nsc/interpreter/package.scala
deleted file mode 100644
index e3440c9f8b..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/package.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-
-import scala.language.implicitConversions
-
-/** The main REPL related classes and values are as follows.
- * In addition to standard compiler classes Global and Settings, there are:
- *
- * History: an interface for session history.
- * Completion: an interface for tab completion.
- * ILoop (formerly InterpreterLoop): The umbrella class for a session.
- * IMain (formerly Interpreter): handles the evolving state of the session,
- * submitting code to the compiler, and handling the output.
- * InteractiveReader: how ILoop obtains input.
- * Power: a repository for more advanced/experimental features.
- *
- * ILoop contains { in: InteractiveReader, intp: IMain, settings: Settings, power: Power }
- * InteractiveReader contains { history: History, completion: Completion }
- * IMain contains { global: Global }
- */
-package object interpreter extends ReplConfig with ReplStrings {
- type JFile = java.io.File
- type JClass = java.lang.Class[_]
- type JList[T] = java.util.List[T]
- type JCollection[T] = java.util.Collection[T]
- type JPrintWriter = java.io.PrintWriter
- type InputStream = java.io.InputStream
- type OutputStream = java.io.OutputStream
-
- val IR = Results
-
- implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
-
- private[interpreter] implicit def javaCharSeqCollectionToScala(xs: JCollection[_ <: CharSequence]): List[String] = {
- import scala.collection.JavaConverters._
- xs.asScala.toList map ("" + _)
- }
-
- private[nsc] implicit def enrichClass[T](clazz: Class[T]) = new RichClass[T](clazz)
- private[nsc] implicit def enrichAnyRefWithTap[T](x: T) = new TapMaker(x)
- private[nsc] def tracing[T](msg: String)(x: T): T = x.tapTrace(msg)
- private[nsc] def debugging[T](msg: String)(x: T) = x.tapDebug(msg)
-}
diff --git a/src/compiler/scala/tools/nsc/io/Fileish.scala b/src/compiler/scala/tools/nsc/io/Fileish.scala
deleted file mode 100644
index 7b4e385dd8..0000000000
--- a/src/compiler/scala/tools/nsc/io/Fileish.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package io
-
-import java.io.{ InputStream }
-import java.util.jar.JarEntry
-
-/** A common interface for File-based things and Stream-based things.
- * (In particular, io.File and JarEntry.)
- */
-class Fileish(val path: Path, val input: () => InputStream) extends Streamable.Chars {
- def inputStream() = input()
-
- def parent = path.parent
- def name = path.name
- def isSourceFile = path.hasExtension("java", "scala")
-
- private lazy val pkgLines = lines() collect { case x if x startsWith "package " => x stripPrefix "package" trim }
- lazy val pkgFromPath = parent.path.replaceAll("""[/\\]""", ".")
- lazy val pkgFromSource = pkgLines map (_ stripSuffix ";") mkString "."
-
- override def toString = path.path
-}
-
-object Fileish {
- def apply(f: File): Fileish = new Fileish(f, () => f.inputStream())
- def apply(f: JarEntry, in: () => InputStream): Fileish = new Fileish(Path(f.getName), in)
- def apply(path: String, in: () => InputStream): Fileish = new Fileish(Path(path), in)
-}
diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala
index e919621338..ee3e2b04d1 100644
--- a/src/compiler/scala/tools/nsc/io/Jar.scala
+++ b/src/compiler/scala/tools/nsc/io/Jar.scala
@@ -10,7 +10,6 @@ import java.io.{ InputStream, OutputStream, IOException, FileNotFoundException,
import java.util.jar._
import scala.collection.JavaConverters._
import Attributes.Name
-import util.ClassPath
import scala.language.implicitConversions
// Attributes.Name instances:
@@ -37,9 +36,6 @@ class Jar(file: File) extends Iterable[JarEntry] {
def this(jfile: JFile) = this(File(jfile))
def this(path: String) = this(File(path))
- protected def errorFn(msg: String): Unit = Console println msg
-
- lazy val jarFile = new JarFile(file.jfile)
lazy val manifest = withJarInput(s => Option(s.getManifest))
def mainClass = manifest map (f => f(Name.MAIN_CLASS))
@@ -51,6 +47,20 @@ class Jar(file: File) extends Iterable[JarEntry] {
case _ => Nil
}
+ /** Invoke f with an input stream for the named jar entry, or None if there is no such entry. */
+ def withEntryStream[A](name: String)(f: Option[InputStream] => A) = {
+ val jarFile = new JarFile(file.jfile)
+ def apply() =
+ jarFile getEntry name match {
+ case null => f(None)
+ case entry =>
+ val in = Some(jarFile getInputStream entry)
+ try f(in)
+ finally in map (_.close())
+ }
+ try apply() finally jarFile.close()
+ }
+
def withJarInput[T](f: JarInputStream => T): T = {
val in = new JarInputStream(file.inputStream())
try f(in)
@@ -64,12 +74,6 @@ class Jar(file: File) extends Iterable[JarEntry] {
Iterator continually in.getNextJarEntry() takeWhile (_ != null) foreach f
}
override def iterator: Iterator[JarEntry] = this.toList.iterator
- def fileishIterator: Iterator[Fileish] = jarFile.entries.asScala map (x => Fileish(x, () => getEntryStream(x)))
-
- private def getEntryStream(entry: JarEntry) = jarFile getInputStream entry match {
- case null => errorFn("No such entry: " + entry) ; null
- case x => x
- }
override def toString = "" + file
}
@@ -111,9 +115,9 @@ class JarWriter(val file: File, val manifest: Manifest) {
val buf = new Array[Byte](10240)
def loop(): Unit = in.read(buf, 0, buf.length) match {
case -1 => in.close()
- case n => out.write(buf, 0, n) ; loop
+ case n => out.write(buf, 0, n) ; loop()
}
- loop
+ loop()
}
def close() = out.close()
@@ -131,7 +135,6 @@ object Jar {
m
}
def apply(manifest: JManifest): WManifest = new WManifest(manifest)
- implicit def unenrichManifest(x: WManifest): JManifest = x.underlying
}
class WManifest(manifest: JManifest) {
for ((k, v) <- initialMainAttrs)
@@ -148,12 +151,7 @@ object Jar {
}
def apply(name: Attributes.Name): String = attrs(name)
- def apply(name: String): String = apply(new Attributes.Name(name))
def update(key: Attributes.Name, value: String) = attrs.put(key, value)
- def update(key: String, value: String) = attrs.put(new Attributes.Name(key), value)
-
- def mainClass: String = apply(Name.MAIN_CLASS)
- def mainClass_=(value: String) = update(Name.MAIN_CLASS, value)
}
// See http://download.java.net/jdk7/docs/api/java/nio/file/Path.html
@@ -161,7 +159,7 @@ object Jar {
private val ZipMagicNumber = List[Byte](80, 75, 3, 4)
private def magicNumberIsZip(f: Path) = f.isFile && (f.toFile.bytes().take(4).toList == ZipMagicNumber)
- def isJarOrZip(f: Path): Boolean = isJarOrZip(f, true)
+ def isJarOrZip(f: Path): Boolean = isJarOrZip(f, examineFile = true)
def isJarOrZip(f: Path, examineFile: Boolean): Boolean =
f.hasExtension("zip", "jar") || (examineFile && magicNumberIsZip(f))
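
A usage sketch for the withEntryStream method added above: the callback receives
Some(stream) for the named entry, or None if the jar has no such entry, and both the
stream and the jar are closed afterwards. The jar path below is hypothetical.

import scala.tools.nsc.io.Jar
import scala.io.Source

object JarEntrySketch {
  def main(args: Array[String]): Unit = {
    val jar = new Jar("/tmp/example.jar")  // hypothetical path
    val manifestText = jar.withEntryStream("META-INF/MANIFEST.MF") {
      case Some(in) => Source.fromInputStream(in).mkString
      case None     => "<no such entry>"
    }
    println(manifestText)
  }
}
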
diff --git a/src/compiler/scala/tools/nsc/io/Lexer.scala b/src/compiler/scala/tools/nsc/io/Lexer.scala
index 5ffb5b4d4f..7c6dbe2e60 100644
--- a/src/compiler/scala/tools/nsc/io/Lexer.scala
+++ b/src/compiler/scala/tools/nsc/io/Lexer.scala
@@ -1,16 +1,14 @@
package scala.tools.nsc.io
-import java.io.{Reader, Writer, StringReader, StringWriter}
-import scala.collection.mutable.{Buffer, ArrayBuffer}
-import scala.math.BigInt
+import java.io.Reader
/** Companion object of class `Lexer` which defines tokens and some utility concepts
* used for tokens and lexers
*/
object Lexer {
- /** An exception raised if a if input does not correspond to what's expected
- * @param rdr the lexer form which the bad input is read
+ /** An exception raised if an input does not correspond to what's expected
+ * @param rdr the lexer from which the bad input is read
* @param msg the error message
*/
class MalformedInput(val rdr: Lexer, val msg: String) extends Exception("Malformed JSON input at "+rdr.tokenPos+": "+msg)
@@ -50,7 +48,7 @@ object Lexer {
/** The '`(`' token */
val LParen = new Delim('(')
- /** The '`(`' token */
+ /** The '`)`' token */
val RParen = new Delim(')')
/** The '`{`' token */
@@ -280,7 +278,7 @@ class Lexer(rd: Reader) {
/** The current token is a delimiter consisting of given character, reads next token,
* otherwise raises an error.
- * @param c the given delimiter character to compare current token with
+ * @param ch the given delimiter character to compare current token with
* @throws MalformedInput if the current token `token` is not a delimiter, or
* consists of a character different from `c`.
*/
diff --git a/src/compiler/scala/tools/nsc/io/MsilFile.scala b/src/compiler/scala/tools/nsc/io/MsilFile.scala
deleted file mode 100644
index 2f0a71fc60..0000000000
--- a/src/compiler/scala/tools/nsc/io/MsilFile.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package io
-
-import ch.epfl.lamp.compiler.msil.{ Type => MsilType, _ }
-
-/** This class wraps an MsilType. It exists only so
- * ClassPath can treat all of JVM/MSIL/bin/src files
- * uniformly, as AbstractFiles.
- */
-class MsilFile(val msilType: MsilType) extends VirtualFile(msilType.FullName, msilType.Namespace) {
-}
-
-object NoMsilFile extends MsilFile(null) { }
diff --git a/src/compiler/scala/tools/nsc/io/Pickler.scala b/src/compiler/scala/tools/nsc/io/Pickler.scala
index b03a921e87..0e7da37c52 100644
--- a/src/compiler/scala/tools/nsc/io/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/io/Pickler.scala
@@ -1,6 +1,5 @@
package scala.tools.nsc.io
-import scala.annotation.unchecked
import Lexer._
import java.io.Writer
import scala.language.implicitConversions
@@ -19,7 +18,7 @@ import scala.reflect.ClassTag
* Subclasses of `Pickler` each can write and read individual classes
* of values.
*
- * @param T the type of values handled by this pickler.
+ * @tparam T the type of values handled by this pickler.
*
* These Picklers build on the work of Andrew Kennedy. They are most closely inspired by
* Iulian Dragos' picklers for Scala to XML. See:
@@ -71,17 +70,9 @@ abstract class Pickler[T] {
*/
def wrapped [U] (in: T => U)(out: U => T): Pickler[U] = wrappedPickler(this)(in)(out)
- /** A pickler obtained from the current pickler by also admitting `null` as
- * a handled value, represented as the token `null`.
- *
- * @param fromNull an implicit evidence parameter ensuring that the type of values
- * handled by this pickler contains `null`.
- */
- def orNull(implicit fromNull: Null <:< T): Pickler[T] = nullablePickler(this)
-
/** A conditional pickler obtained from the current pickler.
- * @param cond the condition to test to find out whether pickler can handle
- * some Scala value.
+ * @param p the condition to test to find out whether pickler can handle
+ * some Scala value.
*/
def cond(p: Any => Boolean): CondPickler[T] = conditionalPickler(this, p)
@@ -93,13 +84,10 @@ abstract class Pickler[T] {
}
object Pickler {
-
- var picklerDebugMode = false
-
/** A base class representing unpickler result. It has two subclasses:
* `UnpickleSuccess` for successful unpicklings and `UnpickleFailure` for failures,
* where a value of the given type `T` could not be unpickled from input.
- * @param T the type of unpickled values in case of success.
+ * @tparam T the type of unpickled values in case of success.
*/
abstract class Unpickled[+T] {
/** Transforms success values to success values using given function,
@@ -137,7 +125,7 @@ object Pickler {
}
/** A class representing successful unpicklings
- * @param T the type of the unpickled value
+ * @tparam T the type of the unpickled value
* @param result the unpickled value
*/
case class UnpickleSuccess[+T](result: T) extends Unpickled[T]
@@ -175,22 +163,11 @@ object Pickler {
def ~ [T](y: T): S ~ T = new ~ (x, y)
}
- /** A converter from binary functions to functions over `~`-pairs
- */
- implicit def fromTilde[T1, T2, R](f: (T1, T2) => R): T1 ~ T2 => R = { case x1 ~ x2 => f(x1, x2) }
-
- /** A converter from functions returning Options over pairs to functions returning `~`-pairs
- * The converted function will raise a `MatchError` where the original function returned
- * a `None`. This converter is useful for turning `unapply` methods of case classes
- * into wrapper methods that can be passed as second argument to `wrap`.
- */
- implicit def toTilde[T1, T2, S](f: S => Option[(T1, T2)]): S => T1 ~ T2 = { x => (f(x): @unchecked) match { case Some((x1, x2)) => x1 ~ x2 } }
-
/** Same as `p.labelled(label)`.
*/
def labelledPickler[T](label: String, p: Pickler[T]): Pickler[T] = new Pickler[T] {
def pickle(wr: Writer, x: T) = {
- wr.write(quoted(label));
+ wr.write(quoted(label))
wr.write("(")
p.pickle(wr, x)
wr.write(")")
@@ -249,16 +226,6 @@ object Pickler {
def unpickle(rd: Lexer) = p.unpickle(rd) orElse qq.unpickle(rd)
}
- /** Same as `p.orNull`
- */
- def nullablePickler[T](p: Pickler[T])(implicit fromNull: Null <:< T): Pickler[T] = new Pickler[T] {
- def pickle(wr: Writer, x: T) =
- if (x == null) wr.write("null") else p.pickle(wr, x)
- def unpickle(rd: Lexer): Unpickled[T] =
- if (rd.token == NullLit) nextSuccess(rd, fromNull(null))
- else p.unpickle(rd)
- }
-
/** A conditional pickler for singleton objects. It represents these
* with the object's underlying class as a label.
* Example: Object scala.None would be represented as `scala.None$()`.
@@ -330,22 +297,9 @@ object Pickler {
implicit val longPickler: Pickler[Long] =
tokenPickler("integer literal") { case IntLit(s) => s.toLong }
- /** A pickler for values of type `Double`, represented as floating point literals */
- implicit val doublePickler: Pickler[Double] =
- tokenPickler("floating point literal") { case FloatLit(s) => s.toDouble }
-
- /** A pickler for values of type `Byte`, represented as integer literals */
- implicit val bytePickler: Pickler[Byte] = longPickler.wrapped { _.toByte } { _.toLong }
-
- /** A pickler for values of type `Short`, represented as integer literals */
- implicit val shortPickler: Pickler[Short] = longPickler.wrapped { _.toShort } { _.toLong }
-
/** A pickler for values of type `Int`, represented as integer literals */
implicit val intPickler: Pickler[Int] = longPickler.wrapped { _.toInt } { _.toLong }
- /** A pickler for values of type `Float`, represented as floating point literals */
- implicit val floatPickler: Pickler[Float] = doublePickler.wrapped { _.toFloat } { _.toLong }
-
/** A conditional pickler for the boolean value `true` */
private val truePickler =
tokenPickler("boolean literal") { case TrueLit => true } cond { _ == true }
@@ -373,11 +327,6 @@ object Pickler {
}
}
- /** A pickler for values of type `Char`, represented as string literals of length 1 */
- implicit val charPickler: Pickler[Char] =
- stringPickler
- .wrapped { s => require(s.length == 1, "single character string literal expected, but "+quoted(s)+" found"); s(0) } { _.toString }
-
/** A pickler for pairs, represented as `~`-pairs */
implicit def tuple2Pickler[T1: Pickler, T2: Pickler]: Pickler[(T1, T2)] =
(pkl[T1] ~ pkl[T2])
@@ -390,35 +339,9 @@ object Pickler {
.wrapped { case x1 ~ x2 ~ x3 => (x1, x2, x3) } { case (x1, x2, x3) => x1 ~ x2 ~ x3 }
.labelled ("tuple3")
- /** A pickler for 4-tuples, represented as `~`-tuples */
- implicit def tuple4Pickler[T1, T2, T3, T4](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3], p4: Pickler[T4]): Pickler[(T1, T2, T3, T4)] =
- (p1 ~ p2 ~ p3 ~ p4)
- .wrapped { case x1 ~ x2 ~ x3 ~ x4 => (x1, x2, x3, x4) } { case (x1, x2, x3, x4) => x1 ~ x2 ~ x3 ~ x4 }
- .labelled ("tuple4")
-
- /** A conditional pickler for the `scala.None` object */
- implicit val nonePickler = singletonPickler(None)
-
- /** A conditional pickler for instances of class `scala.Some` */
- implicit def somePickler[T: Pickler]: CondPickler[Some[T]] =
- pkl[T]
- .wrapped { Some(_) } { _.get }
- .asClass (classOf[Some[T]])
-
- /** A pickler for optional values */
- implicit def optionPickler[T: Pickler]: Pickler[Option[T]] = nonePickler | somePickler[T]
-
/** A pickler for list values */
implicit def listPickler[T: Pickler]: Pickler[List[T]] =
iterPickler[T] .wrapped { _.toList } { _.iterator } .labelled ("scala.List")
-
- /** A pickler for vector values */
- implicit def vectorPickler[T: Pickler]: Pickler[Vector[T]] =
- iterPickler[T] .wrapped { Vector() ++ _ } { _.iterator } .labelled ("scala.Vector")
-
- /** A pickler for array values */
- implicit def array[T : ClassTag : Pickler]: Pickler[Array[T]] =
- iterPickler[T] .wrapped { _.toArray} { _.iterator } .labelled ("scala.Array")
}
/** A subclass of Pickler can indicate whether a particular value can be pickled by instances
@@ -444,8 +367,8 @@ abstract class CondPickler[T](val canPickle: Any => Boolean) extends Pickler[T]
* To unpickle a value, this unpickler is tried first. If it cannot read
* the input (as indicated by a `UnpickleFailure` result), then the
* alternative pickler is tried.
- * @param V The handled type of the returned pickler.
- * @param U The handled type of the alternative pickler.
+ * @tparam V The handled type of the returned pickler.
+ * @tparam U The handled type of the alternative pickler.
* @param that The alternative pickler.
*/
def | [V >: T, U <: V] (that: => CondPickler[U]): CondPickler[V] =
diff --git a/src/compiler/scala/tools/nsc/io/Replayer.scala b/src/compiler/scala/tools/nsc/io/Replayer.scala
index 5cb61b6cb1..e3dc8939a3 100644
--- a/src/compiler/scala/tools/nsc/io/Replayer.scala
+++ b/src/compiler/scala/tools/nsc/io/Replayer.scala
@@ -3,7 +3,7 @@ package scala.tools.nsc.io
import java.io.{Reader, Writer}
import Pickler._
-import Lexer.{Token, EOF}
+import Lexer.EOF
abstract class LogReplay {
def logreplay(event: String, x: => Boolean): Boolean
diff --git a/src/compiler/scala/tools/nsc/io/Socket.scala b/src/compiler/scala/tools/nsc/io/Socket.scala
index e766c1b2fd..4925c50d85 100644
--- a/src/compiler/scala/tools/nsc/io/Socket.scala
+++ b/src/compiler/scala/tools/nsc/io/Socket.scala
@@ -28,13 +28,10 @@ object Socket {
private val optHandler = handlerFn[Option[T]](_ => None)
private val eitherHandler = handlerFn[Either[Throwable, T]](x => Left(x))
- def getOrElse[T1 >: T](alt: T1): T1 = opt getOrElse alt
def either: Either[Throwable, T] = try Right(f()) catch eitherHandler
def opt: Option[T] = try Some(f()) catch optHandler
}
- def newIPv4Server(port: Int = 0) = new Box(() => preferringIPv4(new ServerSocket(0)))
- def newServer(port: Int = 0) = new Box(() => new ServerSocket(0))
def localhost(port: Int) = apply(InetAddress.getLocalHost(), port)
def apply(host: InetAddress, port: Int) = new Box(() => new Socket(new JSocket(host, port)))
def apply(host: String, port: Int) = new Box(() => new Socket(new JSocket(host, port)))
@@ -62,4 +59,4 @@ class Socket(jsocket: JSocket) extends Streamable.Bytes with Closeable {
out.close()
}
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/io/SourceReader.scala b/src/compiler/scala/tools/nsc/io/SourceReader.scala
index 569270f530..3220c2e2b2 100644
--- a/src/compiler/scala/tools/nsc/io/SourceReader.scala
+++ b/src/compiler/scala/tools/nsc/io/SourceReader.scala
@@ -9,7 +9,7 @@ package io
import java.io.{ FileInputStream, InputStream, IOException }
import java.nio.{ByteBuffer, CharBuffer}
-import java.nio.channels.{FileChannel, ReadableByteChannel, Channels}
+import java.nio.channels.{ ReadableByteChannel, Channels }
import java.nio.charset.{CharsetDecoder, CoderResult}
import scala.tools.nsc.reporters._
@@ -33,9 +33,6 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) {
"Please try specifying another one using the -encoding option")
}
- /** Reads the file with the specified name. */
- def read(filename: String): Array[Char]= read(new JFile(filename))
-
/** Reads the specified file. */
def read(file: JFile): Array[Char] = {
val c = new FileInputStream(file).getChannel
@@ -77,7 +74,7 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) {
protected def read(bytes: ByteBuffer): Array[Char] = {
val decoder: CharsetDecoder = this.decoder.reset()
val chars: CharBuffer = this.chars; chars.clear()
- terminate(flush(decoder, decode(decoder, bytes, chars, true)))
+ terminate(flush(decoder, decode(decoder, bytes, chars, endOfInput = true)))
}
//########################################################################
diff --git a/src/compiler/scala/tools/nsc/io/package.scala b/src/compiler/scala/tools/nsc/io/package.scala
index 711696bb6e..0b2db115fb 100644
--- a/src/compiler/scala/tools/nsc/io/package.scala
+++ b/src/compiler/scala/tools/nsc/io/package.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
import java.util.concurrent.{ Future, Callable }
import java.util.{ Timer, TimerTask }
-import java.util.jar.{ Attributes }
import scala.language.implicitConversions
package object io {
@@ -21,14 +20,10 @@ package object io {
type Path = scala.reflect.io.Path
val Path = scala.reflect.io.Path
type PlainFile = scala.reflect.io.PlainFile
- val PlainFile = scala.reflect.io.PlainFile
val Streamable = scala.reflect.io.Streamable
type VirtualDirectory = scala.reflect.io.VirtualDirectory
type VirtualFile = scala.reflect.io.VirtualFile
- val ZipArchive = scala.reflect.io.ZipArchive
type ZipArchive = scala.reflect.io.ZipArchive
-
- implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
type JManifest = java.util.jar.Manifest
type JFile = java.io.File
@@ -39,23 +34,11 @@ package object io {
def runnable(body: => Unit): Runnable = new Runnable { override def run() = body }
def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body }
def spawn[T](body: => T): Future[T] = daemonThreadPool submit callable(body)
- def submit(runnable: Runnable) = daemonThreadPool submit runnable
- // Create, start, and return a daemon thread
- def daemonize(body: => Unit): Thread = newThread(_ setDaemon true)(body)
def newThread(f: Thread => Unit)(body: => Unit): Thread = {
val thread = new Thread(runnable(body))
f(thread)
thread.start
thread
}
-
- // Set a timer to execute the given code.
- def timer(seconds: Int)(body: => Unit): Timer = {
- val alarm = new Timer(true) // daemon
- val tt = new TimerTask { def run() = body }
-
- alarm.schedule(tt, seconds * 1000)
- alarm
- }
}
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index 43a8402fc7..cf40fe90fa 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -35,7 +35,6 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
abstract class JavaParser extends ParserCommon {
val in: JavaScanner
- protected def posToReport: Int = in.currentPos
def freshName(prefix : String): Name
protected implicit def i2p(offset : Int) : Position
private implicit def p2i(pos : Position): Int = if (pos.isDefined) pos.point else -1
@@ -75,7 +74,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
nbraces += 1
case _ =>
}
- in.nextToken
+ in.nextToken()
}
}
@@ -94,11 +93,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
if (skipIt)
skip()
}
- def warning(msg: String) : Unit = warning(in.currentPos, msg)
-
def errorTypeTree = TypeTree().setType(ErrorType) setPos in.currentPos
- def errorTermTree = Literal(Constant(null)) setPos in.currentPos
- def errorPatternTree = blankExpr setPos in.currentPos
// --------- tree building -----------------------------
@@ -130,7 +125,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def makeSyntheticParam(count: Int, tpt: Tree): ValDef =
makeParam(nme.syntheticParamName(count), tpt)
def makeParam(name: String, tpt: Tree): ValDef =
- makeParam(newTypeName(name), tpt)
+ makeParam(name: TermName, tpt)
def makeParam(name: TermName, tpt: Tree): ValDef =
ValDef(Modifiers(Flags.JAVA | Flags.PARAM), name, tpt, EmptyTree)
@@ -153,7 +148,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
nbraces += 1
case _ =>
}
- in.nextToken
+ in.nextToken()
in.token match {
case RPAREN =>
nparens -= 1
@@ -168,7 +163,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
while (!(tokens contains in.token) && in.token != EOF) {
if (in.token == LBRACE) { skipAhead(); accept(RBRACE) }
else if (in.token == LPAREN) { skipAhead(); accept(RPAREN) }
- else in.nextToken
+ else in.nextToken()
}
}
@@ -178,18 +173,14 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def accept(token: Int): Int = {
val pos = in.currentPos
if (in.token != token) {
- val posToReport =
- //if (in.currentPos.line(unit.source).get(0) > in.lastPos.line(unit.source).get(0))
- // in.lastPos
- //else
- in.currentPos
+ val posToReport = in.currentPos
val msg =
JavaScannerConfiguration.token2string(token) + " expected but " +
JavaScannerConfiguration.token2string(in.token) + " found."
- syntaxError(posToReport, msg, true)
+ syntaxError(posToReport, msg, skipIt = true)
}
- if (in.token == token) in.nextToken
+ if (in.token == token) in.nextToken()
pos
}
@@ -209,7 +200,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def ident(): Name =
if (in.token == IDENTIFIER) {
val name = in.name
- in.nextToken
+ in.nextToken()
name
} else {
accept(IDENTIFIER)
@@ -219,7 +210,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def repsep[T <: Tree](p: () => T, sep: Int): List[T] = {
val buf = ListBuffer[T](p())
while (in.token == sep) {
- in.nextToken
+ in.nextToken()
buf += p()
}
buf.toList
@@ -233,7 +224,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
case AppliedTypeTree(_, _) | ExistentialTypeTree(_, _) | SelectFromTypeTree(_, _) =>
tree
case _ =>
- syntaxError(tree.pos, "identifier expected", false)
+ syntaxError(tree.pos, "identifier expected", skipIt = false)
errorTypeTree
}
}
@@ -243,7 +234,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def qualId(): RefTree = {
var t: RefTree = atPos(in.currentPos) { Ident(ident()) }
while (in.token == DOT) {
- in.nextToken
+ in.nextToken()
t = atPos(in.currentPos) { Select(t, ident()) }
}
t
@@ -252,7 +243,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def optArrayBrackets(tpt: Tree): Tree =
if (in.token == LBRACKET) {
val tpt1 = atPos(in.pos) { arrayOf(tpt) }
- in.nextToken
+ in.nextToken()
accept(RBRACKET)
optArrayBrackets(tpt1)
} else tpt
@@ -260,21 +251,21 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def basicType(): Tree =
atPos(in.pos) {
in.token match {
- case BYTE => in.nextToken; TypeTree(ByteClass.tpe)
- case SHORT => in.nextToken; TypeTree(ShortClass.tpe)
- case CHAR => in.nextToken; TypeTree(CharClass.tpe)
- case INT => in.nextToken; TypeTree(IntClass.tpe)
- case LONG => in.nextToken; TypeTree(LongClass.tpe)
- case FLOAT => in.nextToken; TypeTree(FloatClass.tpe)
- case DOUBLE => in.nextToken; TypeTree(DoubleClass.tpe)
- case BOOLEAN => in.nextToken; TypeTree(BooleanClass.tpe)
- case _ => syntaxError("illegal start of type", true); errorTypeTree
+ case BYTE => in.nextToken(); TypeTree(ByteClass.tpe)
+ case SHORT => in.nextToken(); TypeTree(ShortClass.tpe)
+ case CHAR => in.nextToken(); TypeTree(CharClass.tpe)
+ case INT => in.nextToken(); TypeTree(IntClass.tpe)
+ case LONG => in.nextToken(); TypeTree(LongClass.tpe)
+ case FLOAT => in.nextToken(); TypeTree(FloatClass.tpe)
+ case DOUBLE => in.nextToken(); TypeTree(DoubleClass.tpe)
+ case BOOLEAN => in.nextToken(); TypeTree(BooleanClass.tpe)
+ case _ => syntaxError("illegal start of type", skipIt = true); errorTypeTree
}
}
def typ(): Tree =
optArrayBrackets {
- if (in.token == FINAL) in.nextToken
+ if (in.token == FINAL) in.nextToken()
if (in.token == IDENTIFIER) {
var t = typeArgs(atPos(in.currentPos)(Ident(ident())))
// typeSelect generates Select nodes if the lhs is an Ident or Select,
@@ -287,7 +278,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
case _ => SelectFromTypeTree(t, name.toTypeName)
}
while (in.token == DOT) {
- in.nextToken
+ in.nextToken()
t = typeArgs(atPos(in.currentPos)(typeSelect(t, ident())))
}
convertToTypeId(t)
@@ -301,14 +292,14 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def typeArg(): Tree =
if (in.token == QMARK) {
val pos = in.currentPos
- in.nextToken
+ in.nextToken()
var lo: Tree = TypeTree(NothingClass.tpe)
var hi: Tree = TypeTree(AnyClass.tpe)
if (in.token == EXTENDS) {
- in.nextToken
+ in.nextToken()
hi = typ()
} else if (in.token == SUPER) {
- in.nextToken
+ in.nextToken()
lo = typ()
}
val tdef = atPos(pos) {
@@ -324,7 +315,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
typ()
}
if (in.token == LT) {
- in.nextToken
+ in.nextToken()
val t1 = convertToTypeId(t)
val args = repsep(typeArg, COMMA)
acceptClosingAngle()
@@ -339,7 +330,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def annotations(): List[Tree] = {
//var annots = new ListBuffer[Tree]
while (in.token == AT) {
- in.nextToken
+ in.nextToken()
annotation()
}
List() // don't pass on annotations for now
@@ -348,46 +339,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
/** Annotation ::= TypeName [`(` AnnotationArgument {`,` AnnotationArgument} `)`]
*/
def annotation() {
- val pos = in.currentPos
- var t = qualId()
+ qualId()
if (in.token == LPAREN) { skipAhead(); accept(RPAREN) }
else if (in.token == LBRACE) { skipAhead(); accept(RBRACE) }
}
-/*
- def annotationArg() = {
- val pos = in.token
- if (in.token == IDENTIFIER && in.lookaheadToken == ASSIGN) {
- val name = ident()
- accept(ASSIGN)
- atPos(pos) {
- ValDef(Modifiers(Flags.JAVA), name, TypeTree(), elementValue())
- }
- } else {
- elementValue()
- }
- }
-
- def elementValue(): Tree =
- if (in.token == AT) annotation()
- else if (in.token == LBRACE) elementValueArrayInitializer()
- else expression1()
-
- def elementValueArrayInitializer() = {
- accept(LBRACE)
- val buf = new ListBuffer[Tree]
- def loop() =
- if (in.token != RBRACE) {
- buf += elementValue()
- if (in.token == COMMA) {
- in.nextToken
- loop()
- }
- }
- loop()
- accept(RBRACE)
- buf.toList
- }
- */
def modifiers(inInterface: Boolean): Modifiers = {
var flags: Long = Flags.JAVA
@@ -399,38 +354,38 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
while (true) {
in.token match {
case AT if (in.lookaheadToken != INTERFACE) =>
- in.nextToken
+ in.nextToken()
annotation()
case PUBLIC =>
isPackageAccess = false
- in.nextToken
+ in.nextToken()
case PROTECTED =>
flags |= Flags.PROTECTED
- in.nextToken
+ in.nextToken()
case PRIVATE =>
isPackageAccess = false
flags |= Flags.PRIVATE
- in.nextToken
+ in.nextToken()
case STATIC =>
flags |= Flags.STATIC
- in.nextToken
+ in.nextToken()
case ABSTRACT =>
flags |= Flags.ABSTRACT
- in.nextToken
+ in.nextToken()
case FINAL =>
flags |= Flags.FINAL
- in.nextToken
+ in.nextToken()
case NATIVE =>
addAnnot(NativeAttr)
- in.nextToken
+ in.nextToken()
case TRANSIENT =>
addAnnot(TransientAttr)
- in.nextToken
+ in.nextToken()
case VOLATILE =>
addAnnot(VolatileAttr)
- in.nextToken
+ in.nextToken()
case SYNCHRONIZED | STRICTFP =>
- in.nextToken
+ in.nextToken()
case _ =>
val privateWithin: TypeName =
if (isPackageAccess && !inInterface) thisPackageName
@@ -444,7 +399,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def typeParams(): List[TypeDef] =
if (in.token == LT) {
- in.nextToken
+ in.nextToken()
val tparams = repsep(typeParam, COMMA)
acceptClosingAngle()
tparams
@@ -455,7 +410,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val name = identForType()
val hi =
if (in.token == EXTENDS) {
- in.nextToken
+ in.nextToken()
bound()
} else {
scalaDot(tpnme.Any)
@@ -468,7 +423,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
atPos(in.currentPos) {
val buf = ListBuffer[Tree](typ())
while (in.token == AMP) {
- in.nextToken
+ in.nextToken()
buf += typ()
}
val ts = buf.toList
@@ -484,21 +439,21 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
}
def formalParam(): ValDef = {
- if (in.token == FINAL) in.nextToken
+ if (in.token == FINAL) in.nextToken()
annotations()
var t = typ()
if (in.token == DOTDOTDOT) {
- in.nextToken
+ in.nextToken()
t = atPos(t.pos) {
AppliedTypeTree(scalaDot(tpnme.JAVA_REPEATED_PARAM_CLASS_NAME), List(t))
}
}
- varDecl(in.currentPos, Modifiers(Flags.JAVA | Flags.PARAM), t, ident())
+ varDecl(in.currentPos, Modifiers(Flags.JAVA | Flags.PARAM), t, ident().toTermName)
}
def optThrows() {
if (in.token == THROWS) {
- in.nextToken
+ in.nextToken()
repsep(typ, COMMA)
}
}
@@ -517,7 +472,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val isVoid = in.token == VOID
var rtpt =
if (isVoid) {
- in.nextToken
+ in.nextToken()
TypeTree(UnitClass.tpe) setPos in.pos
} else typ()
var pos = in.currentPos
@@ -551,7 +506,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
if (parentToken == AT && in.token == DEFAULT) {
val annot =
atPos(pos) {
- New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), ListOfNil)
+ New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), Nil)
}
mods1 = mods1 withAnnotations List(annot)
skipTo(SEMI)
@@ -587,18 +542,18 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
* these potential definitions are real or not.
*/
def fieldDecls(pos: Position, mods: Modifiers, tpt: Tree, name: Name): List[Tree] = {
- val buf = ListBuffer[Tree](varDecl(pos, mods, tpt, name))
+ val buf = ListBuffer[Tree](varDecl(pos, mods, tpt, name.toTermName))
val maybe = new ListBuffer[Tree] // potential variable definitions.
while (in.token == COMMA) {
- in.nextToken
+ in.nextToken()
if (in.token == IDENTIFIER) { // if there's an ident after the comma ...
val name = ident()
if (in.token == ASSIGN || in.token == SEMI) { // ... followed by a `=` or `;`, we know it's a real variable definition
buf ++= maybe
- buf += varDecl(in.currentPos, mods, tpt.duplicate, name)
+ buf += varDecl(in.currentPos, mods, tpt.duplicate, name.toTermName)
maybe.clear()
} else if (in.token == COMMA) { // ... if there's a comma after the ident, it could be a real vardef or not.
- maybe += varDecl(in.currentPos, mods, tpt.duplicate, name)
+ maybe += varDecl(in.currentPos, mods, tpt.duplicate, name.toTermName)
} else { // ... if there's something else, we were still in the initializer of the
// previous var def; skip to next comma or semicolon.
skipTo(COMMA, SEMI)
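A concrete case of the ambiguity handled above: in `int a = b + 1, c, d = f(x), e;` only a, c, d and e are declarators. Below is a rough token-level sketch of the decision rule, under simplified assumptions (a hypothetical standalone helper, not the parser itself; the real code cannot re-scan, so it parks candidates in the `maybe` buffer until a `=` or `;` settles them): an identifier counts as a declarator only if `=`, `,` or `;` follows it, and after `=` the initializer is skipped up to the next top-level separator.

object FieldDeclSketch {
  import scala.collection.mutable.ListBuffer

  // `tokens` is the declarator list of a Java field declaration, i.e. everything
  // after the type; nested commas inside initializers are ignored for simplicity.
  def declaredNames(tokens: List[String]): List[String] = {
    val names = ListBuffer[String]()
    var ts = tokens
    while (ts.nonEmpty) ts match {
      case ident :: "=" :: rest =>
        names += ident
        ts = rest.dropWhile(t => t != "," && t != ";") // skip the initializer
      case ident :: ("," | ";") :: rest =>
        names += ident                                  // bare declarator
        ts = rest
      case _ :: rest =>
        ts = rest                                       // separator or initializer junk
      case Nil =>
        ts = Nil
    }
    names.toList
  }

  def main(args: Array[String]): Unit = {
    // int a = b + 1, c, d = f(x), e;   (type token already consumed)
    val toks = List("a","=","b","+","1",",","c",",","d","=","f","(","x",")",",","e",";")
    println(declaredNames(toks)) // List(a, c, d, e)
  }
}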
@@ -671,25 +626,25 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def collectIdents() : Int = {
if (in.token == ASTERISK) {
val starOffset = in.pos
- in.nextToken
+ in.nextToken()
buf += nme.WILDCARD
starOffset
} else {
val nameOffset = in.pos
buf += ident()
if (in.token == DOT) {
- in.nextToken
+ in.nextToken()
collectIdents()
} else nameOffset
}
}
- if (in.token == STATIC) in.nextToken
+ if (in.token == STATIC) in.nextToken()
else buf += nme.ROOTPKG
val lastnameOffset = collectIdents()
accept(SEMI)
val names = buf.toList
if (names.length < 2) {
- syntaxError(pos, "illegal import", false)
+ syntaxError(pos, "illegal import", skipIt = false)
List()
} else {
val qual = ((Ident(names.head): Tree) /: names.tail.init) (Select(_, _))
@@ -704,7 +659,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def interfacesOpt() =
if (in.token == IMPLEMENTS) {
- in.nextToken
+ in.nextToken()
repsep(typ, COMMA)
} else {
List()
@@ -717,7 +672,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val tparams = typeParams()
val superclass =
if (in.token == EXTENDS) {
- in.nextToken
+ in.nextToken()
typ()
} else {
javaLangObject()
@@ -736,10 +691,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val tparams = typeParams()
val parents =
if (in.token == EXTENDS) {
- in.nextToken
+ in.nextToken()
repsep(typ, COMMA)
} else {
- List(javaLangObject)
+ List(javaLangObject())
}
val (statics, body) = typeBody(INTERFACE, name)
addCompanionObject(statics, atPos(pos) {
@@ -766,7 +721,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
skipAhead() // skip init block, we just assume we have seen only static
accept(RBRACE)
} else if (in.token == SEMI) {
- in.nextToken
+ in.nextToken()
} else {
if (in.token == ENUM || definesInterface(in.token)) mods |= Flags.STATIC
val decls = memberDecl(mods, parentToken)
@@ -824,7 +779,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
if (in.token != RBRACE && in.token != SEMI && in.token != EOF) {
buf += enumConst(enumType)
if (in.token == COMMA) {
- in.nextToken
+ in.nextToken()
parseEnumConsts()
}
}
@@ -833,7 +788,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val consts = buf.toList
val (statics, body) =
if (in.token == SEMI) {
- in.nextToken
+ in.nextToken()
typeBodyDecls(ENUM, name)
} else {
(List(), List())
@@ -875,7 +830,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
// The STABLE flag is to signal to namer that this was read from a
// java enum, and so should be given a Constant type (thereby making
// it usable in annotations.)
- ValDef(Modifiers(Flags.STABLE | Flags.JAVA | Flags.STATIC), name, enumType, blankExpr)
+ ValDef(Modifiers(Flags.STABLE | Flags.JAVA | Flags.STATIC), name.toTermName, enumType, blankExpr)
}
}
@@ -884,13 +839,13 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
case INTERFACE => interfaceDecl(mods)
case AT => annotationDecl(mods)
case CLASS => classDecl(mods)
- case _ => in.nextToken; syntaxError("illegal start of type declaration", true); List(errorTypeTree)
+ case _ => in.nextToken(); syntaxError("illegal start of type declaration", skipIt = true); List(errorTypeTree)
}
/** CompilationUnit ::= [package QualId semi] TopStatSeq
*/
def compilationUnit(): Tree = {
- var pos = in.currentPos;
+ var pos = in.currentPos
val pkg: RefTree =
if (in.token == AT || in.token == PACKAGE) {
annotations()
@@ -910,9 +865,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
while (in.token == IMPORT)
buf ++= importDecl()
while (in.token != EOF && in.token != RBRACE) {
- while (in.token == SEMI) in.nextToken
+ while (in.token == SEMI) in.nextToken()
if (in.token != EOF)
- buf ++= typeDecl(modifiers(false))
+ buf ++= typeDecl(modifiers(inInterface = false))
}
accept(EOF)
atPos(pos) {
diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
index e230585a8b..f9b1e57e66 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
@@ -10,7 +10,7 @@ import scala.tools.nsc.util.JavaCharArrayReader
import scala.reflect.internal.util._
import scala.reflect.internal.Chars._
import JavaTokens._
-import scala.annotation.switch
+import scala.annotation.{ switch, tailrec }
import scala.language.implicitConversions
// Todo merge these better with Scanners
@@ -57,23 +57,14 @@ trait JavaScanners extends ast.parser.ScannersCommon {
/** ...
*/
abstract class AbstractJavaScanner extends AbstractJavaTokenData {
- implicit def p2g(pos: Position): ScanPosition
implicit def g2p(pos: ScanPosition): Position
- /** the last error position
- */
- var errpos: ScanPosition
- var lastPos: ScanPosition
- def skipToken: ScanPosition
def nextToken(): Unit
def next: AbstractJavaTokenData
def intVal(negated: Boolean): Long
def floatVal(negated: Boolean): Double
- def intVal: Long = intVal(false)
- def floatVal: Double = floatVal(false)
- //def token2string(token : Int) : String = configuration.token2string(token)
- /** return recent scala doc, if any */
- def flushDoc: DocComment
+ def intVal: Long = intVal(negated = false)
+ def floatVal: Double = floatVal(negated = false)
def currentPos: Position
}
@@ -227,17 +218,9 @@ trait JavaScanners extends ast.parser.ScannersCommon {
abstract class JavaScanner extends AbstractJavaScanner with JavaTokenData with Cloneable with ScannerCommon {
override def intVal = super.intVal// todo: needed?
override def floatVal = super.floatVal
- override var errpos: Int = NoPos
def currentPos: Position = g2p(pos - 1)
-
var in: JavaCharArrayReader = _
- def dup: JavaScanner = {
- val dup = clone().asInstanceOf[JavaScanner]
- dup.in = in.dup
- dup
- }
-
/** character buffer for literals
*/
val cbuf = new StringBuilder()
@@ -252,22 +235,6 @@ trait JavaScanners extends ast.parser.ScannersCommon {
cbuf.setLength(0)
}
- /** buffer for the documentation comment
- */
- var docBuffer: StringBuilder = null
-
- def flushDoc: DocComment = {
- val ret = if (docBuffer != null) DocComment(docBuffer.toString, NoPosition) else null
- docBuffer = null
- ret
- }
-
- /** add the given character to the documentation buffer
- */
- protected def putDocChar(c: Char) {
- if (docBuffer ne null) docBuffer.append(c)
- }
-
private class JavaTokenData0 extends JavaTokenData
/** we need one token lookahead
@@ -277,13 +244,6 @@ trait JavaScanners extends ast.parser.ScannersCommon {
// Get next token ------------------------------------------------------------
- /** read next token and return last position
- */
- def skipToken: Int = {
- val p = pos; nextToken
- p - 1
- }
-
def nextToken() {
if (next.token == EMPTY) {
fetchToken()
@@ -296,7 +256,7 @@ trait JavaScanners extends ast.parser.ScannersCommon {
def lookaheadToken: Int = {
prev copyFrom this
- nextToken
+ nextToken()
val t = token
next copyFrom this
this copyFrom prev
@@ -308,11 +268,10 @@ trait JavaScanners extends ast.parser.ScannersCommon {
private def fetchToken() {
if (token == EOF) return
lastPos = in.cpos - 1
- //var index = bp
while (true) {
in.ch match {
case ' ' | '\t' | CR | LF | FF =>
- in.next
+ in.next()
case _ =>
pos = in.cpos
(in.ch: @switch) match {
@@ -329,47 +288,47 @@ trait JavaScanners extends ast.parser.ScannersCommon {
'u' | 'v' | 'w' | 'x' | 'y' |
'z' =>
putChar(in.ch)
- in.next
- getIdentRest
+ in.next()
+ getIdentRest()
return
case '0' =>
putChar(in.ch)
- in.next
+ in.next()
if (in.ch == 'x' || in.ch == 'X') {
- in.next
+ in.next()
base = 16
} else {
base = 8
}
- getNumber
+ getNumber()
return
case '1' | '2' | '3' | '4' |
'5' | '6' | '7' | '8' | '9' =>
base = 10
- getNumber
+ getNumber()
return
case '\"' =>
- in.next
+ in.next()
while (in.ch != '\"' && (in.isUnicode || in.ch != CR && in.ch != LF && in.ch != SU)) {
getlitch()
}
if (in.ch == '\"') {
token = STRINGLIT
setName()
- in.next
+ in.next()
} else {
syntaxError("unclosed string literal")
}
return
case '\'' =>
- in.next
+ in.next()
getlitch()
if (in.ch == '\'') {
- in.next
+ in.next()
token = CHARLIT
setName()
} else {
@@ -379,31 +338,31 @@ trait JavaScanners extends ast.parser.ScannersCommon {
case '=' =>
token = ASSIGN
- in.next
+ in.next()
if (in.ch == '=') {
token = EQEQ
- in.next
+ in.next()
}
return
case '>' =>
token = GT
- in.next
+ in.next()
if (in.ch == '=') {
token = GTEQ
- in.next
+ in.next()
} else if (in.ch == '>') {
token = GTGT
- in.next
+ in.next()
if (in.ch == '=') {
token = GTGTEQ
- in.next
+ in.next()
} else if (in.ch == '>') {
token = GTGTGT
- in.next
+ in.next()
if (in.ch == '=') {
token = GTGTGTEQ
- in.next
+ in.next()
}
}
}
@@ -411,145 +370,145 @@ trait JavaScanners extends ast.parser.ScannersCommon {
case '<' =>
token = LT
- in.next
+ in.next()
if (in.ch == '=') {
token = LTEQ
- in.next
+ in.next()
} else if (in.ch == '<') {
token = LTLT
- in.next
+ in.next()
if (in.ch == '=') {
token = LTLTEQ
- in.next
+ in.next()
}
}
return
case '!' =>
token = BANG
- in.next
+ in.next()
if (in.ch == '=') {
token = BANGEQ
- in.next
+ in.next()
}
return
case '~' =>
token = TILDE
- in.next
+ in.next()
return
case '?' =>
token = QMARK
- in.next
+ in.next()
return
case ':' =>
token = COLON
- in.next
+ in.next()
return
case '@' =>
token = AT
- in.next
+ in.next()
return
case '&' =>
token = AMP
- in.next
+ in.next()
if (in.ch == '&') {
token = AMPAMP
- in.next
+ in.next()
} else if (in.ch == '=') {
token = AMPEQ
- in.next
+ in.next()
}
return
case '|' =>
token = BAR
- in.next
+ in.next()
if (in.ch == '|') {
token = BARBAR
- in.next
+ in.next()
} else if (in.ch == '=') {
token = BAREQ
- in.next
+ in.next()
}
return
case '+' =>
token = PLUS
- in.next
+ in.next()
if (in.ch == '+') {
token = PLUSPLUS
- in.next
+ in.next()
} else if (in.ch == '=') {
token = PLUSEQ
- in.next
+ in.next()
}
return
case '-' =>
token = MINUS
- in.next
+ in.next()
if (in.ch == '-') {
token = MINUSMINUS
- in.next
+ in.next()
} else if (in.ch == '=') {
token = MINUSEQ
- in.next
+ in.next()
}
return
case '*' =>
token = ASTERISK
- in.next
+ in.next()
if (in.ch == '=') {
token = ASTERISKEQ
- in.next
+ in.next()
}
return
case '/' =>
- in.next
+ in.next()
if (!skipComment()) {
token = SLASH
- in.next
+ in.next()
if (in.ch == '=') {
token = SLASHEQ
- in.next
+ in.next()
}
return
}
case '^' =>
token = HAT
- in.next
+ in.next()
if (in.ch == '=') {
token = HATEQ
- in.next
+ in.next()
}
return
case '%' =>
token = PERCENT
- in.next
+ in.next()
if (in.ch == '=') {
token = PERCENTEQ
- in.next
+ in.next()
}
return
case '.' =>
token = DOT
- in.next
+ in.next()
if ('0' <= in.ch && in.ch <= '9') {
- putChar('.'); getFraction
+ putChar('.'); getFraction()
} else if (in.ch == '.') {
- in.next
+ in.next()
if (in.ch == '.') {
- in.next
+ in.next()
token = DOTDOTDOT
} else syntaxError("`.' character expected")
}
@@ -557,60 +516,60 @@ trait JavaScanners extends ast.parser.ScannersCommon {
case ';' =>
token = SEMI
- in.next
+ in.next()
return
case ',' =>
token = COMMA
- in.next
+ in.next()
return
case '(' =>
token = LPAREN
- in.next
+ in.next()
return
case '{' =>
token = LBRACE
- in.next
+ in.next()
return
case ')' =>
token = RPAREN
- in.next
+ in.next()
return
case '}' =>
token = RBRACE
- in.next
+ in.next()
return
case '[' =>
token = LBRACKET
- in.next
+ in.next()
return
case ']' =>
token = RBRACKET
- in.next
+ in.next()
return
case SU =>
if (!in.hasNext) token = EOF
else {
syntaxError("illegal character")
- in.next
+ in.next()
}
return
case _ =>
if (Character.isUnicodeIdentifierStart(in.ch)) {
putChar(in.ch)
- in.next
- getIdentRest
+ in.next()
+ getIdentRest()
} else {
syntaxError("illegal character: "+in.ch.toInt)
- in.next
+ in.next()
}
return
}
@@ -618,33 +577,20 @@ trait JavaScanners extends ast.parser.ScannersCommon {
}
}
- private def skipComment(): Boolean = {
- if (in.ch == '/') {
- do {
- in.next
- } while ((in.ch != CR) && (in.ch != LF) && (in.ch != SU))
- true
- } else if (in.ch == '*') {
- docBuffer = null
- in.next
- val scalaDoc = ("/**", "*/")
- if (in.ch == '*' && forScaladoc)
- docBuffer = new StringBuilder(scalaDoc._1)
- do {
- do {
- if (in.ch != '*' && in.ch != SU) {
- in.next; putDocChar(in.ch)
- }
- } while (in.ch != '*' && in.ch != SU)
- while (in.ch == '*') {
- in.next; putDocChar(in.ch)
- }
- } while (in.ch != '/' && in.ch != SU)
- if (in.ch == '/') in.next
- else incompleteInputError("unclosed comment")
- true
- } else {
- false
+ protected def skipComment(): Boolean = {
+ @tailrec def skipLineComment(): Unit = in.ch match {
+ case CR | LF | SU =>
+ case _ => in.next; skipLineComment()
+ }
+ @tailrec def skipJavaComment(): Unit = in.ch match {
+ case SU => incompleteInputError("unclosed comment")
+ case '*' => in.next; if (in.ch == '/') in.next else skipJavaComment()
+ case _ => in.next; skipJavaComment()
+ }
+ in.ch match {
+ case '/' => in.next ; skipLineComment() ; true
+ case '*' => in.next ; skipJavaComment() ; true
+ case _ => false
}
}
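For reference, here is the same two-helper shape as the rewritten skipComment, but over a plain String with an index instead of the scanner's character reader (a hypothetical standalone helper, not compiler code, and it returns the resume index rather than the scanner's Boolean). As in the scanner, the leading '/' has already been consumed when the method is called.

object SkipCommentSketch {
  import scala.annotation.tailrec

  final val SU = '\u001A' // end-of-input sentinel, as in scala.reflect.internal.Chars

  // `start` points at the character after a '/' the caller has already consumed;
  // returns Some(resume index) if a comment was skipped, None if the '/' did not
  // start a comment (the scanner then emits a SLASH token).
  def skipComment(s: String, start: Int): Option[Int] = {
    def ch(i: Int): Char = if (i < s.length) s.charAt(i) else SU
    @tailrec def skipLine(i: Int): Int = ch(i) match {
      case '\r' | '\n' | SU => i
      case _                => skipLine(i + 1)
    }
    @tailrec def skipBlock(i: Int): Int = ch(i) match {
      case SU  => sys.error("unclosed comment")
      case '*' => if (ch(i + 1) == '/') i + 2 else skipBlock(i + 1)
      case _   => skipBlock(i + 1)
    }
    ch(start) match {
      case '/' => Some(skipLine(start + 1))  // line comment: stop at CR/LF/end
      case '*' => Some(skipBlock(start + 1)) // block comment: stop after "*/"
      case _   => None
    }
  }

  def main(args: Array[String]): Unit = {
    val src = "/* block */ int x; // line\n"
    println(skipComment(src, 1))  // Some(11): first index after the closing */
    println(skipComment(src, 20)) // Some(26): index of the newline ending the // comment
  }
}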
@@ -668,12 +614,12 @@ trait JavaScanners extends ast.parser.ScannersCommon {
'0' | '1' | '2' | '3' | '4' |
'5' | '6' | '7' | '8' | '9' =>
putChar(in.ch)
- in.next
+ in.next()
case '_' =>
putChar(in.ch)
- in.next
- getIdentRest
+ in.next()
+ getIdentRest()
return
case SU =>
setName()
@@ -682,7 +628,7 @@ trait JavaScanners extends ast.parser.ScannersCommon {
case _ =>
if (Character.isUnicodeIdentifierPart(in.ch)) {
putChar(in.ch)
- in.next
+ in.next()
} else {
setName()
token = JavaScannerConfiguration.name2token(name)
@@ -698,17 +644,17 @@ trait JavaScanners extends ast.parser.ScannersCommon {
*/
protected def getlitch() =
if (in.ch == '\\') {
- in.next
+ in.next()
if ('0' <= in.ch && in.ch <= '7') {
val leadch: Char = in.ch
var oct: Int = digit2int(in.ch, 8)
- in.next
+ in.next()
if ('0' <= in.ch && in.ch <= '7') {
oct = oct * 8 + digit2int(in.ch, 8)
- in.next
+ in.next()
if (leadch <= '3' && '0' <= in.ch && in.ch <= '7') {
oct = oct * 8 + digit2int(in.ch, 8)
- in.next
+ in.next()
}
}
putChar(oct.asInstanceOf[Char])
@@ -726,11 +672,11 @@ trait JavaScanners extends ast.parser.ScannersCommon {
syntaxError(in.cpos - 1, "invalid escape character")
putChar(in.ch)
}
- in.next
+ in.next()
}
} else {
putChar(in.ch)
- in.next
+ in.next()
}
/** read fractional part and exponent of floating point number
@@ -740,35 +686,35 @@ trait JavaScanners extends ast.parser.ScannersCommon {
token = DOUBLELIT
while ('0' <= in.ch && in.ch <= '9') {
putChar(in.ch)
- in.next
+ in.next()
}
if (in.ch == 'e' || in.ch == 'E') {
val lookahead = in.copy
- lookahead.next
+ lookahead.next()
if (lookahead.ch == '+' || lookahead.ch == '-') {
- lookahead.next
+ lookahead.next()
}
if ('0' <= lookahead.ch && lookahead.ch <= '9') {
putChar(in.ch)
- in.next
+ in.next()
if (in.ch == '+' || in.ch == '-') {
putChar(in.ch)
- in.next
+ in.next()
}
while ('0' <= in.ch && in.ch <= '9') {
putChar(in.ch)
- in.next
+ in.next()
}
}
token = DOUBLELIT
}
if (in.ch == 'd' || in.ch == 'D') {
putChar(in.ch)
- in.next
+ in.next()
token = DOUBLELIT
} else if (in.ch == 'f' || in.ch == 'F') {
putChar(in.ch)
- in.next
+ in.next()
token = FLOATLIT
}
setName()
@@ -828,23 +774,23 @@ trait JavaScanners extends ast.parser.ScannersCommon {
protected def getNumber() {
while (digit2int(in.ch, if (base < 10) 10 else base) >= 0) {
putChar(in.ch)
- in.next
+ in.next()
}
token = INTLIT
if (base <= 10 && in.ch == '.') {
val lookahead = in.copy
- lookahead.next
+ lookahead.next()
lookahead.ch match {
case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' |
'8' | '9' | 'd' | 'D' | 'e' | 'E' | 'f' | 'F' =>
putChar(in.ch)
- in.next
- return getFraction
+ in.next()
+ return getFraction()
case _ =>
if (!isIdentifierStart(lookahead.ch)) {
putChar(in.ch)
- in.next
- return getFraction
+ in.next()
+ return getFraction()
}
}
}
@@ -852,11 +798,11 @@ trait JavaScanners extends ast.parser.ScannersCommon {
(in.ch == 'e' || in.ch == 'E' ||
in.ch == 'f' || in.ch == 'F' ||
in.ch == 'd' || in.ch == 'D')) {
- return getFraction
+ return getFraction()
}
setName()
if (in.ch == 'l' || in.ch == 'L') {
- in.next
+ in.next()
token = LONGLIT
}
}
@@ -868,7 +814,6 @@ trait JavaScanners extends ast.parser.ScannersCommon {
def syntaxError(pos: Int, msg: String) {
error(pos, msg)
token = ERROR
- errpos = pos
}
/** generate an error at the current token position
@@ -879,7 +824,6 @@ trait JavaScanners extends ast.parser.ScannersCommon {
def incompleteInputError(msg: String) {
incompleteInputError(pos, msg)
token = EOF
- errpos = pos
}
override def toString() = token match {
@@ -908,21 +852,17 @@ trait JavaScanners extends ast.parser.ScannersCommon {
/** INIT: read lookahead character and token.
*/
def init() {
- in.next
- nextToken
+ in.next()
+ nextToken()
}
}
- /** ...
- */
class JavaUnitScanner(unit: CompilationUnit) extends JavaScanner {
in = new JavaCharArrayReader(unit.source.content, !settings.nouescape.value, syntaxError)
- init
- def warning(pos: Int, msg: String) = unit.warning(pos, msg)
+ init()
def error (pos: Int, msg: String) = unit. error(pos, msg)
def incompleteInputError(pos: Int, msg: String) = unit.incompleteInputError(pos, msg)
def deprecationWarning(pos: Int, msg: String) = unit.deprecationWarning(pos, msg)
- implicit def p2g(pos: Position): Int = if (pos.isDefined) pos.point else -1
implicit def g2p(pos: Int): Position = new OffsetPosition(unit.source, pos)
}
}
diff --git a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
index a562de291d..953a3c6d82 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
@@ -68,9 +68,6 @@ object JavaTokens extends ast.parser.Tokens {
final val VOLATILE = 68
final val WHILE = 69
- def isKeyword(code : Int) =
- code >= ABSTRACT && code <= WHILE
-
/** special symbols */
final val COMMA = 70
final val SEMI = 71
@@ -115,9 +112,6 @@ object JavaTokens extends ast.parser.Tokens {
final val GTGTEQ = 113
final val GTGTGTEQ = 114
- def isSymbol(code : Int) =
- code >= COMMA && code <= GTGTGTEQ
-
/** parenthesis */
final val LPAREN = 115
final val RPAREN = 116
diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
deleted file mode 100644
index 5ca9fd5062..0000000000
--- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
+++ /dev/null
@@ -1,138 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import transform.ExplicitOuter
-import ast.{ Printers, Trees }
-import java.io.{ StringWriter, PrintWriter }
-import scala.annotation.elidable
-import scala.language.postfixOps
-
-/** Ancillary bits of ParallelMatching which are better off
- * out of the way.
- */
-trait MatchSupport extends ast.TreeDSL { self: ParallelMatching =>
-
- import global.{ typer => _, _ }
- import CODE._
-
- /** Debugging support: enable with -Ypmat-debug **/
- private final def trace = settings.Ypmatdebug.value
-
- def impossible: Nothing = abort("this never happens")
-
- def treeCollect[T](tree: Tree, pf: PartialFunction[Tree, T]): List[T] =
- tree filter (pf isDefinedAt _) map (x => pf(x))
-
- object Types {
- import definitions._
-
- val subrangeTypes = Set[Symbol](ByteClass, ShortClass, CharClass, IntClass)
-
- implicit class RichType(undecodedTpe: Type) {
- def tpe = decodedEqualsType(undecodedTpe)
- def isAnyRef = tpe <:< AnyRefClass.tpe
-
- // These tests for final classes can inspect the typeSymbol
- private def is(s: Symbol) = tpe.typeSymbol eq s
- def isByte = is(ByteClass)
- def isShort = is(ShortClass)
- def isInt = is(IntClass)
- def isChar = is(CharClass)
- def isBoolean = is(BooleanClass)
- def isNothing = is(NothingClass)
- def isArray = is(ArrayClass)
- }
- }
-
- object Debug {
- def typeToString(t: Type): String = t match {
- case NoType => "x"
- case x => x.toString
- }
- def symbolToString(s: Symbol): String = s match {
- case x => x.toString
- }
- def treeToString(t: Tree): String = treeInfo.unbind(t) match {
- case EmptyTree => "?"
- case WILD() => "_"
- case Literal(Constant(x)) => "LIT(%s)".format(x)
- case Apply(fn, args) => "%s(%s)".format(treeToString(fn), args map treeToString mkString ",")
- case Typed(expr, tpt) => "%s: %s".format(treeToString(expr), treeToString(tpt))
- case x => x.toString + " (" + x.getClass + ")"
- }
-
- // Formatting for some error messages
- private val NPAD = 15
- def pad(s: String): String = "%%%ds" format (NPAD-1) format s
- def pad(s: Any): String = pad(s match {
- case x: Tree => treeToString(x)
- case x => x.toString
- })
-
- // pretty print for debugging
- def pp(x: Any): String = pp(x, false)
- def pp(x: Any, newlines: Boolean): String = {
- val stripStrings = List("""java\.lang\.""", """\$iw\.""")
-
- def clean(s: String): String =
- stripStrings.foldLeft(s)((s, x) => s.replaceAll(x, ""))
-
- def pplist(xs: List[Any]): String =
- if (newlines) (xs map (" " + _ + "\n")).mkString("\n", "", "")
- else xs.mkString("(", ", ", ")")
-
- pp(x match {
- case s: String => return clean(s)
- case x: Tree => asCompactString(x)
- case xs: List[_] => pplist(xs map pp)
- case x: Tuple2[_,_] => "%s -> %s".format(pp(x._1), pp(x._2))
- case x => x.toString
- })
- }
-
- @elidable(elidable.FINE) def TRACE(f: String, xs: Any*): Unit = {
- if (trace) {
- val msg = if (xs.isEmpty) f else f.format(xs map pp: _*)
- println(msg)
- }
- }
- @elidable(elidable.FINE) def traceCategory(cat: String, f: String, xs: Any*) = {
- if (trace)
- TRACE("[" + """%10s""".format(cat) + "] " + f, xs: _*)
- }
- def tracing[T](s: String)(x: T): T = {
- if (trace)
- println(("[" + """%10s""".format(s) + "] %s") format pp(x))
-
- x
- }
- private[nsc] def printing[T](fmt: String, xs: Any*)(x: T): T = {
- println(fmt.format(xs: _*) + " == " + x)
- x
- }
- private[nsc] def debugging[T](fmt: String, xs: Any*)(x: T): T = {
- if (settings.debug.value) printing(fmt, xs: _*)(x)
- else x
- }
-
- def indent(s: Any) = s.toString() split "\n" map (" " + _) mkString "\n"
- def indentAll(s: Seq[Any]) = s map (" " + _.toString() + "\n") mkString
- }
-
- /** Drops the 'i'th element of a list.
- */
- def dropIndex[T](xs: List[T], n: Int) = {
- val (l1, l2) = xs splitAt n
- l1 ::: (l2 drop 1)
- }
-
- /** Extract the nth element of a list and return it and the remainder.
- */
- def extractIndex[T](xs: List[T], n: Int): (T, List[T]) =
- (xs(n), dropIndex(xs, n))
-}
diff --git a/src/compiler/scala/tools/nsc/matching/Matrix.scala b/src/compiler/scala/tools/nsc/matching/Matrix.scala
deleted file mode 100644
index daefe4c545..0000000000
--- a/src/compiler/scala/tools/nsc/matching/Matrix.scala
+++ /dev/null
@@ -1,259 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import transform.ExplicitOuter
-import symtab.Flags
-import scala.collection.mutable
-import scala.language.implicitConversions
-
-trait Matrix extends MatrixAdditions {
- self: ExplicitOuter with ParallelMatching =>
-
- import global.{ typer => _, _ }
- import analyzer.Typer
- import CODE._
- import Debug._
- import Flags.{ SYNTHETIC, MUTABLE }
-
- private[matching] val NO_EXHAUSTIVE = Flags.TRANS_FLAG
-
- /** Translation of match expressions.
- *
- * `p`: pattern
- * `g`: guard
- * `bx`: body index
- *
- * internal representation is (tvars:List[Symbol], rows:List[Row])
- *
- * tmp1 tmp_n
- * Row( p_11 ... p_1n g_1 b_1 ) + subst
- *
- * Row( p_m1 ... p_mn g_m b_m ) + subst
- *
- * Implementation based on the algorithm described in
- *
- * "A Term Pattern-Match Compiler Inspired by Finite Automata Theory"
- * Mikael Pettersson
- * ftp://ftp.ida.liu.se/pub/labs/pelab/papers/cc92pmc.ps.gz
- *
- * @author Burak Emir
- */
-
- /** "The Mixture Rule"
-
- {v=pat1, pats1 .. } {q1}
- match {.. } {..}
- {v=patn, patsn .. } {qn}
-
-  This is the real work-horse of the algorithm. There is some column whose top-most pattern is a
-  constructor. (For simplicity, it is depicted above as the left-most column, but any column will do.)
-  The goal is to build a test state with the variable v and some outgoing arcs (one for each
-  constructor and possibly a default arc). For each constructor in the selected column, its arc is
-  defined as follows:
-
-  Let {i1,...,ij} be the row indices of the patterns in the column that match c. Since the
-  patterns are viewed as regular expressions, this will be the indices of the patterns that either
-  have the same constructor c, or are wildcards.
-
-  Let {pat1,...,patj} be the patterns in the column corresponding to the indices computed
-  above, and let n be the arity of the constructor c, i.e. the number of sub-patterns it has. For
-  each pati, its n sub-patterns are extracted; if pati is a wildcard, n wildcards are produced
-  instead, each tagged with the right path variable. This results in a pattern matrix with n
-  columns and j rows. This matrix is then appended to the result of selecting, from each
-  column in the rest of the original matrix, those rows whose indices are in {i1,...,ij}. Finally
-  the indices are used to select the corresponding final states that go with these rows. Note
-  that the order of the indices is significant; selected rows do not change their relative orders.
-  The arc for the constructor c is now defined as (c’, state), where c’ is c with any
-  immediate sub-patterns replaced by their path variables (thus c’ is a simple pattern), and
-  state is the result of recursively applying match to the new matrix and the new sequence
-  of final states.
-
-  Finally, the possibility for matching failure is considered. If the set of constructors is exhaustive,
-  then no more arcs are computed. Otherwise, a default arc (_, state) is the last arc. If there are
-  any wildcard patterns in the selected column, then their rows are selected from the rest of the
-  matrix and the final states, and the state is the result of applying match to the new matrix and
-  states. Otherwise, the error state is used after its reference count has been incremented.
- **/
-
- /** Handles all translation of pattern matching.
- */
- def handlePattern(
- selector: Tree, // tree being matched upon (called scrutinee after this)
- cases: List[CaseDef], // list of cases in the match
- isChecked: Boolean, // whether exhaustiveness checking is enabled (disabled with @unchecked)
- context: MatrixContext): Tree =
- {
- import context._
- TRACE("handlePattern", "(%s: %s) match { %s cases }", selector, selector.tpe, cases.size)
-
- val matrixInit: MatrixInit = {
- val v = copyVar(selector, isChecked, selector.tpe, "temp")
- MatrixInit(List(v), cases, atPos(selector.pos)(MATCHERROR(v.ident)))
- }
- val matrix = new MatchMatrix(context) { lazy val data = matrixInit }
- val mch = typer typed matrix.expansion.toTree
- val dfatree = typer typed Block(matrix.data.valDefs, mch)
-
- // redundancy check
- matrix.targets filter (_.unreached) foreach (cs => cunit.error(cs.body.pos, "unreachable code"))
- // optimize performs squeezing and resets any remaining NO_EXHAUSTIVE
- tracing("handlePattern")(matrix optimize dfatree)
- }
-
- case class MatrixContext(
- cunit: CompilationUnit, // current unit
- handleOuter: Tree => Tree, // for outer pointer
- typer: Typer, // a local typer
- owner: Symbol, // the current owner
- matchResultType: Type) // the expected result type of the whole match
- extends Squeezer
- {
- private def ifNull[T](x: T, alt: T) = if (x == null) alt else x
-
- // NO_EXHAUSTIVE communicates there should be no exhaustiveness checking
- private def flags(checked: Boolean) = if (checked) Nil else List(NO_EXHAUSTIVE)
-
- // Recording the symbols of the synthetics we create so we don't go clearing
- // anyone else's mutable flags.
- private val _syntheticSyms = mutable.HashSet[Symbol]()
- def clearSyntheticSyms() = {
- _syntheticSyms foreach (_ resetFlag (NO_EXHAUSTIVE|MUTABLE))
- debuglog("Cleared NO_EXHAUSTIVE/MUTABLE on " + _syntheticSyms.size + " synthetic symbols.")
- _syntheticSyms.clear()
- }
- def recordSyntheticSym(sym: Symbol): Symbol = {
- _syntheticSyms += sym
- if (_syntheticSyms.size > 25000) {
- cunit.error(owner.pos, "Sanity check failed: over 25000 symbols created for pattern match.")
- abort("This is a bug in the pattern matcher.")
- }
- sym
- }
-
- case class MatrixInit(
- roots: List[PatternVar],
- cases: List[CaseDef],
- default: Tree
- ) {
- def tvars = roots map (_.lhs)
- def valDefs = roots map (_.valDef)
- override def toString() = "MatrixInit(roots = %s, %d cases)".format(pp(roots), cases.size)
- }
-
- implicit def pvlist2pvgroup(xs: List[PatternVar]): PatternVarGroup =
- PatternVarGroup(xs)
-
- object PatternVarGroup {
- def apply(xs: PatternVar*) = new PatternVarGroup(xs.toList)
- def apply(xs: List[PatternVar]) = new PatternVarGroup(xs)
-
- // XXX - transitional
- def fromBindings(vlist: List[Binding], freeVars: List[Symbol] = Nil) = {
- def vmap(v: Symbol): Option[Binding] = vlist find (_.pvar eq v)
- val info =
- if (freeVars.isEmpty) vlist
- else (freeVars map vmap).flatten
-
- val xs =
- for (Binding(lhs, rhs) <- info) yield
- new PatternVar(lhs, Ident(rhs) setType lhs.tpe, !(rhs hasFlag NO_EXHAUSTIVE))
-
- new PatternVarGroup(xs)
- }
- }
-
- val emptyPatternVarGroup = PatternVarGroup()
- class PatternVarGroup(val pvs: List[PatternVar]) {
- def syms = pvs map (_.sym)
- def valDefs = pvs map (_.valDef)
- def idents = pvs map (_.ident)
-
- def extractIndex(index: Int): (PatternVar, PatternVarGroup) = {
- val (t, ts) = self.extractIndex(pvs, index)
- (t, PatternVarGroup(ts))
- }
-
- def isEmpty = pvs.isEmpty
- def size = pvs.size
- def head = pvs.head
- def ::(t: PatternVar) = PatternVarGroup(t :: pvs)
- def :::(ts: List[PatternVar]) = PatternVarGroup(ts ::: pvs)
- def ++(other: PatternVarGroup) = PatternVarGroup(pvs ::: other.pvs)
-
- def apply(i: Int) = pvs(i)
- def zipWithIndex = pvs.zipWithIndex
- def indices = pvs.indices
- def map[T](f: PatternVar => T) = pvs map f
- def filter(p: PatternVar => Boolean) = PatternVarGroup(pvs filter p)
-
- override def toString() = pp(pvs)
- }
-
- /** Every temporary variable allocated is put in a PatternVar.
- */
- class PatternVar(val lhs: Symbol, val rhs: Tree, val checked: Boolean) {
- def sym = lhs
- def tpe = lhs.tpe
- if (checked)
- lhs resetFlag NO_EXHAUSTIVE
- else
- lhs setFlag NO_EXHAUSTIVE
-
- // See #1427 for an example of a crash which occurs unless we retype:
- // in that instance there is an existential in the pattern.
- lazy val ident = typer typed Ident(lhs)
- lazy val valDef = typer typedValDef ValDef(lhs, rhs)
-
- override def toString() = "%s: %s = %s".format(lhs, tpe, rhs)
- }
-
- /** Given a tree, creates a new synthetic variable of the same type
- * and assigns the tree to it.
- */
- def copyVar(
- root: Tree,
- checked: Boolean,
- _tpe: Type = null,
- label: String = "temp"): PatternVar =
- {
- val tpe = ifNull(_tpe, root.tpe)
- val name = cunit.freshTermName(label)
- val sym = newVar(root.pos, tpe, flags(checked), name)
-
- tracing("copy")(new PatternVar(sym, root, checked))
- }
-
- /** Creates a new synthetic variable of the specified type and
- * assigns the result of f(symbol) to it.
- */
- def createVar(tpe: Type, f: Symbol => Tree, checked: Boolean) = {
- val lhs = newVar(owner.pos, tpe, flags(checked))
- val rhs = f(lhs)
-
- tracing("create")(new PatternVar(lhs, rhs, checked))
- }
- def createLazy(tpe: Type, f: Symbol => Tree, checked: Boolean) = {
- val lhs = newVar(owner.pos, tpe, Flags.LAZY :: flags(checked))
- val rhs = f(lhs)
-
- tracing("createLazy")(new PatternVar(lhs, rhs, checked))
- }
-
- private def newVar(
- pos: Position,
- tpe: Type,
- flags: List[Long] = Nil,
- name: TermName = null): Symbol =
- {
- val n = if (name == null) cunit.freshTermName("temp") else name
- // careful: pos has special meaning
- val flagsLong = (SYNTHETIC.toLong /: flags)(_|_)
- recordSyntheticSym(owner.newVariable(n, pos, flagsLong) setInfo tpe)
- }
- }
-}
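The row-selection step described in the Mixture Rule comment of the deleted Matrix.scala above is easier to follow on a toy pattern matrix. The sketch below uses simplified Pat/Row types rather than the compiler's Pattern machinery: specializing the matrix for one constructor keeps only the rows whose first-column pattern is that constructor or a wildcard, and expands that cell into the constructor's sub-patterns.

object MixtureRuleSketch {
  sealed trait Pat
  case object Wild extends Pat
  final case class Ctor(name: String, args: List[Pat]) extends Pat

  type Row = List[Pat]

  // Specialize the matrix for constructor `name` of arity `arity`: keep rows
  // whose first pattern is that constructor or a wildcard, and replace the
  // first column by the constructor's sub-patterns (wildcards for a wildcard).
  def specialize(name: String, arity: Int, rows: List[Row]): List[Row] =
    rows.collect {
      case Ctor(`name`, args) :: rest => args ::: rest
      case Wild :: rest               => List.fill(arity)(Wild: Pat) ::: rest
    }

  def main(args: Array[String]): Unit = {
    // Roughly:  case (Some(x), 1) => ... ; case (None, _) => ... ; case (_, 2) => ...
    val rows: List[Row] = List(
      List(Ctor("Some", List(Wild)), Ctor("1", Nil)),
      List(Ctor("None", Nil),        Wild),
      List(Wild,                     Ctor("2", Nil))
    )
    // The arc for constructor Some (arity 1): rows 1 and 3 survive, row 2 is dropped.
    specialize("Some", 1, rows).foreach(println)
    // List(Wild, Ctor(1,List()))
    // List(Wild, Ctor(2,List()))
  }
}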
diff --git a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala b/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
deleted file mode 100644
index 7220253003..0000000000
--- a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
+++ /dev/null
@@ -1,193 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import transform.ExplicitOuter
-import PartialFunction._
-
-/** Traits which are mixed into MatchMatrix, but separated out as
- * (somewhat) independent components to keep them on the sidelines.
- */
-trait MatrixAdditions extends ast.TreeDSL {
- self: ExplicitOuter with ParallelMatching =>
-
- import global.{ typer => _, _ }
- import symtab.Flags
- import CODE._
- import Debug._
- import treeInfo._
- import definitions.{ isPrimitiveValueClass }
-
- /** The Squeezer, responsible for all the squeezing.
- */
- private[matching] trait Squeezer {
- self: MatrixContext =>
-
- private val settings_squeeze = !settings.Ynosqueeze.value
-
- class RefTraverser(vd: ValDef) extends Traverser {
- private val targetSymbol = vd.symbol
- private var safeRefs = 0
- private var isSafe = true
-
- def canDrop = isSafe && safeRefs == 0
- def canInline = isSafe && safeRefs == 1
-
- override def traverse(tree: Tree): Unit = tree match {
- case t: Ident if t.symbol eq targetSymbol =>
- // target symbol's owner should match currentOwner
- if (targetSymbol.owner == currentOwner) safeRefs += 1
- else isSafe = false
-
- case LabelDef(_, params, rhs) =>
- if (params exists (_.symbol eq targetSymbol)) // cannot substitute this one
- isSafe = false
-
- traverse(rhs)
- case _ if safeRefs > 1 => ()
- case _ =>
- super.traverse(tree)
- }
- }
-
- /** Compresses multiple Blocks. */
- private def combineBlocks(stats: List[Tree], expr: Tree): Tree = expr match {
- case Block(stats1, expr1) if stats.isEmpty => combineBlocks(stats1, expr1)
- case _ => Block(stats, expr)
- }
- def squeezedBlock(vds: List[Tree], exp: Tree): Tree =
- if (settings_squeeze) combineBlocks(Nil, squeezedBlock1(vds, exp))
- else combineBlocks(vds, exp)
-
- private def squeezedBlock1(vds: List[Tree], exp: Tree): Tree = {
- lazy val squeezedTail = squeezedBlock(vds.tail, exp)
- def default = squeezedTail match {
- case Block(vds2, exp2) => Block(vds.head :: vds2, exp2)
- case exp2 => Block(vds.head :: Nil, exp2)
- }
-
- if (vds.isEmpty) exp
- else vds.head match {
- case vd: ValDef =>
- val rt = new RefTraverser(vd)
- rt.atOwner(owner)(rt traverse squeezedTail)
-
- if (rt.canDrop)
- squeezedTail
- else if (isConstantType(vd.symbol.tpe) || rt.canInline)
- new TreeSubstituter(List(vd.symbol), List(vd.rhs)) transform squeezedTail
- else
- default
- case _ => default
- }
- }
- }
-
- /** The Optimizer, responsible for some of the optimizing.
- */
- private[matching] trait MatchMatrixOptimizer {
- self: MatchMatrix =>
-
- import self.context._
-
- final def optimize(tree: Tree): Tree = {
- // Uses treeInfo extractors rather than looking at trees directly
- // because the many Blocks obscure our vision.
- object lxtt extends Transformer {
- override def transform(tree: Tree): Tree = tree match {
- case Block(stats, ld @ LabelDef(_, _, body)) if targets exists (_ shouldInline ld.symbol) =>
- squeezedBlock(transformStats(stats, currentOwner), body)
- case IsIf(cond, IsTrue(), IsFalse()) =>
- transform(cond)
- case IsIf(cond1, IsIf(cond2, thenp, elsep1), elsep2) if elsep1 equalsStructure elsep2 =>
- transform(typer typed If(gen.mkAnd(cond1, cond2), thenp, elsep2))
- case If(cond1, IsIf(cond2, thenp, Apply(jmp, Nil)), ld: LabelDef) if jmp.symbol eq ld.symbol =>
- transform(typer typed If(gen.mkAnd(cond1, cond2), thenp, ld))
- case _ =>
- super.transform(tree)
- }
- }
- try lxtt transform tree
- finally clearSyntheticSyms()
- }
- }
-
- /** The Exhauster.
- */
- private[matching] trait MatrixExhaustiveness {
- self: MatchMatrix =>
-
- import self.context._
-
- /** Exhaustiveness checking requires looking for sealed classes
- * and if found, making sure all children are covered by a pattern.
- */
- class ExhaustivenessChecker(rep: Rep, matchPos: Position) {
- val Rep(tvars, rows) = rep
-
- import Flags.{ MUTABLE, ABSTRACT, SEALED }
-
- private case class Combo(index: Int, sym: Symbol) { }
-
-    /* True if the patterns in 'row' cover the given type symbol combination, and the row has no guard. */
- private def rowCoversCombo(row: Row, combos: List[Combo]) =
- row.guard.isEmpty && combos.forall(c => row.pats(c.index) covers c.sym)
-
- private def requiresExhaustive(sym: Symbol) = {
-      (sym.isMutable) &&                 // indicates that we have not yet checked exhaustivity
- !(sym hasFlag NO_EXHAUSTIVE) && // indicates @unchecked
- (sym.tpe.typeSymbol.isSealed) &&
- !isPrimitiveValueClass(sym.tpe.typeSymbol) // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
- }
-
- private lazy val inexhaustives: List[List[Combo]] = {
- // let's please not get too clever side-effecting the mutable flag.
- val toCollect = tvars.zipWithIndex filter { case (pv, i) => requiresExhaustive(pv.sym) }
- val collected = toCollect map { case (pv, i) =>
- // okay, now reset the flag
- pv.sym resetFlag MUTABLE
-
- i -> (
- pv.tpe.typeSymbol.sealedDescendants.toList sortBy (_.sealedSortName)
- // symbols which are both sealed and abstract need not be covered themselves, because
- // all of their children must be and they cannot otherwise be created.
- filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x))
- // have to filter out children which cannot match: see ticket #3683 for an example
- filter (_.tpe matchesPattern pv.tpe)
- )
- }
-
- val folded =
- collected.foldRight(List[List[Combo]]())((c, xs) => {
- val (i, syms) = c match { case (i, set) => (i, set.toList) }
- xs match {
- case Nil => syms map (s => List(Combo(i, s)))
- case _ => for (s <- syms ; rest <- xs) yield Combo(i, s) :: rest
- }
- })
-
- folded filterNot (combo => rows exists (r => rowCoversCombo(r, combo)))
- }
-
- private def mkPad(xs: List[Combo], i: Int): String = xs match {
- case Nil => pad("*")
- case Combo(j, sym) :: rest => if (j == i) pad(sym.name.toString) else mkPad(rest, i)
- }
- private def mkMissingStr(open: List[Combo]) =
- "missing combination %s\n" format tvars.indices.map(mkPad(open, _)).mkString
-
- /** The only public method. */
- def check = {
- def errMsg = (inexhaustives map mkMissingStr).mkString
- if (inexhaustives.nonEmpty)
- cunit.warning(matchPos, "match is not exhaustive!\n" + errMsg)
-
- rep
- }
- }
- }
-}
\ No newline at end of file
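The situation the ExhaustivenessChecker above looks for, shown in ordinary user code: a scrutinee whose type is sealed but whose children are not all covered by a pattern. Compiling something like the sketch below typically yields a non-exhaustive match warning (the old checker phrased it as a "missing combination"); annotating the scrutinee with @unchecked suppresses the check, which is what the NO_EXHAUSTIVE flag records internally. The Shape hierarchy here is purely illustrative.

object ExhaustivenessDemo {
  sealed trait Shape
  final case class Circle(r: Double)                    extends Shape
  final case class Rect(w: Double, h: Double)           extends Shape
  final case class Tri(a: Double, b: Double, c: Double) extends Shape

  // Tri is not covered: the checker flags the missing case unless the
  // scrutinee is written as (s: @unchecked).
  def area(s: Shape): Double = s match {
    case Circle(r)  => math.Pi * r * r
    case Rect(w, h) => w * h
  }

  def main(args: Array[String]): Unit =
    println(area(Circle(1.0)))
}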
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
deleted file mode 100644
index dbb9b7a003..0000000000
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ /dev/null
@@ -1,870 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Copyright 2007 Google Inc. All Rights Reserved.
- * Author: bqe@google.com (Burak Emir)
- */
-
-package scala.tools.nsc
-package matching
-
-import PartialFunction._
-import scala.collection.{ mutable }
-import scala.reflect.internal.util.Position
-import transform.ExplicitOuter
-import symtab.Flags
-import mutable.ListBuffer
-import scala.annotation.elidable
-import scala.language.postfixOps
-import scala.tools.nsc.settings.ScalaVersion
-
-trait ParallelMatching extends ast.TreeDSL
- with MatchSupport
- with Matrix
- with Patterns
- with PatternBindings
-{
- self: ExplicitOuter =>
-
- import global.{ typer => _, _ }
- import definitions.{
- AnyRefClass, IntClass, BooleanClass, SomeClass, OptionClass,
- getProductArgs, productProj, Object_eq, Any_asInstanceOf
- }
- import CODE._
- import Types._
- import Debug._
-
- /** Transition **/
- def toPats(xs: List[Tree]): List[Pattern] = xs map Pattern.apply
-
- /** The umbrella matrix class. **/
- abstract class MatchMatrix(val context: MatrixContext) extends MatchMatrixOptimizer with MatrixExhaustiveness {
- import context._
-
- def data: MatrixContext#MatrixInit
-
- lazy val MatrixInit(roots, cases, failTree) = data
- lazy val (rows, targets) = expand(roots, cases).unzip
- lazy val expansion: Rep = make(roots, rows)
-
- private val shortCuts = perRunCaches.newMap[Int, Symbol]()
-
- final def createShortCut(theLabel: Symbol): Int = {
- val key = shortCuts.size + 1
- shortCuts(key) = theLabel
- -key
- }
- def createLabelDef(namePrefix: String, body: Tree, params: List[Symbol] = Nil, restpe: Type = matchResultType) = {
- val labelName = cunit.freshTermName(namePrefix)
- val labelSym = owner.newLabel(labelName, owner.pos)
- val labelInfo = MethodType(params, restpe)
-
- LabelDef(labelSym setInfo labelInfo, params, body setType restpe)
- }
-
- /** This is the recursively focal point for translating the current
- * list of pattern variables and a list of pattern match rows into
- * a tree suitable for entering erasure.
-    /** This is the recursive focal point for translating the current
- * The first time it is called, the variables are (copies of) the
- * original pattern matcher roots, and the rows correspond to the
- * original casedefs.
- */
- final def make(roots1: PatternVarGroup, rows1: List[Row]): Rep = {
- traceCategory("New Match", "%sx%s (%s)", roots1.size, rows1.size, roots1.syms.mkString(", "))
- def classifyPat(opat: Pattern, j: Int): Pattern = opat simplify roots1(j)
-
- val newRows = rows1 flatMap (_ expandAlternatives classifyPat)
- if (rows1.length != newRows.length) make(roots1, newRows) // recursive call if any change
- else {
- val rep = Rep(roots1, newRows)
- new ExhaustivenessChecker(rep, roots.head.sym.pos).check
- rep
- }
- }
-
- override def toString() = "MatchMatrix(%s) { %s }".format(matchResultType, indentAll(targets))
-
- /**
- * Encapsulates a symbol being matched on. It is created from a
- * PatternVar, which encapsulates the symbol's creation and assignment.
- *
- * We never match on trees directly - a temporary variable is created
- * (in a PatternVar) for any expression being matched on.
- */
- class Scrutinee(val pv: PatternVar) {
- import definitions._
-
- // presenting a face of our symbol
- def sym = pv.sym
- def tpe = sym.tpe
- def pos = sym.pos
- def id = ID(sym) setPos pos // attributed ident
-
- def accessors = if (isCaseClass) sym.caseFieldAccessors else Nil
- def accessorTypes = accessors map (x => (tpe memberType x).resultType)
-
- lazy val accessorPatternVars = PatternVarGroup(
- for ((accessor, tpe) <- accessors zip accessorTypes) yield
- createVar(tpe, _ => fn(id, accessor))
- )
-
- private def extraValDefs = if (pv.rhs.isEmpty) Nil else List(pv.valDef)
- def allValDefs = extraValDefs ::: accessorPatternVars.valDefs
-
- // tests
- def isDefined = sym ne NoSymbol
- def isSubrangeType = subrangeTypes(tpe.typeSymbol)
- def isCaseClass = tpe.typeSymbol.isCase
-
- // sequences
- def seqType = tpe.widen baseType SeqClass
- def elemType = tpe typeArgs 0
-
- private def elemAt(i: Int) = (id DOT (tpe member nme.apply))(LIT(i))
- private def createElemVar(i: Int) = createVar(elemType, _ => elemAt(i))
- private def createSeqVar(drop: Int) = createVar(seqType, _ => id DROP drop)
-
- def createSequenceVars(count: Int): List[PatternVar] =
- (0 to count).toList map (i => if (i < count) createElemVar(i) else createSeqVar(i))
-
- // for propagating "unchecked" to synthetic vars
- def isChecked = !(sym hasFlag NO_EXHAUSTIVE)
- def flags: List[Long] = List(NO_EXHAUSTIVE) filter (sym hasFlag _)
-
- // this is probably where this actually belongs
- def createVar(tpe: Type, f: Symbol => Tree) = context.createVar(tpe, f, isChecked)
-
- def castedTo(headType: Type) =
- if (tpe =:= headType) this
- else new Scrutinee(createVar(headType, lhs => gen.mkAsInstanceOf(id, lhs.tpe)))
-
- override def toString() = "(%s: %s)".format(id, tpe)
- }
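In user terms, accessorPatternVars is what lets a case-class match pull each field through its accessor. A small sketch, assuming an ordinary case class:

    object ScrutineeSketch {
      case class Point(x: Int, y: Int)
      def flip(p: Point): Point = p match {
        case Point(a, b) => Point(b, a)   // a and b come from the accessors p.x and p.y
      }
      def main(args: Array[String]): Unit =
        println(flip(Point(1, 2)))        // Point(2,1)
    }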
-
- def isPatternSwitch(scrut: Scrutinee, ps: List[Pattern]): Option[PatternSwitch] = {
- def isSwitchableConst(x: Pattern) = cond(x) { case x: LiteralPattern if x.isSwitchable => true }
- def isSwitchableDefault(x: Pattern) = isSwitchableConst(x) || x.isDefault
-
- // TODO - scala> (5: Any) match { case 5 => 5 ; case 6 => 7 }
- // ... should compile to a switch. It doesn't because the scrut isn't Int/Char, but
- // that could be handled in an if/else since every pattern requires an Int.
- // More immediately, Byte and Short scruts should also work.
- if (!scrut.isSubrangeType) None
- else {
- val (_lits, others) = ps span isSwitchableConst
- val lits = _lits collect { case x: LiteralPattern => x }
-
- condOpt(others) {
- case Nil => new PatternSwitch(scrut, lits, None)
- // TODO: This needs to also allow the case that the last is a compatible type pattern.
- case List(x) if isSwitchableDefault(x) => new PatternSwitch(scrut, lits, Some(x))
- }
- }
- }
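The subrange guard above is what separates matches that may become a switch from those that cannot. A hedged user-level sketch: the same literal cases are switchable against an Int scrutinee but not against an Any scrutinee.

    object SwitchSketch {
      def tag(i: Int): Int = i match {     // subrange scrutinee: eligible for a switch
        case 5 => 5
        case 6 => 7
        case _ => 0
      }
      def tagAny(x: Any): Int = x match {  // Any scrutinee: compiled as equality tests
        case 5 => 5
        case 6 => 7
        case _ => 0
      }
      def main(args: Array[String]): Unit =
        println((tag(6), tagAny(6)))       // (7,7)
    }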
-
- class PatternSwitch(
- scrut: Scrutinee,
- override val ps: List[LiteralPattern],
- val defaultPattern: Option[Pattern]
- ) extends PatternMatch(scrut, ps) {
- require(scrut.isSubrangeType && (ps forall (_.isSwitchable)))
- }
-
- case class PatternMatch(scrut: Scrutinee, ps: List[Pattern]) {
- def head = ps.head
- def tail = ps.tail
- def size = ps.length
-
- def headType = head.necessaryType
- private val dummyCount = if (head.isCaseClass) headType.typeSymbol.caseFieldAccessors.length else 0
- def dummies = emptyPatterns(dummyCount)
-
- def apply(i: Int): Pattern = ps(i)
- def pzip() = ps.zipWithIndex
- def pzip[T](others: List[T]) = {
- assert(ps.size == others.size, "Internal error: ps = %s, others = %s".format(ps, others))
- ps zip others
- }
-
- // Any unapply - returns Some(true) if a type test is needed before the unapply can
- // be called (e.g. def unapply(x: Foo) = { ... } but our scrutinee is of type Any).
- object AnyUnapply {
- def unapply(x: Pattern): Option[Boolean] = condOpt(x.tree) {
- case UnapplyParamType(tpe) => !(scrut.tpe <:< tpe)
- }
- }
-
- def mkRule(rest: Rep): RuleApplication = {
- tracing("Rule")(head match {
- case x if isEquals(x.tree.tpe) => new MixEquals(this, rest)
- case x: SequencePattern => new MixSequence(this, rest, x)
- case AnyUnapply(false) => new MixUnapply(this, rest)
- case _ =>
- isPatternSwitch(scrut, ps) match {
- case Some(x) => new MixLiteralInts(x, rest)
- case _ => new MixTypes(this, rest)
- }
- })
- }
- override def toString() = "%s match {%s}".format(scrut, indentAll(ps))
- } // PatternMatch
-
- /***** Rule Applications *****/
-
- sealed abstract class RuleApplication {
- def pmatch: PatternMatch
- def rest: Rep
- def cond: Tree
- def success: Tree
- def failure: Tree
-
- lazy val PatternMatch(scrut, patterns) = pmatch
- lazy val head = pmatch.head
- lazy val codegen: Tree = IF (cond) THEN (success) ELSE (failure)
-
- def mkFail(xs: List[Row]): Tree =
- if (xs.isEmpty) failTree
- else remake(xs).toTree
-
- def remake(
- rows: List[Row],
- pvgroup: PatternVarGroup = emptyPatternVarGroup,
- includeScrut: Boolean = true): Rep =
- {
- val scrutpvs = if (includeScrut) List(scrut.pv) else Nil
- make(pvgroup.pvs ::: scrutpvs ::: rest.tvars, rows)
- }
-
- /** translate outcome of the rule application into code (possibly involving recursive application of rewriting) */
- def tree(): Tree
-
- override def toString =
- "Rule/%s (%s =^= %s)".format(getClass.getSimpleName, scrut, head)
- }
-
- /** {case ... if guard => bx} else {guardedRest} */
- /** VariableRule: The top-most row has only variable (non-constructor) patterns. */
- case class VariableRule(subst: Bindings, guard: Tree, guardedRest: Rep, bx: Int) extends RuleApplication {
- def pmatch: PatternMatch = impossible
- def rest: Rep = guardedRest
-
- private lazy val (valDefs, successTree) = targets(bx) applyBindings subst.toMap
- lazy val cond = guard
- lazy val success = successTree
- lazy val failure = guardedRest.toTree
-
- final def tree(): Tree =
- if (bx < 0) REF(shortCuts(-bx))
- else squeezedBlock(
- valDefs,
- if (cond.isEmpty) success else codegen
- )
-
- override def toString = "(case %d) {\n Bindings: %s\n\n if (%s) { %s }\n else { %s }\n}".format(
- bx, subst, guard, success, guardedRest
- )
- }
-
- class MixLiteralInts(val pmatch: PatternSwitch, val rest: Rep) extends RuleApplication {
- val literals = pmatch.ps
- val defaultPattern = pmatch.defaultPattern
-
- private lazy val casted: Tree =
- if (!scrut.tpe.isInt) scrut.id DOT nme.toInt else scrut.id
-
- // creates a row transformer for injecting the default case bindings at a given index
- private def addDefaultVars(index: Int): Row => Row =
- if (defaultVars.isEmpty) identity
- else rebindAll(_, pmatch(index).boundVariables, scrut.sym)
-
- // add bindings for all the given vs to the given tvar
- private def rebindAll(r: Row, vs: Iterable[Symbol], tvar: Symbol) =
- r rebind r.subst.add(vs, tvar)
-
- private def bindVars(Tag: Int, orig: Bindings): Bindings = {
- def myBindVars(rest: List[(Int, List[Symbol])], bnd: Bindings): Bindings = rest match {
- case Nil => bnd
- case (Tag,vs)::xs => myBindVars(xs, bnd.add(vs, scrut.sym))
- case (_, vs)::xs => myBindVars(xs, bnd)
- }
- myBindVars(varMap, orig)
- }
-
- // bound vars and rows for default pattern (only one row, but a list is easier to use later)
- lazy val (defaultVars, defaultRows) = defaultPattern match {
- case None => (Nil, Nil)
- case Some(p) => (p.boundVariables, List(rebindAll(rest rows literals.size, p.boundVariables, scrut.sym)))
- }
-
- // literalMap is a map from each literal to a list of row indices.
- // varMap is a list pairing each literal with the variables it binds.
- lazy val (litPairs, varMap) = (
- literals.zipWithIndex map {
- case (lit, index) =>
- val tag = lit.intValue
- (tag -> index, tag -> lit.boundVariables)
- } unzip
- )
- def literalMap = litPairs groupBy (_._1) map {
- case (k, vs) => (k, vs map (_._2))
- }
-
- lazy val cases =
- for ((tag, indices) <- literalMap.toList.sortBy(_._1)) yield {
- val newRows = indices map (i => addDefaultVars(i)(rest rows i))
- val r = remake(newRows ++ defaultRows, includeScrut = false)
- val r2 = make(r.tvars, r.rows map (x => x rebind bindVars(tag, x.subst)))
-
- CASE(Literal(Constant(tag))) ==> r2.toTree
- }
-
- lazy val defaultTree = remake(defaultRows, includeScrut = false).toTree
- def defaultCase = CASE(WILD(IntClass.tpe)) ==> defaultTree
-
- // cond/success/failure are only used if there is exactly one case.
- lazy val cond = scrut.id MEMBER_== cases.head.pat
- lazy val success = cases.head.body
- lazy val failure = defaultTree
-
- // a single case becomes an if/else; otherwise a match
- def tree() =
- if (cases.size == 1) codegen
- else casted MATCH (cases :+ defaultCase: _*)
- }
-
- /** mixture rule for unapply pattern
- */
- class MixUnapply(val pmatch: PatternMatch, val rest: Rep) extends RuleApplication {
- val Pattern(UnApply(unMethod, unArgs)) = head
- val Apply(unTarget, _ :: trailing) = unMethod
-
- object SameUnapplyCall {
- def isSame(t: Tree) = isEquivalentTree(unTarget, t)
- def unapply(x: Pattern) = /*tracing("SameUnapplyCall (%s vs. %s)".format(unTarget, x))*/(x match {
- case Pattern(UnApply(Apply(fn, _), args)) if isSame(fn) => Some(args)
- case _ => None
- })
- }
- object SameUnapplyPattern {
- def isSame(t: Tree) = isEquivalentTree(unMethod, t)
- def apply(x: Pattern) = unapply(x).isDefined
- def unapply(x: Pattern) = /*tracing("SameUnapplyPattern (%s vs. %s)".format(unMethod, x))*/(x match {
- case Pattern(UnApply(t, _)) if isSame(t) => Some(unArgs)
- case _ => None
- })
- }
-
- private lazy val zipped = pmatch pzip rest.rows
-
- lazy val unapplyResult: PatternVar =
- scrut.createVar(unMethod.tpe, Apply(unTarget, scrut.id :: trailing) setType _.tpe)
-
- lazy val cond: Tree = unapplyResult.tpe.normalize match {
- case TypeRef(_, BooleanClass, _) => unapplyResult.ident
- case TypeRef(_, SomeClass, _) => TRUE
- case _ => NOT(unapplyResult.ident DOT nme.isEmpty)
- }
-
- lazy val failure =
- mkFail(zipped.tail filterNot (x => SameUnapplyPattern(x._1)) map { case (pat, r) => r insert pat })
-
- private def doSuccess: (List[PatternVar], List[PatternVar], List[Row]) = {
- // pattern variable for the unapply result of Some(x).get
- def unMethodTypeArg = unMethod.tpe.baseType(OptionClass).typeArgs match {
- case Nil => log("No type argument for unapply result! " + unMethod.tpe) ; NoType
- case arg :: _ => arg
- }
- lazy val pv = scrut.createVar(unMethodTypeArg, _ => fn(ID(unapplyResult.lhs), nme.get))
- def tuple = pv.lhs
-
- // at this point it's Some[T1,T2...]
- lazy val tpes = getProductArgs(tuple.tpe)
-
- // one pattern variable per tuple element
- lazy val tuplePVs =
- for ((tpe, i) <- tpes.zipWithIndex) yield
- scrut.createVar(tpe, _ => fn(ID(tuple), productProj(tuple, i + 1)))
-
- // the filter prevents infinite unapply recursion
- def mkNewRows(sameFilter: (List[Tree]) => List[Tree]) = {
- val dum = if (unArgs.length <= 1) unArgs.length else tpes.size
- for ((pat, r) <- zipped) yield pat match {
- case SameUnapplyCall(xs) => r.insert2(toPats(sameFilter(xs)) :+ NoPattern, pat.boundVariables, scrut.sym)
- case _ => r insert (emptyPatterns(dum) :+ pat)
- }
- }
-
- // 0 is Boolean, 1 is Option[T], 2+ is Option[(T1,T2,...)]
- unArgs.length match {
- case 0 => (Nil, Nil, mkNewRows((xs) => Nil))
- case 1 => (List(pv), List(pv), mkNewRows(xs => List(xs.head)))
- case _ => (pv :: tuplePVs, tuplePVs, mkNewRows(identity))
- }
- }
-
- lazy val success = {
- val (squeezePVs, pvs, rows) = doSuccess
- val srep = remake(rows, pvs).toTree
-
- squeezedBlock(squeezePVs map (_.valDef), srep)
- }
-
- final def tree() =
- squeezedBlock(List(handleOuter(unapplyResult.valDef)), codegen)
- }
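The three arities handled in doSuccess correspond to ordinary extractor shapes, and AnyUnapply's type test corresponds to an extractor whose parameter type is narrower than the scrutinee. A runnable sketch with made-up extractors:

    object Even  { def unapply(n: Int): Boolean = n % 2 == 0 }         // 0 args
    object Half  { def unapply(n: Int): Option[Int] = Some(n / 2) }    // 1 arg
    object Split {                                                     // 2 args
      def unapply(s: String): Option[(Char, String)] = s.headOption map (c => (c, s.tail))
    }

    object UnapplySketch {
      def demo(x: Any): String = x match {
        case Even()      => "even"         // scrutinee is Any, so a type test on Int
        case Half(h)     => "half " + h    // precedes each unapply call
        case Split(c, t) => s"$c + $t"
        case _           => "?"
      }
      def main(args: Array[String]): Unit =
        println(List(4, 5, "abc") map demo)   // List(even, half 2, a + bc)
    }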
-
- /** Handle Sequence patterns (including Star patterns.)
- * Note: pivot == head, just better typed.
- */
- sealed class MixSequence(val pmatch: PatternMatch, val rest: Rep, pivot: SequencePattern) extends RuleApplication {
- require(scrut.tpe <:< head.tpe)
-
- def hasStar = pivot.hasStar
- private def pivotLen = pivot.nonStarLength
- private def seqDummies = emptyPatterns(pivot.elems.length + 1)
-
- // Should the given pattern join the expanded pivot in the success matrix? If so,
- // this partial function will be defined for the pattern, and the result of the apply
- // is the expanded sequence of new patterns.
- lazy val successMatrixFn = new PartialFunction[Pattern, List[Pattern]] {
- private def seqIsDefinedAt(x: SequenceLikePattern) = (hasStar, x.hasStar) match {
- case (true, true) => true
- case (true, false) => pivotLen <= x.nonStarLength
- case (false, true) => pivotLen >= x.nonStarLength
- case (false, false) => pivotLen == x.nonStarLength
- }
-
- def isDefinedAt(pat: Pattern) = pat match {
- case x: SequenceLikePattern => seqIsDefinedAt(x)
- case WildcardPattern() => true
- case _ => false
- }
-
- def apply(pat: Pattern): List[Pattern] = pat match {
- case x: SequenceLikePattern =>
- def isSameLength = pivotLen == x.nonStarLength
- def rebound = x.nonStarPatterns :+ (x.elemPatterns.last rebindTo WILD(scrut.seqType))
-
- (pivot.hasStar, x.hasStar, isSameLength) match {
- case (true, true, true) => rebound :+ NoPattern
- case (true, true, false) => (seqDummies drop 1) :+ x
- case (true, false, true) => x.elemPatterns ++ List(NilPattern, NoPattern)
- case (false, true, true) => rebound
- case (false, false, true) => x.elemPatterns :+ NoPattern
- case _ => seqDummies
- }
-
- case _ => seqDummies
- }
- }
-
- // Should the given pattern be in the fail matrix? This is true of any sequence
- // as long as the result of the length test on the pivot doesn't make it impossible:
- // for instance if neither sequence is right ignoring and they are of different
- // lengths, the latter one cannot match since its length must be wrong.
- def failureMatrixFn(c: Pattern) = (pivot ne c) && (c match {
- case x: SequenceLikePattern =>
- (hasStar, x.hasStar) match {
- case (_, true) => true
- case (true, false) => pivotLen > x.nonStarLength
- case (false, false) => pivotLen != x.nonStarLength
- }
- case WildcardPattern() => true
- case _ => false
- })
-
- // divide the remaining rows into success/failure branches, expanding subsequences of patterns
- val successRows = pmatch pzip rest.rows collect {
- case (c, row) if successMatrixFn isDefinedAt c => row insert successMatrixFn(c)
- }
- val failRows = pmatch pzip rest.rows collect {
- case (c, row) if failureMatrixFn(c) => row insert c
- }
-
- // the discrimination test for sequences is a call to lengthCompare. Note that
- // this logic must be fully consistent with successMatrixFn and failureMatrixFn above:
- // any inconsistency will manifest (and frequently has manifested) as pattern matcher crashes.
- lazy val cond = {
- // the method call symbol
- val methodOp: Symbol = head.tpe member nme.lengthCompare
-
- // the comparison to perform. If the pivot is right ignoring, then a scrutinee sequence
- // of >= pivot length could match it; otherwise it must be exactly equal.
- val compareOp: (Tree, Tree) => Tree = if (hasStar) _ INT_>= _ else _ INT_== _
-
- // scrutinee.lengthCompare(pivotLength) [== | >=] 0
- val compareFn: Tree => Tree = (t: Tree) => compareOp((t DOT methodOp)(LIT(pivotLen)), ZERO)
-
- // wrapping in a null check on the scrutinee
- // XXX this needs to use the logic in "def condition"
- nullSafe(compareFn, FALSE)(scrut.id)
- // condition(head.tpe, scrut.id, head.boundVariables.nonEmpty)
- }
- lazy val success = {
- // one pattern var per sequence element up to the pivot length, and one more for the rest of the sequence
- lazy val pvs = scrut createSequenceVars pivotLen
-
- squeezedBlock(pvs map (_.valDef), remake(successRows, pvs, hasStar).toTree)
- }
- lazy val failure = remake(failRows).toTree
-
- final def tree(): Tree = codegen
- }
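In user terms, the lengthCompare condition distinguishes fixed-length sequence patterns (length must equal the pivot length) from right-ignoring ones (length must be at least the pivot length). A small sketch:

    object SeqSketch {
      def shape(xs: List[Int]): String = xs match {
        case List(a, b)     => "exactly two: " + (a + b)   // lengthCompare(2) == 0
        case List(a, _, _*) => "at least two; head " + a   // lengthCompare(2) >= 0
        case _              => "shorter"
      }
      def main(args: Array[String]): Unit =
        println(List(List(1, 2), List(1, 2, 3), List(1)) map shape)
        // List(exactly two: 3, at least two; head 1, shorter)
    }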
-
- class MixEquals(val pmatch: PatternMatch, val rest: Rep) extends RuleApplication {
- private lazy val rhs =
- decodedEqualsType(head.tpe) match {
- case SingleType(pre, sym) => REF(pre, sym)
- case PseudoType(o) => o
- }
- private lazy val labelDef =
- createLabelDef("fail%", remake((rest.rows.tail, pmatch.tail).zipped map (_ insert _)).toTree)
-
- lazy val cond = handleOuter(rhs MEMBER_== scrut.id)
- lazy val successOne = rest.rows.head.insert2(List(NoPattern), head.boundVariables, scrut.sym)
- lazy val successTwo = Row(emptyPatterns(1 + rest.tvars.size), NoBinding, EmptyTree, createShortCut(labelDef.symbol))
- lazy val success = remake(List(successOne, successTwo)).toTree
- lazy val failure = labelDef
-
- final def tree() = codegen
- override def toString() = "MixEquals(%s == %s)".format(scrut, head)
- }
-
- /** Mixture rule for type tests.
- * moreSpecific: more specific patterns
- * subsumed: more general patterns (subsuming current), row index and subpatterns
- * remaining: remaining patterns, row index and pattern
- */
- class MixTypes(val pmatch: PatternMatch, val rest: Rep) extends RuleApplication {
- case class Yes(bx: Int, moreSpecific: Pattern, subsumed: List[Pattern])
- case class No(bx: Int, remaining: Pattern)
-
- val (yeses, noes) = {
- val _ys = new ListBuffer[Yes]
- val _ns = new ListBuffer[No]
-
- for ((pattern, j) <- pmatch.pzip()) {
- // scrutinee, head of pattern group
- val (s, p) = (pattern.tpe, head.necessaryType)
-
- def isEquivalent = head.necessaryType =:= pattern.tpe
- def isObjectTest = pattern.isObject && (p =:= pattern.necessaryType)
-
- def sMatchesP = matches(s, p)
- def pMatchesS = matches(p, s)
-
- def ifEquiv(yes: Pattern): Pattern = if (isEquivalent) yes else pattern
-
- def passl(p: Pattern = NoPattern, ps: List[Pattern] = pmatch.dummies) = Some(Yes(j, p, ps))
- def passr() = Some( No(j, pattern))
-
- def typed(pp: Tree) = passl(ifEquiv(Pattern(pp)))
- def subs() = passl(ifEquiv(NoPattern), pattern subpatterns pmatch)
-
- val (oneY, oneN) = pattern match {
- case Pattern(LIT(null)) if !(p =:= s) => (None, passr) // (1)
- case x if isObjectTest => (passl(), None) // (2)
- case Pattern(Typed(pp, _)) if sMatchesP => (typed(pp), None) // (4)
- // The next line used to be this which "fixed" 1697 but introduced
- // numerous regressions including #3136.
- // case Pattern(_: UnApply, _) => (passl(), passr)
- case Pattern(_: UnApply) => (None, passr)
- case x if !x.isDefault && sMatchesP => (subs(), None)
- case x if x.isDefault || pMatchesS => (passl(), passr)
- case _ => (None, passr)
- }
- oneY map (_ys +=)
- oneN map (_ns +=)
- }
- (_ys.toList, _ns.toList)
- }
-
- val moreSpecific = yeses map (_.moreSpecific)
- val subsumed = yeses map (x => (x.bx, x.subsumed))
- val remaining = noes map (x => (x.bx, x.remaining))
-
- private def mkZipped =
- for (Yes(j, moreSpecific, subsumed) <- yeses) yield
- j -> (moreSpecific :: subsumed)
-
- lazy val casted = scrut castedTo pmatch.headType
- lazy val cond = condition(casted.tpe, scrut, head.boundVariables.nonEmpty)
-
- private def isAnyMoreSpecific = yeses exists (x => !x.moreSpecific.isEmpty)
- lazy val (subtests, subtestVars) =
- if (isAnyMoreSpecific) (mkZipped, List(casted.pv))
- else (subsumed, Nil)
-
- lazy val newRows =
- for ((j, ps) <- subtests) yield
- (rest rows j).insert2(ps, pmatch(j).boundVariables, casted.sym)
-
- lazy val success = {
- val srep = remake(newRows, subtestVars ::: casted.accessorPatternVars, includeScrut = false)
- squeezedBlock(casted.allValDefs, srep.toTree)
- }
-
- lazy val failure =
- mkFail(remaining map { case (p1, p2) => rest rows p1 insert p2 })
-
- final def tree(): Tree = codegen
- }
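A user-level sketch of the column split MixTypes performs: rows whose pattern conforms to the head type are kept as "more specific", the rest stay in the remaining/fail matrix. Names below are hypothetical:

    object TypeTestSketch {
      sealed trait Shape
      case class Circle(r: Double)          extends Shape
      case class Rect(w: Double, h: Double) extends Shape

      // The column on Shape is split on the head type (Circle): the Circle row is
      // "more specific", the Rect row remains for the failure branch.
      def area(s: Shape): Double = s match {
        case Circle(r)  => math.Pi * r * r
        case Rect(w, h) => w * h
      }
      def main(args: Array[String]): Unit =
        println(area(Rect(2, 3)))   // 6.0
    }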
-
- /*** States, Rows, Etc. ***/
-
- case class Row(pats: List[Pattern], subst: Bindings, guard: Tree, bx: Int) {
- private def nobindings = subst.get().isEmpty
- private def bindstr = if (nobindings) "" else pp(subst)
-
- /** Extracts the 'i'th pattern. */
- def extractColumn(i: Int) = {
- val (x, xs) = extractIndex(pats, i)
- (x, copy(pats = xs))
- }
-
- /** Replaces the 'i'th pattern with the argument. */
- def replaceAt(i: Int, p: Pattern) = {
- val newps = (pats take i) ::: p :: (pats drop (i + 1))
- copy(pats = newps)
- }
-
- def insert(h: Pattern) = copy(pats = h :: pats)
- def insert(hs: List[Pattern]) = copy(pats = hs ::: pats) // prepends supplied patterns
- def rebind(b: Bindings) = copy(subst = b) // substitutes for bindings
-
- def insert2(hs: List[Pattern], vs: Iterable[Symbol], tvar: Symbol) =
- tracing("insert2")(copy(pats = hs ::: pats, subst = subst.add(vs, tvar)))
-
- // returns this row with alternatives expanded
- def expandAlternatives(classifyPat: (Pattern, Int) => Pattern): List[Row] = {
- def isNotAlternative(p: Pattern) = !cond(p.tree) { case _: Alternative => true }
-
- // classify all the top level patterns - alternatives come back unaltered
- val newPats: List[Pattern] = pats.zipWithIndex map classifyPat.tupled
- // see if any alternatives were in there
- val (ps, others) = newPats span isNotAlternative
- // make a new row for each alternative, with it spliced into the original position
- if (others.isEmpty) List(copy(pats = ps))
- else extractBindings(others.head) map (x => replaceAt(ps.size, x))
- }
- override def toString() = {
- val bs = if (nobindings) "" else "\n" + bindstr
- "Row(%d)(%s%s)".format(bx, pp(pats), bs)
- }
- }
- abstract class State {
- def bx: Int // index into the list of rows
- def params: List[Symbol] // bound names to be supplied as arguments to labeldef
- def body: Tree // body to execute upon match
- def label: Option[LabelDef] // label definition for this state
-
- // Called with a bindings map when a match is achieved.
- // Returns a list of variable declarations based on the labeldef parameters
- // and the given substitution, and the body to execute.
- protected def applyBindingsImpl(subst: Map[Symbol, Symbol]): (List[ValDef], Tree)
-
- final def applyBindings(subst: Map[Symbol, Symbol]): (List[ValDef], Tree) = {
- _referenceCount += 1
- applyBindingsImpl(subst)
- }
-
- private var _referenceCount = 0
- def referenceCount = _referenceCount
- def unreached = referenceCount == 0
- def shouldInline(sym: Symbol) = referenceCount == 1 && label.exists(_.symbol == sym)
-
- // Creates a simple Ident if the symbol's type conforms to
- // the val definition's type, or a casted Ident if not.
- private def newValIdent(lhs: Symbol, rhs: Symbol) =
- if (rhs.tpe <:< lhs.tpe) Ident(rhs)
- else gen.mkTypeApply(Ident(rhs), Any_asInstanceOf, List(lhs.tpe))
-
- protected def newValDefinition(lhs: Symbol, rhs: Symbol) =
- typer typedValDef ValDef(lhs, newValIdent(lhs, rhs))
-
- protected def newValReference(lhs: Symbol, rhs: Symbol) =
- typer typed newValIdent(lhs, rhs)
-
- protected def valDefsFor(subst: Map[Symbol, Symbol]) = mapSubst(subst)(newValDefinition)
- protected def identsFor(subst: Map[Symbol, Symbol]) = mapSubst(subst)(newValReference)
-
- protected def mapSubst[T](subst: Map[Symbol, Symbol])(f: (Symbol, Symbol) => T): List[T] =
- params flatMap { lhs =>
- subst get lhs map (rhs => f(lhs, rhs)) orElse {
- // This should not happen; the code should be structured so it is
- // impossible, but that still lies ahead.
- cunit.warning(lhs.pos, "No binding")
- None
- }
- }
-
- // typer is not able to digest a body of type Nothing being assigned result type Unit
- protected def caseResultType =
- if (body.tpe.isNothing) body.tpe else matchResultType
- }
-
- case class LiteralState(bx: Int, params: List[Symbol], body: Tree) extends State {
- def label = None
-
- protected def applyBindingsImpl(subst: Map[Symbol, Symbol]) =
- (valDefsFor(subst), body.duplicate setType caseResultType)
- }
-
- case class FinalState(bx: Int, params: List[Symbol], body: Tree) extends State {
- traceCategory("Final State", "(%s) => %s", paramsString, body)
- def label = Some(labelDef)
-
- private lazy val labelDef = createLabelDef("body%" + bx, body, params, caseResultType)
-
- protected def applyBindingsImpl(subst: Map[Symbol, Symbol]) = {
- val tree =
- if (referenceCount > 1) ID(labelDef.symbol) APPLY identsFor(subst)
- else labelDef
-
- (valDefsFor(subst), tree)
- }
-
- private def paramsString = params map (s => s.name + ": " + s.tpe) mkString ", "
- override def toString() = pp("(%s) => %s".format(pp(params), body))
- }
-
- case class Rep(val tvars: PatternVarGroup, val rows: List[Row]) {
- lazy val Row(pats, subst, guard, index) = rows.head
- lazy val guardedRest = if (guard.isEmpty) Rep(Nil, Nil) else make(tvars, rows.tail)
- lazy val (defaults, others) = pats span (_.isDefault)
-
- /** Cut out the column containing the non-default pattern. */
- class Cut(index: Int) {
- /** The first two separate out the 'i'th pattern in each row from the remainder. */
- private val (_column, _rows) = rows map (_ extractColumn index) unzip
-
- /** Now the 'i'th tvar is separated out and used as a new Scrutinee. */
- private val (_pv, _tvars) = tvars extractIndex index
-
- /** The non-default pattern (others.head) replaces the column head. */
- private val (_ncol, _nrep) =
- (others.head :: _column.tail, make(_tvars, _rows))
-
- def mix() = {
- val newScrut = new Scrutinee(new PatternVar(_pv.sym, EmptyTree, _pv.checked))
- PatternMatch(newScrut, _ncol) mkRule _nrep
- }
- }
-
- /** Converts this to a tree - recursively acquires subreps. */
- final def toTree(): Tree = tracing("toTree")(typer typed applyRule())
-
- /** The VariableRule. */
- private def variable() = {
- val binding = (defaults map (_.boundVariables) zip tvars.pvs) .
- foldLeft(subst)((b, pair) => b.add(pair._1, pair._2.lhs))
-
- VariableRule(binding, guard, guardedRest, index)
- }
- /** The MixtureRule: picks a rewrite rule to apply. */
- private def mixture() = new Cut(defaults.size) mix()
-
- /** Applying the rule will result in one of:
- *
- * VariableRule - if all patterns are default patterns
- * MixtureRule - if one or more patterns are not default patterns
- * Error - no rows remaining
- */
- final def applyRule(): Tree =
- if (rows.isEmpty) failTree
- else if (others.isEmpty) variable.tree()
- else mixture.tree()
-
- def ppn(x: Any) = pp(x, newlines = true)
- override def toString() =
- if (tvars.isEmpty) "Rep(%d) = %s".format(rows.size, ppn(rows))
- else "Rep(%dx%d)%s%s".format(tvars.size, rows.size, ppn(tvars), ppn(rows))
- }
-
- /** Expands the patterns recursively. */
- final def expand(roots: List[PatternVar], cases: List[CaseDef]) = tracing("expand") {
- for ((CaseDef(pat, guard, body), bx) <- cases.zipWithIndex) yield {
- val subtrees = pat match {
- case x if roots.length <= 1 => List(x)
- case Apply(_, args) => args
- case WILD() => emptyTrees(roots.length)
- }
- val params = pat filter (_.isInstanceOf[Bind]) map (_.symbol) distinct
- val row = Row(toPats(subtrees), NoBinding, guard, bx)
- val state = body match {
- case x: Literal => LiteralState(bx, params, body)
- case _ => FinalState(bx, params, body)
- }
-
- row -> state
- }
- }
-
- /** returns the condition in "if (cond) k1 else k2"
- */
- final def condition(tpe: Type, scrut: Scrutinee, isBound: Boolean): Tree = {
- assert(scrut.isDefined)
- val cond = handleOuter(condition(tpe, scrut.id, isBound))
-
- if (!needsOuterTest(tpe, scrut.tpe, owner)) cond
- else addOuterCondition(cond, tpe, scrut.id)
- }
-
- final def condition(tpe: Type, scrutTree: Tree, isBound: Boolean): Tree = {
- assert((tpe ne NoType) && (scrutTree.tpe ne NoType))
- def isMatchUnlessNull = scrutTree.tpe <:< tpe && tpe.isAnyRef
- def isRef = scrutTree.tpe.isAnyRef
-
- // See ticket #1503 for the motivation behind checking for a binding.
- // The upshot is that it is unsound to assume equality means the right
- // type, but if the value doesn't appear on the right hand side of the
- // match that's unimportant; so we add an instance check only if there
- // is a binding.
- def bindingWarning() = {
- if (isBound && settings.Xmigration.value < ScalaVersion.twoDotEight) {
- cunit.warning(scrutTree.pos,
- "A bound pattern such as 'x @ Pattern' now matches fewer cases than the same pattern with no binding.")
- }
- }
-
- def genEquals(sym: Symbol): Tree = {
- val t1: Tree = REF(sym) MEMBER_== scrutTree
-
- if (isBound) {
- bindingWarning()
- t1 AND (scrutTree IS tpe.widen)
- }
- else t1
- }
-
- typer typed {
- tpe match {
- case ConstantType(Constant(null)) if isRef => scrutTree OBJ_EQ NULL
- case ConstantType(const) => scrutTree MEMBER_== Literal(const)
- case SingleType(NoPrefix, sym) => genEquals(sym)
- case SingleType(pre, sym) if sym.isStable => genEquals(sym)
- case ThisType(sym) if sym.isModule => genEquals(sym)
- case _ if isMatchUnlessNull => scrutTree OBJ_NE NULL
- case _ => scrutTree IS tpe
- }
- }
- }
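In user terms the typed block above distinguishes: a stable identifier compiles to an equality test, a type pattern to an instance test, and a bound pattern such as y @ Answer gets the extra instance check discussed in the comment. A small sketch:

    object ConditionSketch {
      val Answer = 42                           // stable identifier: equality test
      def describe(x: Any): String = x match {
        case y @ Answer => "the answer: " + y   // equality, plus an instance check for the binding
        case s: String  => "string " + s        // plain type test
        case _          => "other"
      }
      def main(args: Array[String]): Unit =
        println(List(42, "hi", 3.0) map describe)   // List(the answer: 42, string hi, other)
    }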
-
- /** adds a test comparing the dynamic outer to the static outer */
- final def addOuterCondition(cond: Tree, tpe2test: Type, scrut: Tree) = {
- val TypeRef(prefix, _, _) = tpe2test
- val theRef = handleOuter(prefix match {
- case NoPrefix => abort("assertion failed: NoPrefix")
- case ThisType(clazz) => THIS(clazz)
- case pre => REF(pre.prefix, pre.termSymbol)
- })
- outerAccessor(tpe2test.typeSymbol) match {
- case NoSymbol => ifDebug(cunit.warning(scrut.pos, "no outer acc for " + tpe2test.typeSymbol)) ; cond
- case outerAcc =>
- val casted = gen.mkAsInstanceOf(scrut, tpe2test, any = true, wrapInApply = true)
- cond AND ((casted DOT outerAcc)() OBJ_EQ theRef)
- }
- }
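The outer test added here is what keeps inner-class patterns honest: two values of the same inner class but with different outer instances must not match each other's path-dependent type patterns. A sketch of the observable behaviour:

    object OuterSketch {
      class Outer { case class Inner(n: Int) }

      def sameOuter(o: Outer)(x: Any): Boolean = x match {
        case _: o.Inner => true    // class test plus a comparison of the outer reference
        case _          => false
      }
      def main(args: Array[String]): Unit = {
        val a = new Outer
        val b = new Outer
        println((sameOuter(a)(a.Inner(1)), sameOuter(a)(b.Inner(1))))   // (true,false)
      }
    }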
- }
-}
diff --git a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
deleted file mode 100644
index 7b2fcf0e9b..0000000000
--- a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
+++ /dev/null
@@ -1,137 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import transform.ExplicitOuter
-import PartialFunction._
-import scala.language.postfixOps
-
-trait PatternBindings extends ast.TreeDSL
-{
- self: ExplicitOuter with ParallelMatching =>
-
- import global.{ typer => _, _ }
- import definitions.{ EqualsPatternClass }
- import CODE._
- import Debug._
-
- /** EqualsPattern **/
- def isEquals(tpe: Type) = tpe.typeSymbol == EqualsPatternClass
- def mkEqualsRef(tpe: Type) = typeRef(NoPrefix, EqualsPatternClass, List(tpe))
- def decodedEqualsType(tpe: Type) =
- if (tpe.typeSymbol == EqualsPatternClass) tpe.typeArgs.head else tpe
-
- // A subtype test which creates fresh existentials for type
- // parameters on the right hand side.
- def matches(arg1: Type, arg2: Type) = decodedEqualsType(arg1) matchesPattern decodedEqualsType(arg2)
-
- // For spotting duplicate unapplies
- def isEquivalentTree(t1: Tree, t2: Tree) = (t1.symbol == t2.symbol) && (t1 equalsStructure t2)
-
- // Re-create around newTree the Bind trees that wrap oldTree
- def moveBindings(oldTree: Tree, newTree: Tree): Tree = oldTree match {
- case b @ Bind(x, body) => Bind(b.symbol, moveBindings(body, newTree))
- case _ => newTree
- }
-
- // used as argument to `EqualsPatternClass`
- case class PseudoType(o: Tree) extends SimpleTypeProxy {
- override def underlying: Type = o.tpe
- override def safeToString: String = "PseudoType("+o+")"
- }
-
- // If the given pattern contains alternatives, return it as a list of patterns.
- // Makes typed copies of any bindings found so all alternatives point to final state.
- def extractBindings(p: Pattern): List[Pattern] =
- toPats(_extractBindings(p.boundTree, identity))
-
- private def _extractBindings(p: Tree, prevBindings: Tree => Tree): List[Tree] = {
- def newPrev(b: Bind) = (x: Tree) => treeCopy.Bind(b, b.name, x) setType x.tpe
-
- p match {
- case b @ Bind(_, body) => _extractBindings(body, newPrev(b))
- case Alternative(ps) => ps map prevBindings
- }
- }
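The Bind-around-Alternative shape that _extractBindings walks looks like this at the user level; the branches themselves may not bind variables, so only the outer binding needs copying:

    object BindAltSketch {
      def smallOrNot(x: Int): String = x match {
        case n @ (1 | 2 | 3) => "small: " + n   // Bind(n, Alternative(1, 2, 3))
        case n               => "large: " + n
      }
      def main(args: Array[String]): Unit =
        println(List(2, 9) map smallOrNot)      // List(small: 2, large: 9)
    }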
-
- trait PatternBindingLogic {
- self: Pattern =>
-
- // This is for traversing the pattern tree - pattern types which might have
- // bound variables beneath them return a list of said patterns for flatMapping.
- def subpatternsForVars: List[Pattern] = Nil
-
- // The outermost Bind(x1, Bind(x2, ...)) surrounding the tree.
- private var _boundTree: Tree = tree
- def boundTree = _boundTree
- def setBound(x: Bind): Pattern = {
- _boundTree = x
- this
- }
- def boundVariables = strip(boundTree)
-
- // If a tree has bindings, boundTree looks something like
- // Bind(v3, Bind(v2, Bind(v1, tree)))
- // This takes the given tree and creates a new pattern
- // using the same bindings.
- def rebindTo(t: Tree): Pattern = Pattern(moveBindings(boundTree, t))
-
- // Wrap this pattern's bindings around (_: Type)
- def rebindToType(tpe: Type, ascription: Type = null): Pattern = {
- val aType = if (ascription == null) tpe else ascription
- rebindTo(Typed(WILD(tpe), TypeTree(aType)) setType tpe)
- }
-
- // Wrap them around _
- def rebindToEmpty(tpe: Type): Pattern =
- rebindTo(Typed(EmptyTree, TypeTree(tpe)) setType tpe)
-
- // Wrap them around a singleton type for an EqualsPattern check.
- def rebindToEqualsCheck(): Pattern =
- rebindToType(equalsCheck)
-
- // Like rebindToEqualsCheck, but subtly different. Not trying to be
- // mysterious -- I haven't sorted it all out yet.
- def rebindToObjectCheck(): Pattern =
- rebindToType(mkEqualsRef(sufficientType), sufficientType)
-
- /** Helpers **/
- private def wrapBindings(vs: List[Symbol], pat: Tree): Tree = vs match {
- case Nil => pat
- case x :: xs => Bind(x, wrapBindings(xs, pat)) setType pat.tpe
- }
- private def strip(t: Tree): List[Symbol] = t match {
- case b @ Bind(_, pat) => b.symbol :: strip(pat)
- case _ => Nil
- }
- private def deepstrip(t: Tree): List[Symbol] =
- treeCollect(t, { case x: Bind => x.symbol })
- }
-
- case class Binding(pvar: Symbol, tvar: Symbol) {
- override def toString() = pvar.name + " -> " + tvar.name
- }
-
- class Bindings(private val vlist: List[Binding]) {
- // if (!vlist.isEmpty)
- // traceCategory("Bindings", this.toString)
-
- def get() = vlist
- def toMap = vlist map (x => (x.pvar, x.tvar)) toMap
-
- def add(vs: Iterable[Symbol], tvar: Symbol): Bindings = {
- val newBindings = vs.toList map (v => Binding(v, tvar))
- new Bindings(newBindings ++ vlist)
- }
-
- override def toString() =
- if (vlist.isEmpty) "<none>"
- else vlist.mkString(", ")
- }
-
- val NoBinding: Bindings = new Bindings(Nil)
-}
diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala
deleted file mode 100644
index f116a7c4c7..0000000000
--- a/src/compiler/scala/tools/nsc/matching/Patterns.scala
+++ /dev/null
@@ -1,499 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import symtab.Flags
-import PartialFunction._
-
-/** Patterns are wrappers for Trees with enhanced semantics.
- *
- * @author Paul Phillips
- */
-
-trait Patterns extends ast.TreeDSL {
- self: transform.ExplicitOuter =>
-
- import global.{ typer => _, _ }
- import definitions._
- import CODE._
- import Debug._
- import treeInfo.{ unbind, isStar, isVarPattern }
-
- type PatternMatch = MatchMatrix#PatternMatch
- private type PatternVar = MatrixContext#PatternVar
-
- // Fresh patterns
- def emptyPatterns(i: Int): List[Pattern] = List.fill(i)(NoPattern)
- def emptyTrees(i: Int): List[Tree] = List.fill(i)(EmptyTree)
-
- // An empty pattern
- def NoPattern = WildcardPattern()
-
- // The constant null pattern
- def NullPattern = LiteralPattern(NULL)
-
- // The Nil pattern
- def NilPattern = Pattern(gen.mkNil)
-
- // 8.1.1
- case class VariablePattern(tree: Ident) extends NamePattern {
- lazy val Ident(name) = tree
- require(isVarPattern(tree) && name != nme.WILDCARD)
- override def covers(sym: Symbol) = true
- override def description = "%s".format(name)
- }
-
- // 8.1.1 (b)
- case class WildcardPattern() extends Pattern {
- def tree = EmptyTree
- override def covers(sym: Symbol) = true
- override def isDefault = true
- override def description = "_"
- }
-
- // 8.1.2
- case class TypedPattern(tree: Typed) extends Pattern {
- lazy val Typed(expr, tpt) = tree
-
- override def covers(sym: Symbol) = newMatchesPattern(sym, tpt.tpe)
- override def sufficientType = tpt.tpe
- override def subpatternsForVars: List[Pattern] = List(Pattern(expr))
- override def simplify(pv: PatternVar) = Pattern(expr) match {
- case ExtractorPattern(ua) if pv.sym.tpe <:< tpt.tpe => this rebindTo expr
- case _ => this
- }
- override def description = "%s: %s".format(Pattern(expr), tpt)
- }
-
- // 8.1.3
- case class LiteralPattern(tree: Literal) extends Pattern {
- lazy val Literal(const @ Constant(value)) = tree
-
- def isSwitchable = cond(const.tag) { case ByteTag | ShortTag | IntTag | CharTag => true }
- def intValue = const.intValue
- override def description = {
- val s = if (value == null) "null" else value.toString
- "Lit(%s)".format(s)
- }
- }
-
- // 8.1.4 (a)
- case class ApplyIdentPattern(tree: Apply) extends ApplyPattern with NamePattern {
- // XXX - see bug 3411 for code which violates this assumption
- // require (!isVarPattern(fn) && args.isEmpty)
- lazy val ident @ Ident(name) = fn
-
- override def sufficientType = Pattern(ident).equalsCheck
- override def simplify(pv: PatternVar) = this.rebindToObjectCheck()
- override def description = "Id(%s)".format(name)
- }
- // 8.1.4 (b)
- case class ApplySelectPattern(tree: Apply) extends ApplyPattern with SelectPattern {
- require (args.isEmpty)
- lazy val Apply(select: Select, _) = tree
-
- override lazy val sufficientType = qualifier.tpe match {
- case t: ThisType => singleType(t, sym) // this.X
- case _ =>
- qualifier match {
- case _: Apply => PseudoType(tree)
- case _ => singleType(Pattern(qualifier).necessaryType, sym)
- }
- }
-
- override def covers(sym: Symbol) = newMatchesPattern(sym, sufficientType)
- override def simplify(pv: PatternVar) = this.rebindToObjectCheck()
- override def description = backticked match {
- case Some(s) => "this." + s
- case _ => "Sel(%s.%s)".format(Pattern(qualifier), name)
- }
-
- }
- // 8.1.4 (c)
- case class StableIdPattern(tree: Select) extends SelectPattern {
- def select = tree
- override def description = "St(%s)".format(printableSegments.mkString(" . "))
- private def printableSegments =
- pathSegments filter (x => !x.isEmpty && (x.toString != "$iw"))
- }
- // 8.1.4 (d)
- case class ObjectPattern(tree: Apply) extends ApplyPattern { // NamePattern?
- require(!fn.isType && isModule)
-
- override def covers(sym: Symbol) = newMatchesPattern(sym, sufficientType)
- override def sufficientType = tpe.narrow
- override def simplify(pv: PatternVar) = this.rebindToObjectCheck()
- override def description = "Obj(%s)".format(fn)
- }
- // 8.1.4 (e)
- case class SimpleIdPattern(tree: Ident) extends NamePattern {
- val Ident(name) = tree
- override def covers(sym: Symbol) = newMatchesPattern(sym, tpe.narrow)
- override def description = "Id(%s)".format(name)
- }
-
- // 8.1.5
- case class ConstructorPattern(tree: Apply) extends ApplyPattern with NamePattern {
- require(fn.isType && this.isCaseClass, "tree: " + tree + " fn: " + fn)
- def name = tpe.typeSymbol.name
- def cleanName = tpe.typeSymbol.decodedName
- def hasPrefix = tpe.prefix.prefixString != ""
- def prefixedName =
- if (hasPrefix) "%s.%s".format(tpe.prefix.prefixString, cleanName)
- else cleanName
-
- private def isColonColon = cleanName == "::"
-
- override def subpatterns(pm: MatchMatrix#PatternMatch) =
- if (pm.head.isCaseClass) toPats(args)
- else super.subpatterns(pm)
-
- override def simplify(pv: PatternVar) =
- if (args.isEmpty) this rebindToEmpty tree.tpe
- else this
-
- override def covers(sym: Symbol) = {
- debugging("[constructor] Does " + this + " cover " + sym + " ? ") {
- sym.tpe.typeSymbol == this.tpe.typeSymbol
- }
- }
- override def description = {
- if (isColonColon) "%s :: %s".format(Pattern(args(0)), Pattern(args(1)))
- else "%s(%s)".format(name, toPats(args).mkString(", "))
- }
- }
- // 8.1.6
- case class TuplePattern(tree: Apply) extends ApplyPattern {
- override def description = "((%s))".format(args.size, toPats(args).mkString(", "))
- }
-
- // 8.1.7 / 8.1.8 (unapply and unapplySeq calls)
- case class ExtractorPattern(tree: UnApply) extends UnapplyPattern {
- private def uaTyped = Typed(tree, TypeTree(arg.tpe)) setType arg.tpe
-
- override def simplify(pv: PatternVar) = {
- if (pv.tpe <:< arg.tpe) this
- else this rebindTo uaTyped
- }
- override def description = "Unapply(%s => %s)".format(necessaryType, resTypesString)
- }
-
- // Special List handling. It was like that when I got here.
- case class ListExtractorPattern(tree: UnApply, tpt: Tree, elems: List[Tree]) extends UnapplyPattern with SequenceLikePattern {
- // As yet I can't testify this is doing any good relative to using
- // tpt.tpe, but it doesn't seem to hurt either.
- private lazy val packedType = global.typer.computeType(tpt, tpt.tpe)
- private lazy val consRef = appliedType(ConsClass, packedType)
- private lazy val listRef = appliedType(ListClass, packedType)
- private lazy val seqRef = appliedType(SeqClass, packedType)
-
- private def thisSeqRef = {
- val tc = (tree.tpe baseType SeqClass).typeConstructor
- if (tc.typeParams.size == 1) appliedType(tc, List(packedType))
- else seqRef
- }
-
- // Fold a list into a well-typed x :: y :: etc :: tree.
- private def listFolder(hd: Tree, tl: Tree): Tree = unbind(hd) match {
- case t @ Star(_) => moveBindings(hd, WILD(t.tpe))
- case _ =>
- val dummyMethod = NoSymbol.newTermSymbol(newTermName("matching$dummy"))
- val consType = MethodType(dummyMethod newSyntheticValueParams List(packedType, listRef), consRef)
-
- Apply(TypeTree(consType), List(hd, tl)) setType consRef
- }
- private def foldedPatterns = elems.foldRight(gen.mkNil)((x, y) => listFolder(x, y))
- override def necessaryType = if (nonStarPatterns.nonEmpty) consRef else listRef
-
- override def simplify(pv: PatternVar) = {
- if (pv.tpe <:< necessaryType)
- Pattern(foldedPatterns)
- else
- this rebindTo (Typed(tree, TypeTree(necessaryType)) setType necessaryType)
- }
- override def description = "List(%s => %s)".format(packedType, resTypesString)
- }
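The folding above turns a fixed List(...) pattern into nested cons cells. A user-level sketch of the two shapes involved:

    object ListPatternSketch {
      def sum2(xs: List[Int]): Int = xs match {
        case List(a, b) => a + b   // folded into a :: b :: Nil
        case a :: _     => a       // cons pattern: at least one element
        case Nil        => 0
      }
      def main(args: Array[String]): Unit =
        println(List(List(1, 2), List(5, 6, 7), Nil) map sum2)   // List(3, 5, 0)
    }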
-
- trait SequenceLikePattern extends Pattern {
- def elems: List[Tree]
- override def hasStar = elems.nonEmpty && isStar(elems.last)
-
- def elemPatterns = toPats(elems)
- def nonStarElems = if (hasStar) elems.init else elems
- def nonStarPatterns = toPats(nonStarElems)
- def nonStarLength = nonStarElems.length
- }
-
- // 8.1.8 (b) (literal ArrayValues)
- case class SequencePattern(tree: ArrayValue) extends Pattern with SequenceLikePattern {
- lazy val ArrayValue(elemtpt, elems) = tree
-
- override def subpatternsForVars: List[Pattern] = elemPatterns
- override def description = "Seq(%s)".format(elemPatterns mkString ", ")
- }
-
- // 8.1.8 (c)
- case class StarPattern(tree: Star) extends Pattern {
- lazy val Star(elem) = tree
- override def description = "_*"
- }
- // XXX temporary?
- case class ThisPattern(tree: This) extends NamePattern {
- lazy val This(name) = tree
- override def description = "this"
- }
-
- // 8.1.9
- // InfixPattern ... subsumed by Constructor/Extractor Patterns
-
- // 8.1.10
- case class AlternativePattern(tree: Alternative) extends Pattern {
- private lazy val Alternative(subtrees) = tree
- private def alts = toPats(subtrees)
- override def description = "Alt(%s)".format(alts mkString " | ")
- }
-
- // 8.1.11
- // XMLPattern ... for now, subsumed by SequencePattern, but if we want
- // to make it work right, it probably needs special handling.
-
- private def abortUnknownTree(tree: Tree) =
- abort("Unknown Tree reached pattern matcher: %s/%s".format(tree, tree.getClass))
-
- object Pattern {
- // a small tree -> pattern cache
- private val cache = perRunCaches.newMap[Tree, Pattern]()
-
- def apply(tree: Tree): Pattern = {
- if (cache contains tree)
- return cache(tree)
-
- val p = tree match {
- case x: Bind => apply(unbind(tree)) setBound x
- case EmptyTree => WildcardPattern()
- case Ident(nme.WILDCARD) => WildcardPattern()
- case x @ Alternative(ps) => AlternativePattern(x)
- case x: Apply => ApplyPattern(x)
- case x: Typed => TypedPattern(x)
- case x: Literal => LiteralPattern(x)
- case x: UnApply => UnapplyPattern(x)
- case x: Ident => if (isVarPattern(x)) VariablePattern(x) else SimpleIdPattern(x)
- case x: ArrayValue => SequencePattern(x)
- case x: Select => StableIdPattern(x)
- case x: Star => StarPattern(x)
- case x: This => ThisPattern(x) // XXX ?
- case _ => abortUnknownTree(tree)
- }
- cache(tree) = p
-
- // limiting the trace output
- p match {
- case WildcardPattern() => p
- case _: LiteralPattern => p
- case _ => tracing("Pattern")(p)
- }
- }
- // matching on Pattern(...) always skips the bindings.
- def unapply(other: Any): Option[Tree] = other match {
- case x: Tree => unapply(Pattern(x))
- case x: Pattern => Some(x.tree)
- case _ => None
- }
- }
-
- object UnapplyPattern {
- private object UnapplySeq {
- def unapply(x: UnApply) = x match {
- case UnApply(
- Apply(TypeApply(Select(qual, nme.unapplySeq), List(tpt)), _),
- List(ArrayValue(_, elems))) =>
- Some((qual.symbol, tpt, elems))
- case _ =>
- None
- }
- }
-
- def apply(x: UnApply): Pattern = x match {
- case UnapplySeq(ListModule, tpt, elems) =>
- ListExtractorPattern(x, tpt, elems)
- case _ =>
- ExtractorPattern(x)
- }
- }
-
- // right now a tree like x @ Apply(fn, Nil) where !fn.isType
- // is handled by creating a singleton type:
- //
- // val stype = Types.singleType(x.tpe.prefix, x.symbol)
- //
- // and then passing that as a type argument to EqualsPatternClass:
- //
- // val tpe = typeRef(NoPrefix, EqualsPatternClass, List(stype))
- //
- // then creating a Typed pattern and rebinding.
- //
- // val newpat = Typed(EmptyTree, TypeTree(tpe)) setType tpe
- //
- // This is also how Select(qual, name) is handled.
- object ApplyPattern {
- def apply(x: Apply): Pattern = {
- val Apply(fn, args) = x
- def isModule = x.symbol.isModule || x.tpe.termSymbol.isModule
-
- if (fn.isType) {
- if (isTupleType(fn.tpe)) TuplePattern(x)
- else ConstructorPattern(x)
- }
- else if (args.isEmpty) {
- if (isModule) ObjectPattern(x)
- else fn match {
- case _: Ident => ApplyIdentPattern(x)
- case _: Select => ApplySelectPattern(x)
- }
- }
- else abortUnknownTree(x)
- }
- }
-
- /** Some intermediate pattern classes with shared structure **/
-
- sealed trait SelectPattern extends NamePattern {
- def select: Select
- lazy val Select(qualifier, name) = select
- def pathSegments = getPathSegments(tree)
- def backticked: Option[String] = qualifier match {
- case _: This if nme.isVariableName(name) => Some("`%s`".format(name))
- case _ => None
- }
- override def covers(sym: Symbol) = newMatchesPattern(sym, tree.tpe)
- protected def getPathSegments(t: Tree): List[Name] = t match {
- case Select(q, name) => name :: getPathSegments(q)
- case Apply(f, Nil) => getPathSegments(f)
- case _ => Nil
- }
- }
-
- sealed trait NamePattern extends Pattern {
- def name: Name
- override def sufficientType = tpe.narrow
- override def simplify(pv: PatternVar) = this.rebindToEqualsCheck()
- override def description = name.toString
- }
-
- sealed trait UnapplyPattern extends Pattern {
- lazy val UnApply(unfn, args) = tree
- lazy val Apply(fn, _) = unfn
- lazy val MethodType(List(arg, _*), _) = fn.tpe
-
- // Covers if the symbol matches the unapply method's argument type,
- // and the return type of the unapply is Some.
- override def covers(sym: Symbol) = newMatchesPattern(sym, arg.tpe)
-
- // TODO: for alwaysCovers:
- // fn.tpe.finalResultType.typeSymbol == SomeClass
-
- override def necessaryType = arg.tpe
- override def subpatternsForVars = args match {
- case List(ArrayValue(elemtpe, elems)) => toPats(elems)
- case _ => toPats(args)
- }
-
- def resTypes = analyzer.unapplyTypeList(unfn.pos, unfn.symbol, unfn.tpe, args.length)
- def resTypesString = resTypes match {
- case Nil => "Boolean"
- case xs => xs.mkString(", ")
- }
- }
-
- sealed trait ApplyPattern extends Pattern {
- lazy val Apply(fn, args) = tree
- override def subpatternsForVars: List[Pattern] = toPats(args)
-
- override def dummies =
- if (!this.isCaseClass) Nil
- else emptyPatterns(sufficientType.typeSymbol.caseFieldAccessors.size)
-
- def isConstructorPattern = fn.isType
- override def covers(sym: Symbol) = newMatchesPattern(sym, fn.tpe)
- }
-
- sealed abstract class Pattern extends PatternBindingLogic {
- def tree: Tree
-
- // returns either a simplification of this pattern or identity.
- def simplify(pv: PatternVar): Pattern = this
-
- // the right number of dummies for this pattern
- def dummies: List[Pattern] = Nil
-
- // Is this a default pattern (untyped "_" or an EmptyTree inserted by the matcher)?
- def isDefault = false
-
- // what type must a scrutinee have to have any chance of matching this pattern?
- def necessaryType = tpe
-
- // what type could a scrutinee have which would automatically indicate a match?
- // (nullness and guards will still be checked.)
- def sufficientType = tpe
-
- // the subpatterns for this pattern (at the moment, that means constructor arguments)
- def subpatterns(pm: MatchMatrix#PatternMatch): List[Pattern] = pm.dummies
-
- // if this pattern should be considered to cover the given symbol
- def covers(sym: Symbol): Boolean = newMatchesPattern(sym, sufficientType)
- def newMatchesPattern(sym: Symbol, pattp: Type) = {
- debugging("[" + kindString + "] Does " + pattp + " cover " + sym + " ? ") {
- (sym.isModuleClass && (sym.tpe.typeSymbol eq pattp.typeSymbol)) ||
- (sym.tpe.baseTypeSeq exists (_ matchesPattern pattp))
- }
- }
-
- def sym = tree.symbol
- def tpe = tree.tpe
- def isEmpty = tree.isEmpty
-
- def isModule = sym.isModule || tpe.termSymbol.isModule
- def isCaseClass = tpe.typeSymbol.isCase
- def isObject = (sym != null) && (sym != NoSymbol) && tpe.prefix.isStable // XXX not entire logic
-
- def hasStar = false
-
- def setType(tpe: Type): this.type = {
- tree setType tpe
- this
- }
-
- def equalsCheck =
- tracing("equalsCheck")(
- if (sym.isValue) singleType(NoPrefix, sym)
- else tpe.narrow
- )
-
- /** Standard methods **/
- override def equals(other: Any) = other match {
- case x: Pattern => this.boundTree == x.boundTree
- case _ => super.equals(other)
- }
- override def hashCode() = boundTree.hashCode()
- def description = super.toString
-
- final override def toString = description
-
- def toTypeString() = "%s <: x <: %s".format(necessaryType, sufficientType)
- def kindString = ""
- }
-
- /*** Extractors ***/
-
- object UnapplyParamType {
- def unapply(x: Tree): Option[Type] = condOpt(unbind(x)) {
- case UnApply(Apply(fn, _), _) => fn.tpe match {
- case m: MethodType => m.paramTypes.head
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/package.scala b/src/compiler/scala/tools/nsc/package.scala
index 00a9f3b39c..ee1668a38a 100644
--- a/src/compiler/scala/tools/nsc/package.scala
+++ b/src/compiler/scala/tools/nsc/package.scala
@@ -6,9 +6,21 @@
package scala.tools
package object nsc {
+ type Mode = scala.reflect.internal.Mode
+ val Mode = scala.reflect.internal.Mode
+
+ def EXPRmode = Mode.EXPRmode
+ def BYVALmode = Mode.BYVALmode
+ def POLYmode = Mode.POLYmode
+ def TAPPmode = Mode.TAPPmode
+ def FUNmode = Mode.FUNmode
+
type Phase = scala.reflect.internal.Phase
val NoPhase = scala.reflect.internal.NoPhase
+ type Variance = scala.reflect.internal.Variance
+ val Variance = scala.reflect.internal.Variance
+
type FatalError = scala.reflect.internal.FatalError
val FatalError = scala.reflect.internal.FatalError
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
index 2050ce7ffd..b0113f7696 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
@@ -6,13 +6,14 @@
package scala.tools.nsc
package plugins
-import io.{ File, Path, Jar }
-import java.net.URLClassLoader
-import java.util.jar.JarFile
+import scala.tools.nsc.io.{ Jar }
+import scala.tools.nsc.util.ScalaClassLoader
+import scala.reflect.io.{ Directory, File, Path }
+import java.io.InputStream
import java.util.zip.ZipException
-import scala.collection.mutable
-import mutable.ListBuffer
+import scala.collection.mutable.ListBuffer
+import scala.util.{ Try, Success, Failure }
import scala.xml.XML
/** Information about a plugin loaded from a jar file.
@@ -37,11 +38,13 @@ abstract class Plugin {
val description: String
/** The compiler that this plugin uses. This is normally equated
- * to a constructor parameter in the concrete subclass. */
+ * to a constructor parameter in the concrete subclass.
+ */
val global: Global
/** Handle any plugin-specific options. The `-P:plugname:` part
- * will not be present. */
+ * will not be present.
+ */
def processOptions(options: List[String], error: String => Unit) {
if (!options.isEmpty)
error("Error: " + name + " has no options")
@@ -63,90 +66,86 @@ object Plugin {
private val PluginXML = "scalac-plugin.xml"
- /** Create a class loader with the specified file plus
+ /** Create a class loader with the specified locations plus
* the loader that loaded the Scala compiler.
*/
- private def loaderFor(jarfiles: Seq[Path]): ClassLoader = {
+ private def loaderFor(locations: Seq[Path]): ScalaClassLoader = {
val compilerLoader = classOf[Plugin].getClassLoader
- val jarurls = jarfiles map (_.toURL)
+ val urls = locations map (_.toURL)
- new URLClassLoader(jarurls.toArray, compilerLoader)
+ ScalaClassLoader fromURLs (urls, compilerLoader)
}
- /** Try to load a plugin description from the specified
- * file, returning <code>None</code> if it does not work.
+ /** Try to load a plugin description from the specified location.
*/
- private def loadDescription(jarfile: Path): Option[PluginDescription] =
- // XXX Return to this once we have some ARM support
- if (!jarfile.exists) None
- else try {
- val jar = new JarFile(jarfile.jfile)
-
- try {
- jar getEntry PluginXML match {
- case null => None
- case entry =>
- val in = jar getInputStream entry
- val packXML = XML load in
- in.close()
-
- PluginDescription fromXML packXML
- }
- }
- finally jar.close()
- }
- catch {
- case _: ZipException => None
+ private def loadDescriptionFromJar(jarp: Path): Try[PluginDescription] = {
+ // XXX Return to this once we have more ARM support
+ def read(is: Option[InputStream]) = is match {
+ case None => throw new RuntimeException(s"Missing $PluginXML in $jarp")
+ case _ => PluginDescription fromXML (XML load is.get)
}
+ Try(new Jar(jarp.jfile).withEntryStream(PluginXML)(read))
+ }
+
+ private def loadDescriptionFromFile(f: Path): Try[PluginDescription] =
+ Try(XML loadFile f.jfile) map (PluginDescription fromXML _)
type AnyClass = Class[_]
- /** Loads a plugin class from the named jar file.
+ /** Use a class loader to load the plugin class.
*
- * @return `None` if the jar file has no plugin in it or
- * if the plugin is badly formed.
+ * @return `None` on failure
*/
- def loadFrom(jarfile: Path, loader: ClassLoader): Option[AnyClass] =
- loadDescription(jarfile) match {
- case None =>
- println("Warning: could not load descriptor for plugin %s".format(jarfile))
- None
- case Some(pdesc) =>
- try Some(loader loadClass pdesc.classname) catch {
- case _: Exception =>
- println("Warning: class not found for plugin in %s (%s)".format(jarfile, pdesc.classname))
- None
- }
+ def load(pd: PluginDescription, loader: ClassLoader): Try[AnyClass] = {
+ Try[AnyClass] {
+ loader loadClass pd.classname
+ } recoverWith {
+ case _: Exception =>
+ Failure(new RuntimeException(s"Warning: class not found: ${pd.classname}"))
}
+ }
- /** Load all plugins found in the argument list, both in the
- * jar files explicitly listed, and in the jar files in the
- * directories specified. Skips all plugins in `ignoring`.
+ /** Load all plugins specified by the arguments.
+ * Each of `jars` must be a valid plugin archive or exploded archive.
+ * Each of `dirs` may be a directory containing arbitrary plugin archives.
+ * Skips all plugins named in `ignoring`.
* A single classloader is created and used to load all of them.
*/
def loadAllFrom(
jars: List[Path],
dirs: List[Path],
- ignoring: List[String]): List[AnyClass] =
+ ignoring: List[String]): List[Try[AnyClass]] =
{
- val alljars = (jars ::: (for {
- dir <- dirs if dir.isDirectory
- entry <- dir.toDirectory.files.toList sortBy (_.name)
-// was: if Path.isJarOrZip(entry)
- if Jar.isJarOrZip(entry)
- pdesc <- loadDescription(entry)
- if !(ignoring contains pdesc.name)
- } yield entry)).distinct
-
- val loader = loaderFor(alljars)
- (alljars map (loadFrom(_, loader))).flatten
+ // List[(jar, Success(descriptor))] in dir
+ def scan(d: Directory) = for {
+ f <- d.files.toList sortBy (_.name)
+ if Jar isJarOrZip f
+ pd = loadDescriptionFromJar(f)
+ if pd.isSuccess
+ } yield (f, pd)
+ // (dir, Try(descriptor))
+ def explode(d: Directory) = d -> loadDescriptionFromFile(d / PluginXML)
+ // (j, Try(descriptor))
+ def required(j: Path) = j -> loadDescriptionFromJar(j)
+
+ type Paired = Pair[Path, Try[PluginDescription]]
+ val included: List[Paired] = (dirs flatMap (_ ifDirectory scan)).flatten
+ val exploded: List[Paired] = jars flatMap (_ ifDirectory explode)
+ val explicit: List[Paired] = jars flatMap (_ ifFile required)
+ def ignored(p: Paired) = p match {
+ case (path, Success(pd)) => ignoring contains pd.name
+ case _ => false
+ }
+ val (locs, pds) = ((explicit ::: exploded ::: included) filterNot ignored).unzip
+
+ val loader = loaderFor(locs.distinct)
+ pds filter (_.isSuccess) map (_.get) map (Plugin load (_, loader))
}
/** Instantiate a plugin class, given the class and
* the compiler it is to be used in.
*/
def instantiate(clazz: AnyClass, global: Global): Plugin = {
- val constructor = clazz getConstructor classOf[Global]
- (constructor newInstance global).asInstanceOf[Plugin]
+ (clazz getConstructor classOf[Global] newInstance global).asInstanceOf[Plugin]
}
}
diff --git a/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala b/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala
index 4d98b2563c..c6e1af7ea4 100644
--- a/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala
+++ b/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala
@@ -18,8 +18,12 @@ abstract class PluginComponent extends SubComponent {
/** Internal flag to tell external from internal phases */
final override val internal = false
- /** Phases supplied by plugins should not have give the runsRightAfter constraint,
- * but can override it */
+ /** Phases supplied by plugins should not have to supply the
+ * runsRightAfter constraint, but can override it.
+ */
val runsRightAfter: Option[String] = None
+ /** Useful for -Xshow-phases. */
+ def description: String = ""
+
}
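With the new `description` hook a plugin phase can contribute a one-line summary to -Xshow-phases. The component below is hypothetical, not part of this patch; it assumes the scala-compiler jar from this revision is on the classpath, and the phase name and message are invented:

    import scala.tools.nsc.{Global, Phase}
    import scala.tools.nsc.plugins.PluginComponent

    // Hypothetical component, sketched only to show the description hook.
    class DemoComponent(val global: Global) extends PluginComponent {
      import global._
      val phaseName = "demo"
      val runsAfter = List("typer")
      override def description = "demo phase that only logs compilation unit names"  // picked up by -Xshow-phases

      def newPhase(prev: Phase): Phase = new StdPhase(prev) {
        def apply(unit: CompilationUnit): Unit = inform("demo: " + unit.source.file.name)
      }
    }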
diff --git a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala
index bd567400fb..27693d1a45 100644
--- a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala
+++ b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala
@@ -6,27 +6,22 @@
package scala.tools.nsc
package plugins
-import scala.xml.{Node,NodeSeq}
+import scala.xml.Node
/** A description of a compiler plugin, suitable for serialization
* to XML for inclusion in the plugin's .jar file.
*
* @author Lex Spoon
* @version 1.0, 2007-5-21
+ * @param name A short name of the plugin, used to identify it in
+ * various contexts. The phase defined by the plugin
+ * should have the same name.
+ * @param classname The name of the main Plugin class.
*/
-abstract class PluginDescription {
-
- /** A short name of the compiler, used to identify it in
- * various contexts. The phase defined by the plugin
- * should have the same name.
- */
- val name: String
-
- /** The name of the main class for the plugin */
- val classname: String
+case class PluginDescription(name: String, classname: String) {
/** An XML representation of this description. It can be
- * read back using <code>PluginDescription.fromXML</code>.
+ * read back using `PluginDescription.fromXML`.
* It should be stored inside the jar archive file.
*/
def toXML: Node = {
@@ -44,32 +39,24 @@ abstract class PluginDescription {
*/
object PluginDescription {
- def fromXML(xml: Node): Option[PluginDescription] = {
- // check the top-level tag
- xml match {
- case <plugin>{_*}</plugin> => ()
- case _ => return None
- }
+ def fromXML(xml: Node): PluginDescription = {
// extract one field
def getField(field: String): Option[String] = {
val text = (xml \\ field).text.trim
if (text == "") None else Some(text)
}
-
- // extract the required fields
- val name1 = getField("name") match {
- case None => return None
- case Some(str) => str
+ def extracted = {
+ val name = "name"
+ val claas = "classname"
+ val vs = Map(name -> getField(name), claas -> getField(claas))
+ if (vs.values exists (_.isEmpty)) fail()
+ else PluginDescription(name = vs(name).get, classname = vs(claas).get)
}
- val classname1 = getField("classname") match {
- case None => return None
- case Some(str) => str
+ def fail() = throw new RuntimeException("Bad plugin descriptor.")
+ // check the top-level tag
+ xml match {
+ case <plugin>{_*}</plugin> => extracted
+ case _ => fail()
}
-
- Some(new PluginDescription {
- val name = name1
- val classname = classname1
- })
}
-
}
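PluginDescription is now a plain case class and fromXML fails loudly instead of returning Option. The sketch below re-creates that shape outside the compiler (the `Descriptor` name and the demo XML are invented, and it assumes the scala-xml module is available, as it is for the compiler itself):

    import scala.xml.{Node, XML}

    // Standalone re-creation of the descriptor shape; not the compiler's class.
    case class Descriptor(name: String, classname: String)

    object Descriptor {
      def fromXML(xml: Node): Descriptor = {
        def field(f: String): Option[String] = Option((xml \\ f).text.trim) filter (_.nonEmpty)
        def fail() = throw new RuntimeException("Bad plugin descriptor.")
        xml match {
          case <plugin>{_*}</plugin> =>
            (field("name"), field("classname")) match {
              case (Some(n), Some(c)) => Descriptor(n, c)
              case _                  => fail()
            }
          case _ => fail()
        }
      }
    }

    object DescriptorDemo extends App {
      val xml = XML.loadString("<plugin><name>demo</name><classname>demo.DemoPlugin</classname></plugin>")
      println(Descriptor fromXML xml)   // Descriptor(demo,demo.DemoPlugin)
    }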
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
index 736bd826e4..00e5875852 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
@@ -7,7 +7,8 @@
package scala.tools.nsc
package plugins
-import io.{ File, Path }
+import scala.reflect.io.{ File, Path }
+import scala.tools.util.PathResolver.Defaults
/** Support for run-time loading of compiler plugins.
*
@@ -25,8 +26,14 @@ trait Plugins {
*/
protected def loadRoughPluginsList(): List[Plugin] = {
val jars = settings.plugin.value map Path.apply
- val dirs = (settings.pluginsDir.value split File.pathSeparator).toList map Path.apply
- val classes = Plugin.loadAllFrom(jars, dirs, settings.disable.value)
+ def injectDefault(s: String) = if (s.isEmpty) Defaults.scalaPluginPath else s
+ val dirs = (settings.pluginsDir.value split File.pathSeparator).toList map injectDefault map Path.apply
+ val maybes = Plugin.loadAllFrom(jars, dirs, settings.disable.value)
+ val (goods, errors) = maybes partition (_.isSuccess)
+ errors foreach (_ recover {
+ case e: Exception => inform(e.getMessage)
+ })
+ val classes = goods map (_.get) // flatten
// Each plugin must only be instantiated once. A common pattern
// is to register annotation checkers during object construction, so
@@ -34,7 +41,7 @@ trait Plugins {
classes map (Plugin.instantiate(_, this))
}
- protected lazy val roughPluginsList: List[Plugin] = loadRoughPluginsList
+ protected lazy val roughPluginsList: List[Plugin] = loadRoughPluginsList()
/** Load all available plugins. Skips plugins that
* either have the same name as another one, or which
@@ -95,7 +102,7 @@ trait Plugins {
plugs
}
- lazy val plugins: List[Plugin] = loadPlugins
+ lazy val plugins: List[Plugin] = loadPlugins()
/** A description of all the plugins that are loaded */
def pluginDescriptions: String =
@@ -106,7 +113,7 @@ trait Plugins {
* @see phasesSet
*/
protected def computePluginPhases(): Unit =
- phasesSet ++= (plugins flatMap (_.components))
+ for (p <- plugins; c <- p.components) addToPhasesSet(c, c.description)
/** Summary of the options for all loaded plugins */
def pluginOptionsHelp: String =
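An empty entry in the plugins-directory setting now falls back to the default plugin path instead of being passed through verbatim. A tiny standalone sketch of that substitution (the default path shown is hypothetical, not the value of Defaults.scalaPluginPath):

    object InjectDefaultSketch extends App {
      // Hypothetical stand-in for Defaults.scalaPluginPath.
      val defaultPluginPath = "/opt/scala/misc/scala-devel/plugins"

      def injectDefault(s: String) = if (s.isEmpty) defaultPluginPath else s

      // An unset setting arrives as "", and "".split(pathSeparator) yields a single empty entry.
      val dirs = ("" split java.io.File.pathSeparator).toList map injectDefault
      println(dirs)   // List(/opt/scala/misc/scala-devel/plugins)
    }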
diff --git a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
index c7ee11dec0..44670ea578 100644
--- a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
@@ -21,19 +21,15 @@ abstract class AbstractReporter extends Reporter {
private val positions = new mutable.HashMap[Position, Severity]
override def reset() {
- super.reset
- positions.clear
+ super.reset()
+ positions.clear()
}
private def isVerbose = settings.verbose.value
private def noWarnings = settings.nowarnings.value
private def isPromptSet = settings.prompt.value
- protected def info0(pos: Position, msg: String, _severity: Severity, force: Boolean) {
- val severity =
- if (settings.fatalWarnings.value && _severity == WARNING) ERROR
- else _severity
-
+ protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean) {
if (severity == INFO) {
if (isVerbose || force) {
severity.count += 1
@@ -53,7 +49,7 @@ abstract class AbstractReporter extends Reporter {
}
if (isPromptSet)
- displayPrompt
+ displayPrompt()
}
}
}
diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
index e847fb5b86..bda195f9d3 100644
--- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
@@ -34,9 +34,6 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
}
/** Returns the number of errors issued totally as a string.
- *
- * @param severity ...
- * @return ...
*/
private def getCountString(severity: Severity): String =
StringOps.countElementsAsString((severity).count, label(severity))
@@ -52,17 +49,12 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
printMessage(pos, clabel(severity) + msg)
}
- /**
- * @param pos ...
- */
def printSourceLine(pos: Position) {
printMessage(pos.lineContent.stripLineEnd)
printColumnMarker(pos)
}
/** Prints the column marker of the given position.
- *
- * @param pos ...
*/
def printColumnMarker(pos: Position) =
if (pos.isDefined) { printMessage(" " * (pos.column - 1) + "^") }
@@ -94,6 +86,5 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
}
}
- private def abort(msg: String) = throw new Error(msg)
override def flush() { writer.flush() }
}
diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
index 8871ae6555..0544da5d3c 100644
--- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
package reporters
import scala.reflect.internal.util._
-import scala.reflect.internal.util.StringOps._
/**
* This interface provides methods to issue information, warning and
@@ -59,15 +58,15 @@ abstract class Reporter {
/** For sending a message which should not be labeled as a warning/error,
* but also shouldn't require -verbose to be visible.
*/
- def echo(msg: String): Unit = info(NoPosition, msg, true)
- def echo(pos: Position, msg: String): Unit = info(pos, msg, true)
+ def echo(msg: String): Unit = info(NoPosition, msg, force = true)
+ def echo(pos: Position, msg: String): Unit = info(pos, msg, force = true)
/** Informational messages, suppressed unless -verbose or force=true. */
def info(pos: Position, msg: String, force: Boolean): Unit = info0(pos, msg, INFO, force)
/** Warnings and errors. */
- def warning(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, WARNING, false))
- def error(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, ERROR, false))
+ def warning(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, WARNING, force = false))
+ def error(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, ERROR, force = false))
def incompleteInputError(pos: Position, msg: String): Unit = {
if (incompleteHandled) incompleteHandler(pos, msg)
else error(pos, msg)
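Several call sites in this patch switch positional booleans to named arguments (`force = true`, `processAll = true`, `skiptvs = false`, and so on). A minimal illustration of why, outside the compiler, with an invented `info` helper:

    object NamedArgsSketch extends App {
      // A bare `true` at the call site says nothing; naming the parameter documents intent.
      def info(msg: String, force: Boolean): Unit =
        if (force) println(msg)   // here only forced messages are printed

      info("always shown", force = true)    // clearer than info("always shown", true)
      info("suppressed",   force = false)
    }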
diff --git a/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala b/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala
index 10e9982594..3aecc06b1e 100644
--- a/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala
+++ b/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala
@@ -2,9 +2,6 @@ package scala.tools.nsc.scratchpad
import java.io.{FileInputStream, InputStreamReader, IOException}
-import scala.runtime.ScalaRunTime.stringOf
-import java.lang.reflect.InvocationTargetException
-import scala.reflect.runtime.ReflectionUtils._
import scala.collection.mutable.ArrayBuffer
@deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
diff --git a/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala b/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala
index 01dccd7521..61c1717fea 100644
--- a/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala
+++ b/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala
@@ -1,8 +1,6 @@
package scala.tools.nsc
package scratchpad
-import java.io.Writer
-import scala.reflect.internal.util.SourceFile
import scala.reflect.internal.Chars._
@deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
index adabeb02a3..4727e6d867 100644
--- a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
@@ -47,8 +47,6 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
}
})
- implicit lazy val SettingOrdering: Ordering[Setting] = Ordering.ordered
-
trait AbsSetting extends Ordered[Setting] with AbsSettingValue {
def name: String
def helpDescription: String
@@ -83,14 +81,6 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
this
}
- /** If the appearance of the setting should halt argument processing. */
- private var isTerminatorSetting = false
- def shouldStopProcessing = isTerminatorSetting
- def stopProcessing(): this.type = {
- isTerminatorSetting = true
- this
- }
-
/** Issue error and return */
def errorAndValue[T](msg: String, x: T): T = { errorFn(msg) ; x }
@@ -110,6 +100,7 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
/** Attempt to set from a properties file style property value.
* Currently used by Eclipse SDT only.
+ * !!! Needs test.
*/
def tryToSetFromPropertyValue(s: String): Unit = tryToSet(s :: Nil)
@@ -133,7 +124,7 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
case _ => false
}
override def hashCode() = name.hashCode + value.hashCode
- override def toString() = name + " = " + value
+ override def toString() = name + " = " + (if (value == "") "\"\"" else value)
}
trait InternalSetting extends AbsSetting {
diff --git a/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala
deleted file mode 100644
index 0bec113743..0000000000
--- a/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala
+++ /dev/null
@@ -1,77 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package settings
-
-trait AdvancedScalaSettings {
- self: AbsScalaSettings =>
-
- abstract class X extends SettingGroup("-X") {
- val assemextdirs: StringSetting
- val assemname: StringSetting
- val assempath: StringSetting
- val checkinit: BooleanSetting
- val disableassertions: BooleanSetting
- val elidebelow: IntSetting
- val experimental: BooleanSetting
- val future: BooleanSetting
- val generatephasegraph: StringSetting
- val logimplicits: BooleanSetting
- val mainClass: StringSetting
- val migration: BooleanSetting
- val noforwarders: BooleanSetting
- val nojline: BooleanSetting
- val nouescape: BooleanSetting
- val plugin: MultiStringSetting
- val plugindisable: MultiStringSetting
- val pluginlist: BooleanSetting
- val pluginrequire: MultiStringSetting
- val pluginsdir: StringSetting
- val print: PhasesSetting
- val printicode: BooleanSetting
- val printpos: BooleanSetting
- val printtypes: BooleanSetting
- val prompt: BooleanSetting
- val resident: BooleanSetting
- val script: StringSetting
- val showclass: StringSetting
- val showobject: StringSetting
- val showphases: BooleanSetting
- val sourcedir: StringSetting
- val sourcereader: StringSetting
- }
- // def Xexperimental = X.experimental
- // def Xmigration28 = X.migration
- // def Xnojline = X.nojline
- // def Xprint = X.print
- // def Xprintpos = X.printpos
- // def Xshowcls = X.showclass
- // def Xshowobj = X.showobject
- // def assemextdirs = X.assemextdirs
- // def assemname = X.assemname
- // def assemrefs = X.assempath
- // def checkInit = X.checkinit
- // def disable = X.plugindisable
- // def elideLevel = X.elidelevel
- // def future = X.future
- // def genPhaseGraph = X.generatephasegraph
- // def logimplicits = X.logimplicits
- // def noForwarders = X.noforwarders
- // def noassertions = X.disableassertions
- // def nouescape = X.nouescape
- // def plugin = X.plugin
- // def pluginsDir = X.pluginsdir
- // def printtypes = X.printtypes
- // def prompt = X.prompt
- // def require = X.require
- // def resident = X.resident
- // def script = X.script
- // def showPhases = X.showphases
- // def showPlugins = X.pluginlist
- // def sourceReader = X.sourcereader
- // def sourcedir = X.sourcedir
- // def writeICode = X.printicode
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala b/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala
deleted file mode 100644
index da2c89d707..0000000000
--- a/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package settings
-
-/** Taking flag checking to a somewhat higher level. */
-trait AestheticSettings {
- def settings: Settings
-
- // Some(value) if setting has been set by user, None otherwise.
- def optSetting[T](s: Settings#Setting): Option[T] =
- if (s.isDefault) None else Some(s.value.asInstanceOf[T])
-
- def script = optSetting[String](settings.script)
- def encoding = optSetting[String](settings.encoding)
- def sourceReader = optSetting[String](settings.sourceReader)
-
- def debug = settings.debug.value
- def declsOnly = false
- def deprecation = settings.deprecation.value
- def experimental = settings.Xexperimental.value
- def fatalWarnings = settings.fatalWarnings.value
- def feature = settings.feature.value
- def future = settings.future.value
- def logClasspath = settings.Ylogcp.value
- def printStats = settings.Ystatistics.value
- def target = settings.target.value
- def unchecked = settings.unchecked.value
- def verbose = settings.verbose.value
- def virtPatmat = !settings.XoldPatmat.value
-
- /** Derived values */
- def jvm = target startsWith "jvm"
- def msil = target == "msil"
- def verboseDebug = debug && verbose
-}
diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
index 5c852ae07c..34c8e8df9a 100644
--- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
@@ -8,7 +8,7 @@ package nsc
package settings
import util.ClassPath
-import io.{ Directory, Path, AbstractFile }
+import io.{ Path, AbstractFile }
class FscSettings(error: String => Unit) extends Settings(error) {
outer =>
@@ -38,7 +38,7 @@ class FscSettings(error: String => Unit) extends Settings(error) {
private def holdsPath = Set[Settings#Setting](
d, dependencyfile, pluginsDir, Ygenjavap
)
-
+
override def processArguments(arguments: List[String], processAll: Boolean): (Boolean, List[String]) = {
val (r, args) = super.processArguments(arguments, processAll)
// we need to ensure the files specified with relative locations are absolutized based on the currentDir
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index e4f99474e1..cc77cbdf52 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -10,7 +10,6 @@ package settings
import io.{ AbstractFile, Jar, Path, PlainFile, VirtualDirectory }
import scala.reflect.internal.util.StringOps
-import scala.collection.mutable.ListBuffer
import scala.io.Source
import scala.reflect.{ ClassTag, classTag }
@@ -63,40 +62,33 @@ class MutableSettings(val errorFn: String => Unit)
(checkDependencies, residualArgs)
case "--" :: xs =>
(checkDependencies, xs)
+ // discard empties; sometimes they appear because of Ant or similar tools,

+ // but discard carefully, because an empty string is valid as an argument
+ // to an option, e.g. -cp "" . So we discard them only when they appear
+ // where an option should be, not where an argument to an option should be.
+ case "" :: xs =>
+ loop(xs, residualArgs)
case x :: xs =>
- val isOpt = x startsWith "-"
- if (isOpt) {
- val newArgs = parseParams(args)
- if (args eq newArgs) {
- errorFn(s"bad option: '$x'")
- (false, args)
- }
- // discard empties, sometimes they appear because of ant or etc.
- // but discard carefully, because an empty string is valid as an argument
- // to an option, e.g. -cp "" . So we discard them only when they appear
- // in option position.
- else if (x == "") {
- loop(xs, residualArgs)
- }
- else lookupSetting(x) match {
- case Some(s) if s.shouldStopProcessing => (checkDependencies, newArgs)
- case _ => loop(newArgs, residualArgs)
+ if (x startsWith "-") {
+ parseParams(args) match {
+ case newArgs if newArgs eq args => errorFn(s"bad option: '$x'") ; (false, args)
+ case newArgs => loop(newArgs, residualArgs)
}
}
- else {
- if (processAll) loop(xs, residualArgs :+ x)
- else (checkDependencies, args)
- }
+ else if (processAll)
+ loop(xs, residualArgs :+ x)
+ else
+ (checkDependencies, args)
}
loop(arguments, Nil)
}
- def processArgumentString(params: String) = processArguments(splitParams(params), true)
+ def processArgumentString(params: String) = processArguments(splitParams(params), processAll = true)
/** Create a new Settings object, copying all user-set values.
*/
def copy(): Settings = {
val s = new Settings()
- s.processArguments(recreateArgs, true)
+ s.processArguments(recreateArgs, processAll = true)
s
}
@@ -134,7 +126,7 @@ class MutableSettings(val errorFn: String => Unit)
// if arg is of form -Xfoo:bar,baz,quux
def parseColonArg(s: String): Option[List[String]] = {
- val (p, args) = StringOps.splitWhere(s, _ == ':', true) getOrElse (return None)
+ val (p, args) = StringOps.splitWhere(s, _ == ':', doDropIndex = true) getOrElse (return None)
// any non-Nil return value means failure and we return s unmodified
tryToSetIfExists(p, (args split ",").toList, (s: Setting) => s.tryToSetColon _)
@@ -184,7 +176,7 @@ class MutableSettings(val errorFn: String => Unit)
* The class loader defining `T` should provide resources `app.class.path`
* and `boot.class.path`. These resources should contain the application
* and boot classpaths in the same form as would be passed on the command line.*/
- def embeddedDefaults[T: ClassTag]: Unit =
+ def embeddedDefaults[T: ClassTag]: Unit = // called from sbt and repl
embeddedDefaults(classTag[T].runtimeClass.getClassLoader)
/** Initializes these settings for embedded use by a class from the given class loader.
@@ -248,7 +240,7 @@ class MutableSettings(val errorFn: String => Unit)
/** Add a destination directory for sources found under srcdir.
* Both directories should exits.
*/
- def add(srcDir: String, outDir: String): Unit =
+ def add(srcDir: String, outDir: String): Unit = // used in ide?
add(checkDir(AbstractFile.getDirectory(srcDir), srcDir),
checkDir(AbstractFile.getDirectory(outDir), outDir))
@@ -256,11 +248,11 @@ class MutableSettings(val errorFn: String => Unit)
private def checkDir(dir: AbstractFile, name: String, allowJar: Boolean = false): AbstractFile = (
if (dir != null && dir.isDirectory)
dir
-// was: else if (allowJar && dir == null && Path.isJarOrZip(name, false))
- else if (allowJar && dir == null && Jar.isJarOrZip(name, false))
+ else if (allowJar && dir == null && Jar.isJarOrZip(name, examineFile = false))
new PlainFile(Path(name))
else
- throw new FatalError(name + " does not exist or is not a directory")
+// throw new FatalError(name + " does not exist or is not a directory")
+ dir
)
/** Set the single output directory. From now on, all files will
@@ -268,7 +260,7 @@ class MutableSettings(val errorFn: String => Unit)
*/
def setSingleOutput(outDir: String) {
val dst = AbstractFile.getDirectory(outDir)
- setSingleOutput(checkDir(dst, outDir, true))
+ setSingleOutput(checkDir(dst, outDir, allowJar = true))
}
def getSingleOutput: Option[AbstractFile] = singleOutDir
@@ -331,12 +323,12 @@ class MutableSettings(val errorFn: String => Unit)
case Some(d) =>
d match {
case _: VirtualDirectory | _: io.ZipArchive => Nil
- case _ => List(d.lookupPathUnchecked(srcPath, false))
+ case _ => List(d.lookupPathUnchecked(srcPath, directory = false))
}
case None =>
(outputs filter (isBelow _).tupled) match {
case Nil => Nil
- case matches => matches.map(_._1.lookupPathUnchecked(srcPath, false))
+ case matches => matches.map(_._1.lookupPathUnchecked(srcPath, directory = false))
}
}
}
@@ -390,7 +382,7 @@ class MutableSettings(val errorFn: String => Unit)
def max = range map (_._2) getOrElse IntMax
override def value_=(s: Int) =
- if (isInputValid(s)) super.value_=(s) else errorMsg
+ if (isInputValid(s)) super.value_=(s) else errorMsg()
// Validate that min and max are consistent
assert(min <= max)
@@ -422,7 +414,7 @@ class MutableSettings(val errorFn: String => Unit)
if (args.isEmpty) errorAndValue("missing argument", None)
else parseArgument(args.head) match {
case Some(i) => value = i ; Some(args.tail)
- case None => errorMsg ; None
+ case None => errorMsg() ; None
}
def unparse: List[String] =
@@ -443,7 +435,7 @@ class MutableSettings(val errorFn: String => Unit)
def tryToSet(args: List[String]) = { value = true ; Some(args) }
def unparse: List[String] = if (value) List(name) else Nil
- override def tryToSetFromPropertyValue(s : String) {
+ override def tryToSetFromPropertyValue(s : String) { // used from ide
value = s.equalsIgnoreCase("true")
}
}
@@ -565,7 +557,7 @@ class MutableSettings(val errorFn: String => Unit)
Some(rest)
}
override def tryToSetColon(args: List[String]) = tryToSet(args)
- override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList)
+ override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList) // used from ide
def unparse: List[String] = value map (name + ":" + _)
withHelpSyntax(name + ":<" + arg + ">")
@@ -599,7 +591,7 @@ class MutableSettings(val errorFn: String => Unit)
}
def unparse: List[String] =
if (value == default) Nil else List(name + ":" + value)
- override def tryToSetFromPropertyValue(s: String) = tryToSetColon(s::Nil)
+ override def tryToSetFromPropertyValue(s: String) = tryToSetColon(s::Nil) // used from ide
withHelpSyntax(name + ":<" + helpArg + ">")
}
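The new `"" :: xs` case drops empty strings only when they sit where an option is expected, so an empty value passed to an option (as in -cp "") still reaches the option. A simplified, self-contained sketch of that loop; it pretends every "-option" takes exactly one value, which the real parser does not:

    object ArgLoopSketch extends App {
      // Simplified: every "-option" consumes one value; empties are dropped only in option position.
      def loop(args: List[String], residual: List[String]): List[String] = args match {
        case Nil                                        => residual
        case "" :: rest                                 => loop(rest, residual)                         // drop stray empty
        case opt :: value :: rest if opt startsWith "-" => loop(rest, residual :+ (opt + "=" + value))  // value may be ""
        case x :: rest                                  => loop(rest, residual :+ x)
      }
      println(loop(List("", "-cp", "", "Foo.scala"), Nil))   // List(-cp=, Foo.scala)
    }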
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 3df6334ec1..9469113238 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -43,6 +43,7 @@ trait ScalaSettings extends AbsScalaSettings
/** Internal use - syntax enhancements. */
private class EnableSettings[T <: BooleanSetting](val s: T) {
def enabling(toEnable: List[BooleanSetting]): s.type = s withPostSetHook (_ => toEnable foreach (_.value = s.value))
+ def disabling(toDisable: List[BooleanSetting]): s.type = s withPostSetHook (_ => toDisable foreach (_.value = !s.value))
def andThen(f: s.T => Unit): s.type = s withPostSetHook (setting => f(setting.value))
}
private implicit def installEnableSettings[T <: BooleanSetting](s: T) = new EnableSettings(s)
@@ -52,14 +53,14 @@ trait ScalaSettings extends AbsScalaSettings
val jvmargs = PrefixSetting("-J<flag>", "-J", "Pass <flag> directly to the runtime system.")
val defines = PrefixSetting("-Dproperty=value", "-D", "Pass -Dproperty=value directly to the runtime system.")
- val toolcp = PathSetting("-toolcp", "Add to the runner classpath.", "")
+ /*val toolcp =*/ PathSetting("-toolcp", "Add to the runner classpath.", "")
val nobootcp = BooleanSetting("-nobootcp", "Do not use the boot classpath for the scala jars.")
/**
* Standard settings
*/
// argfiles is only for the help message
- val argfiles = BooleanSetting ("@<file>", "A text file containing compiler arguments (options and source files)")
+ /*val argfiles = */ BooleanSetting ("@<file>", "A text file containing compiler arguments (options and source files)")
val classpath = PathSetting ("-classpath", "Specify where to find user class files.", defaultClasspath) withAbbreviation "-cp"
val d = OutputSetting (outputDirs, ".")
val nospecialization = BooleanSetting ("-no-specialization", "Ignore @specialize annotations.")
@@ -69,11 +70,8 @@ trait ScalaSettings extends AbsScalaSettings
* -X "Advanced" settings
*/
val Xhelp = BooleanSetting ("-X", "Print a synopsis of advanced options.")
- val assemname = StringSetting ("-Xassem-name", "file", "(Requires -target:msil) Name of the output assembly.", "").dependsOn(target, "msil")
- val assemrefs = StringSetting ("-Xassem-path", "path", "(Requires -target:msil) List of assemblies referenced by the program.", ".").dependsOn(target, "msil")
- val assemextdirs = StringSetting ("-Xassem-extdirs", "dirs", "(Requires -target:msil) List of directories containing assemblies. default:lib", Defaults.scalaLibDir.path).dependsOn(target, "msil")
- val sourcedir = StringSetting ("-Xsourcedir", "directory", "(Requires -target:msil) Mirror source folder structure in output directory.", ".").dependsOn(target, "msil")
val checkInit = BooleanSetting ("-Xcheckinit", "Wrap field accessors to throw an exception on uninitialized access.")
+ val developer = BooleanSetting ("-Xdev", "Indicates user is a developer - issue warnings about anything which seems amiss")
val noassertions = BooleanSetting ("-Xdisable-assertions", "Generate no assertions or assumptions.")
val elidebelow = IntSetting ("-Xelide-below", "Calls to @elidable methods are omitted if method priority is lower than argument",
elidable.MINIMUM, None, elidable.byName get _)
@@ -107,14 +105,12 @@ trait ScalaSettings extends AbsScalaSettings
val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases.")
val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "")
- val XoldPatmat = BooleanSetting ("-Xoldpatmat", "Use the pre-2.10 pattern matcher. Otherwise, the 'virtualizing' pattern matcher is used in 2.10.")
val XnoPatmatAnalysis = BooleanSetting ("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. Also, ignore @switch annotation.")
val XfullLubs = BooleanSetting ("-Xfull-lubs", "Retains pre 2.10 behavior of less aggressive truncation of least upper bounds.")
/** Compatibility stubs for options whose value name did
* not previously match the option name.
*/
- def XO = optimise
def debuginfo = g
def dependenciesFile = dependencyfile
def nowarnings = nowarn
@@ -127,6 +123,7 @@ trait ScalaSettings extends AbsScalaSettings
val overrideObjects = BooleanSetting ("-Yoverride-objects", "Allow member objects to be overridden.")
val overrideVars = BooleanSetting ("-Yoverride-vars", "Allow vars to be overridden.")
val Yhelp = BooleanSetting ("-Y", "Print a synopsis of private options.")
+ val breakCycles = BooleanSetting ("-Ybreak-cycles", "Attempt to break cycles encountered during typing")
val browse = PhasesSetting ("-Ybrowse", "Browse the abstract syntax tree after")
val check = PhasesSetting ("-Ycheck", "Check the tree at the end of")
val Yshow = PhasesSetting ("-Yshow", "(Requires -Xshow-class or -Xshow-object) Show after")
@@ -166,25 +163,22 @@ trait ScalaSettings extends AbsScalaSettings
val refinementMethodDispatch
= ChoiceSetting ("-Ystruct-dispatch", "policy", "structural method dispatch policy", List("no-cache", "mono-cache", "poly-cache", "invoke-dynamic"), "poly-cache")
val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.")
- val Ybuilderdebug = ChoiceSetting ("-Ybuilder-debug", "manager", "Compile using the specified build manager.", List("none", "refined", "simple"), "none")
val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.")
- val Ymacronoexpand = BooleanSetting ("-Ymacro-no-expand", "Don't expand macros. Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.")
val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
+ val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "")
val Ynotnull = BooleanSetting ("-Ynotnull", "Enable (experimental and incomplete) scala.NotNull.")
val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overridden methods.")
- val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.")
+ val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.").
+ withDeprecationMessage("This flag is scheduled for removal in 2.12. If you have a case where you need this flag then please report a bug.")
val Yinvalidate = StringSetting ("-Yinvalidate", "classpath-entry", "Invalidate classpath entry before run", "")
val noSelfCheck = BooleanSetting ("-Yno-self-type-checks", "Suppress check for self-type conformance among inherited members.")
val YvirtClasses = false // too embryonic to even expose as a -Y //BooleanSetting ("-Yvirtual-classes", "Support virtual classes")
+ val YdisableUnreachablePrevention = BooleanSetting("-Ydisable-unreachable-prevention", "Disable the prevention of unreachable blocks in code generation.")
val exposeEmptyPackage = BooleanSetting("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly()
- def stop = stopAfter
-
/** Area-specific debug output.
*/
- val Ybuildmanagerdebug = BooleanSetting("-Ybuild-manager-debug", "Generate debug information for the Refined Build Manager compiler.")
- val Ycompletion = BooleanSetting("-Ycompletion-debug", "Trace all tab completion activity.")
val Ydocdebug = BooleanSetting("-Ydoc-debug", "Trace all scaladoc activity.")
val Yidedebug = BooleanSetting("-Yide-debug", "Generate, validate and output trees using the interactive compiler.")
val Yinferdebug = BooleanSetting("-Yinfer-debug", "Trace type inference and implicit search.")
@@ -194,7 +188,6 @@ trait ScalaSettings extends AbsScalaSettings
val Ypmatdebug = BooleanSetting("-Ypmat-debug", "Trace all pattern matcher activity.")
val Yposdebug = BooleanSetting("-Ypos-debug", "Trace position validation.")
val Yreifydebug = BooleanSetting("-Yreify-debug", "Trace reification.")
- val Yrepldebug = BooleanSetting("-Yrepl-debug", "Trace all repl activity.") andThen (interpreter.replProps.debug setValue _)
val Ytyperdebug = BooleanSetting("-Ytyper-debug", "Trace all type assignments.")
val Ypatmatdebug = BooleanSetting("-Ypatmat-debug", "Trace pattern matching translation.")
@@ -202,6 +195,7 @@ trait ScalaSettings extends AbsScalaSettings
*/
val future = BooleanSetting("-Xfuture", "Turn on future language features.") enabling futureSettings
val optimise = BooleanSetting("-optimise", "Generates faster bytecode by applying optimisations to the program") withAbbreviation "-optimize" enabling optimiseSettings
+ val nooptimise = BooleanSetting("-Ynooptimise", "Clears all the flags set by -optimise. Useful for testing optimizations in isolation.") withAbbreviation "-Ynooptimize" disabling optimise::optimiseSettings
val Xexperimental = BooleanSetting("-Xexperimental", "Enable experimental extensions.") enabling experimentalSettings
// Feature extensions
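`disabling` is the mirror image of `enabling`: setting -Ynooptimise pushes the negation of its value into -optimise and the optimiser flags. A standalone re-creation of the hook mechanics; the `Flag` class is invented (the real BooleanSetting carries much more) and the flag names are only illustrative:

    object HookSketch extends App {
      // Invented stand-in for BooleanSetting: a mutable flag with a post-set hook.
      final class Flag(val name: String) {
        var value = false
        private var hook: Flag => Unit = f => ()
        def withPostSetHook(f: Flag => Unit): this.type = { hook = f; this }
        def set(v: Boolean): Unit = { value = v; hook(this) }
      }

      val inline           = new Flag("-Yinline")
      val closureElim      = new Flag("-Yclosure-elim")
      val optimiseSettings = List(inline, closureElim)

      val optimise   = new Flag("-optimise")    withPostSetHook (s => optimiseSettings foreach (_.value = s.value))
      val nooptimise = new Flag("-Ynooptimise") withPostSetHook (s => (optimise :: optimiseSettings) foreach (_.value = !s.value))

      optimise.set(true)
      println(inline.value)    // true: -optimise enabled the group
      nooptimise.set(true)
      println(inline.value)    // false: -Ynooptimise cleared -optimise and the group
    }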
diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
index e866ad6ae0..d173aaa848 100644
--- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
@@ -41,16 +41,11 @@ trait StandardScalaSettings {
val optimise: BooleanSetting // depends on post hook which mutates other settings
val print = BooleanSetting ("-print", "Print program with Scala-specific features removed.")
val target = ChoiceSetting ("-target", "target", "Target platform for object files. All JVM 1.5 targets are deprecated.",
- List("jvm-1.5", "jvm-1.5-fjbg", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7", "msil"),
- "jvm-1.6")
+ List("jvm-1.5", "jvm-1.6", "jvm-1.7"), "jvm-1.6")
val unchecked = BooleanSetting ("-unchecked", "Enable additional warnings where generated code depends on assumptions.")
val uniqid = BooleanSetting ("-uniqid", "Uniquely tag all identifiers in debugging output.")
val usejavacp = BooleanSetting ("-usejavacp", "Utilize the java.class.path in classpath resolution.")
+ val usemanifestcp = BooleanSetting ("-usemanifestcp", "Utilize the manifest in classpath resolution.")
val verbose = BooleanSetting ("-verbose", "Output messages about what the compiler is doing.")
val version = BooleanSetting ("-version", "Print product version and exit.")
-
- /** These are @<file> and -Dkey=val style settings, which don't
- * nicely map to identifiers.
- */
- val argfiles: BooleanSetting // exists only to echo help message, should be done differently
}
diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala
index 9f9879210c..2649a150ad 100644
--- a/src/compiler/scala/tools/nsc/settings/Warnings.scala
+++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala
@@ -26,11 +26,11 @@ trait Warnings {
// These warnings should be pretty quiet unless you're doing
// something inadvisable.
protected def lintWarnings = List(
- // warnDeadCode,
warnInaccessible,
warnNullaryOverride,
warnNullaryUnit,
- warnAdaptedArgs
+ warnAdaptedArgs,
+ warnInferAny
)
// Warning groups.
@@ -38,9 +38,13 @@ trait Warnings {
BooleanSetting("-Xlint", "Enable recommended additional warnings.")
withPostSetHook (_ => lintWarnings foreach (_.value = true))
)
- val warnEverything = (
+
+ /*val warnEverything = */ (
BooleanSetting("-Ywarn-all", "Enable all -Y warnings.")
- withPostSetHook (_ => lintWarnings foreach (_.value = true))
+ withPostSetHook { _ =>
+ lint.value = true
+ allWarnings foreach (_.value = true)
+ }
)
// Individual warnings.
@@ -53,9 +57,10 @@ trait Warnings {
val warnInaccessible = BooleanSetting ("-Ywarn-inaccessible", "Warn about inaccessible types in method signatures.")
val warnNullaryOverride = BooleanSetting ("-Ywarn-nullary-override",
"Warn when non-nullary overrides nullary, e.g. `def foo()` over `def foo`.")
+ val warnInferAny = BooleanSetting ("-Ywarn-infer-any", "Warn when a type argument is inferred to be `Any`.")
// Backward compatibility.
- def Xwarnfatal = fatalWarnings
- def Xchecknull = warnSelectNullable
- def Ywarndeadcode = warnDeadCode
+ @deprecated("Use fatalWarnings", "2.11.0") def Xwarnfatal = fatalWarnings // used by sbt
+ @deprecated("Use warnSelectNullable", "2.11.0") def Xchecknull = warnSelectNullable // used by ide
+ @deprecated("Use warnDeadCode", "2.11.0") def Ywarndeadcode = warnDeadCode // used by ide
}
diff --git a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
index f2aab36b51..4e4efef607 100644
--- a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc
package symtab
-import scala.reflect.internal.util.BatchSourceFile
import scala.tools.nsc.io.AbstractFile
/** A subclass of SymbolLoaders that implements browsing behavior.
@@ -28,7 +27,7 @@ abstract class BrowsingLoaders extends SymbolLoaders {
override protected def enterIfNew(owner: Symbol, member: Symbol, completer: SymbolLoader): Symbol = {
completer.sourcefile match {
case Some(src) =>
- (if (member.isModule) member.moduleClass else member).sourceFile = src
+ (if (member.isModule) member.moduleClass else member).associatedFile = src
case _ =>
}
val decls = owner.info.decls
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 348c7f688f..61ac07d18f 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -10,10 +10,9 @@ import java.io.IOException
import scala.compat.Platform.currentTime
import scala.tools.nsc.util.{ ClassPath }
import classfile.ClassfileParser
-import scala.reflect.internal.Flags._
import scala.reflect.internal.MissingRequirementError
import scala.reflect.internal.util.Statistics
-import scala.tools.nsc.io.{ AbstractFile, MsilFile }
+import scala.reflect.io.{ AbstractFile, NoAbstractFile }
/** This class ...
*
@@ -31,6 +30,14 @@ abstract class SymbolLoaders {
member
}
+ protected def signalError(root: Symbol, ex: Throwable) {
+ if (settings.debug.value) ex.printStackTrace()
+ globalError(ex.getMessage() match {
+ case null => "i/o error while loading " + root.name
+ case msg => "error while loading " + root.name + ", " + msg
+ })
+ }
+
/** Enter class with given `name` into scope of `root`
* and give them `completer` as type.
*/
@@ -153,7 +160,7 @@ abstract class SymbolLoaders {
def sourcefile: Option[AbstractFile] = None
/**
- * Description of the resource (ClassPath, AbstractFile, MsilFile)
+ * Description of the resource (ClassPath, AbstractFile)
* being processed by this loader
*/
protected def description: String
@@ -162,25 +169,13 @@ abstract class SymbolLoaders {
private def setSource(sym: Symbol) {
sourcefile foreach (sf => sym match {
- case cls: ClassSymbol => cls.sourceFile = sf
- case mod: ModuleSymbol => mod.moduleClass.sourceFile = sf
+ case cls: ClassSymbol => cls.associatedFile = sf
+ case mod: ModuleSymbol => mod.moduleClass.associatedFile = sf
case _ => ()
})
}
override def complete(root: Symbol) {
- def signalError(ex: Exception) {
- ok = false
- if (settings.debug.value) ex.printStackTrace()
- val msg = ex.getMessage()
- // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
- // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
- // that are not in their correct place (see bug for details)
- if (!settings.isScaladoc)
- globalError(
- if (msg eq null) "i/o error while loading " + root.name
- else "error while loading " + root.name + ", " + msg);
- }
try {
val start = currentTime
val currentphase = phase
@@ -190,11 +185,11 @@ abstract class SymbolLoaders {
ok = true
setSource(root)
setSource(root.companionSymbol) // module -> class, class -> module
- } catch {
- case ex: IOException =>
- signalError(ex)
- case ex: MissingRequirementError =>
- signalError(ex)
+ }
+ catch {
+ case ex @ (_: IOException | _: MissingRequirementError) =>
+ ok = false
+ signalError(root, ex)
}
initRoot(root)
if (!root.isPackageClass) initRoot(root.companionSymbol)
@@ -226,7 +221,6 @@ abstract class SymbolLoaders {
assert(root.isPackageClass, root)
root.setInfo(new PackageClassInfoType(newScope, root))
- val sourcepaths = classpath.sourcepaths
if (!root.isRoot) {
for (classRep <- classpath.classes if platform.doLoad(classRep)) {
initializeFromClassPath(root, classRep)
@@ -252,7 +246,7 @@ abstract class SymbolLoaders {
protected def doComplete(root: Symbol) {
val start = if (Statistics.canEnable) Statistics.startTimer(classReadNanos) else null
classfileParser.parse(classfile, root)
- if (root.associatedFile eq null) {
+ if (root.associatedFile eq NoAbstractFile) {
root match {
// In fact, the ModuleSymbol forwards its setter to the module class
case _: ClassSymbol | _: ModuleSymbol =>
@@ -267,16 +261,6 @@ abstract class SymbolLoaders {
override def sourcefile: Option[AbstractFile] = classfileParser.srcfile
}
- class MsilFileLoader(msilFile: MsilFile) extends SymbolLoader with FlagAssigningCompleter {
- private def typ = msilFile.msilType
- private object typeParser extends clr.TypeParser {
- val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
- }
-
- protected def description = "MsilFile "+ typ.FullName + ", assembly "+ typ.Assembly.FullName
- protected def doComplete(root: Symbol) { typeParser.parse(typ, root) }
- }
-
class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter {
protected def description = "source file "+ srcfile.toString
override def fromSource = true
@@ -289,11 +273,6 @@ abstract class SymbolLoaders {
protected def doComplete(root: Symbol) { root.sourceModule.initialize }
}
- object clrTypes extends clr.CLRTypes {
- val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
- if (global.forMSIL) init()
- }
-
/** used from classfile parser to avoid cycles */
var parentsLevel = 0
var pendingLoadActions: List[() => Unit] = Nil
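The consolidated error handling above catches two exception types with one alternative pattern, `case ex @ (_: IOException | _: MissingRequirementError)`. A self-contained sketch of that Scala idiom; since MissingRequirementError is compiler-internal, IllegalArgumentException stands in for the second type, and the `load`/`signalError` helpers are invented:

    import java.io.IOException

    object MultiCatchSketch extends App {
      def signalError(root: String, ex: Throwable): Unit =
        println("error while loading " + root + ", " + ex.getMessage)

      def load(path: String): String =
        try {
          if (path.isEmpty) throw new IOException("empty path") else path
        }
        catch {
          case ex @ (_: IOException | _: IllegalArgumentException) =>   // one handler, several types
            signalError(path, ex)
            ""
        }

      println(load(""))      // error while loading , empty path   (then an empty result line)
      println(load("Foo"))   // Foo
    }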
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
index 7a84441e09..035244e421 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc
package symtab
-import scala.collection.{ mutable, immutable }
import scala.language.implicitConversions
import scala.language.postfixOps
@@ -17,9 +16,6 @@ trait SymbolTrackers {
val global: Global
import global._
- private implicit lazy val TreeOrdering: Ordering[Tree] =
- Ordering by (x => (x.shortClass, x.symbol))
-
private implicit lazy val SymbolOrdering: Ordering[Symbol] =
Ordering by (x => (x.kindString, x.name.toString))
@@ -76,7 +72,6 @@ trait SymbolTrackers {
private def isFlagsChange(sym: Symbol) = changed.flags contains sym
private implicit def NodeOrdering: Ordering[Node] = Ordering by (_.root)
- private def ownersString(sym: Symbol, num: Int) = sym.ownerChain drop 1 take num mkString " -> "
object Node {
def nodes(syms: Set[Symbol]): List[Node] = {
@@ -114,7 +109,6 @@ trait SymbolTrackers {
case Some(oldFlags) =>
val added = masked & ~oldFlags
val removed = oldFlags & ~masked
- val steady = masked & ~(added | removed)
val all = masked | oldFlags
val strs = 0 to 63 map { bit =>
val flag = 1L << bit
@@ -181,7 +175,7 @@ trait SymbolTrackers {
}
def show(label: String): String = {
val hierarchy = Node(current)
- val Change(added, removed, symMap, owners, flags) = history.head
+ val Change(_, removed, symMap, _, _) = history.head
def detailString(sym: Symbol) = {
val ownerString = sym.ownerChain splitAt 3 match {
case (front, back) =>
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
index 427b5bf887..17e3b08ec2 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
@@ -29,11 +29,6 @@ class AbstractFileReader(val file: AbstractFile) {
*/
var bp: Int = 0
- /** return byte at offset 'pos'
- */
- @throws(classOf[IndexOutOfBoundsException])
- def byteAt(pos: Int): Byte = buf(pos)
-
/** read a byte
*/
@throws(classOf[IndexOutOfBoundsException])
@@ -45,7 +40,7 @@ class AbstractFileReader(val file: AbstractFile) {
/** read some bytes
*/
- def nextBytes(len: Int): Array[Byte] = {
+ def nextBytes(len: Int): Array[Byte] = { // used in ide
bp += len
buf.slice(bp - len, bp)
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 1f42fa8aab..f8930c4ddd 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -23,15 +23,14 @@ import scala.tools.nsc.io.AbstractFile
abstract class ClassfileParser {
val global: Global
import global._
- import definitions.{ AnnotationClass, ClassfileAnnotationClass }
import scala.reflect.internal.ClassfileConstants._
import Flags._
protected var in: AbstractFileReader = _ // the class file reader
protected var clazz: Symbol = _ // the class symbol containing dynamic members
protected var staticModule: Symbol = _ // the module symbol containing static members
- protected var instanceScope: Scope = _ // the scope of all instance definitions
- protected var staticScope: Scope = _ // the scope of all static definitions
+ protected var instanceScope: Scope = _ // the scope of all instance definitions
+ protected var staticScope: Scope = _ // the scope of all static definitions
protected var pool: ConstantPool = _ // the classfile's constant pool
protected var isScala: Boolean = _ // does class file describe a scala class?
protected var isScalaAnnot: Boolean = _ // does class file describe a scala class with its pickled info in an annotation?
@@ -100,7 +99,7 @@ abstract class ClassfileParser {
this.staticModule = if (root.isModule) root else root.companionModule
this.isScala = false
- parseHeader
+ parseHeader()
this.pool = new ConstantPool
parseClass()
}
@@ -186,7 +185,7 @@ abstract class ClassfileParser {
if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
val name = getExternalName(in.getChar(start + 1))
if (nme.isModuleName(name))
- c = rootMirror.getModule(nme.stripModuleSuffix(name))
+ c = rootMirror.getModuleByName(nme.stripModuleSuffix(name))
else
c = classNameToSymbol(name)
@@ -227,7 +226,7 @@ abstract class ClassfileParser {
val (name0, tpe0) = getNameAndType(in.getChar(start + 3), ownerTpe)
debuglog("getMemberSymbol: name and tpe: " + name0 + ": " + tpe0)
- forceMangledName(tpe0.typeSymbol.name, false)
+ forceMangledName(tpe0.typeSymbol.name, module = false)
val (name, tpe) = getNameAndType(in.getChar(start + 3), ownerTpe)
// println("new tpe: " + tpe + " at phase: " + phase)
@@ -235,26 +234,26 @@ abstract class ClassfileParser {
val index = in.getChar(start + 1)
val name = getExternalName(in.getChar(starts(index) + 1))
//assert(name.endsWith("$"), "Not a module class: " + name)
- f = forceMangledName(name dropRight 1, true)
+ f = forceMangledName(name dropRight 1, module = true)
if (f == NoSymbol)
- f = rootMirror.getModule(name dropRight 1)
+ f = rootMirror.getModuleByName(name dropRight 1)
} else {
val origName = nme.originalName(name)
val owner = if (static) ownerTpe.typeSymbol.linkedClassOfClass else ownerTpe.typeSymbol
// println("\t" + owner.info.member(name).tpe.widen + " =:= " + tpe)
- f = owner.info.findMember(origName, 0, 0, false).suchThat(_.tpe.widen =:= tpe)
+ f = owner.info.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe.widen =:= tpe)
if (f == NoSymbol)
- f = owner.info.findMember(newTermName(origName + nme.LOCAL_SUFFIX_STRING), 0, 0, false).suchThat(_.tpe =:= tpe)
+ f = owner.info.findMember(newTermName(origName + nme.LOCAL_SUFFIX_STRING), 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
if (f == NoSymbol) {
// if it's an impl class, try to find its static member inside the class
if (ownerTpe.typeSymbol.isImplClass) {
// println("impl class, member: " + owner.tpe.member(origName) + ": " + owner.tpe.member(origName).tpe)
- f = ownerTpe.findMember(origName, 0, 0, false).suchThat(_.tpe =:= tpe)
+ f = ownerTpe.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
} else {
log("Couldn't find " + name + ": " + tpe + " inside: \n" + ownerTpe)
f = tpe match {
- case MethodType(_, _) => owner.newMethod(name, owner.pos)
- case _ => owner.newVariable(name, owner.pos)
+ case MethodType(_, _) => owner.newMethod(name.toTermName, owner.pos)
+ case _ => owner.newVariable(name.toTermName, owner.pos)
}
f setInfo tpe
log("created fake member " + f.fullName)
@@ -283,7 +282,7 @@ abstract class ClassfileParser {
if (in.buf(start).toInt != CONSTANT_NAMEANDTYPE) errorBadTag(start)
val name = getName(in.getChar(start + 1).toInt)
// create a dummy symbol for method types
- val dummySym = ownerTpe.typeSymbol.newMethod(name, ownerTpe.typeSymbol.pos)
+ val dummySym = ownerTpe.typeSymbol.newMethod(name.toTermName, ownerTpe.typeSymbol.pos)
var tpe = getType(dummySym, in.getChar(start + 3).toInt)
// fix the return type, which is blindly set to the class currently parsed
@@ -361,7 +360,7 @@ abstract class ClassfileParser {
}
value match {
case ct: Constant => ct
- case cls: Symbol => Constant(cls.tpe)
+ case cls: Symbol => Constant(cls.tpe_*)
case arr: Type => Constant(arr)
}
}
@@ -423,9 +422,9 @@ abstract class ClassfileParser {
var sym: Symbol = rootMirror.RootClass
// was "at flatten.prev"
- beforeFlatten {
+ enteringFlatten {
for (part0 <- parts; if !(part0 == ""); part = newTermName(part0)) {
- val sym1 = beforeIcode {
+ val sym1 = enteringIcode {
sym.linkedClassOfClass.info
sym.info.decl(part.encode)
}//.suchThat(module == _.isModule)
@@ -458,7 +457,7 @@ abstract class ClassfileParser {
ss = name.subName(start, end)
sym = owner.info.decls lookup ss
if (sym == NoSymbol) {
- sym = owner.newPackage(ss) setInfo completer
+ sym = owner.newPackage(ss.toTermName) setInfo completer
sym.moduleClass setInfo completer
owner.info.decls enter sym
}
@@ -478,7 +477,7 @@ abstract class ClassfileParser {
if (name.pos('.') == name.length)
definitions.getMember(rootMirror.EmptyPackageClass, name.toTypeName)
else
- rootMirror.getClass(name) // see tickets #2464, #3756
+ rootMirror.getClassByName(name) // see tickets #2464, #3756
} catch {
case _: FatalError => loadClassSymbol(name)
}
@@ -500,8 +499,8 @@ abstract class ClassfileParser {
def parseClass() {
val jflags = in.nextChar
val isAnnotation = hasAnnotation(jflags)
- var sflags = toScalaClassFlags(jflags)
- var nameIdx = in.nextChar
+ val sflags = toScalaClassFlags(jflags)
+ val nameIdx = in.nextChar
currentClass = pool.getClassName(nameIdx)
/** Parse parents for Java classes. For Scala, return AnyRef, since the real type will be unpickled.
@@ -515,9 +514,9 @@ abstract class ClassfileParser {
}
else raiseLoaderLevel {
val superType = if (isAnnotation) { in.nextChar; definitions.AnnotationClass.tpe }
- else pool.getSuperClass(in.nextChar).tpe
+ else pool.getSuperClass(in.nextChar).tpe_*
val ifaceCount = in.nextChar
- var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(in.nextChar).tpe
+ var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(in.nextChar).tpe_*
if (isAnnotation) ifaces = definitions.ClassfileAnnotationClass.tpe :: ifaces
superType :: ifaces
}
@@ -541,7 +540,7 @@ abstract class ClassfileParser {
val staticInfo = ClassInfoType(List(), staticScope, moduleClass)
if (!isScala && !isScalaRaw)
- enterOwnInnerClasses
+ enterOwnInnerClasses()
val curbp = in.bp
skipMembers() // fields
@@ -565,7 +564,7 @@ abstract class ClassfileParser {
0 until in.nextChar foreach (_ => parseMethod())
val needsConstructor = (
!sawPrivateConstructor
- && instanceScope.lookup(nme.CONSTRUCTOR) == NoSymbol
+ && !(instanceScope containsName nme.CONSTRUCTOR)
&& (sflags & INTERFACE) == 0
)
if (needsConstructor)
@@ -599,13 +598,13 @@ abstract class ClassfileParser {
def parseField() {
val jflags = in.nextChar
- var sflags = toScalaFieldFlags(jflags)
+ val sflags = toScalaFieldFlags(jflags)
if ((sflags & PRIVATE) != 0L && !global.settings.optimise.value) {
in.skip(4); skipAttributes()
} else {
val name = pool.getName(in.nextChar)
val info = pool.getType(in.nextChar)
- val sym = getOwner(jflags).newValue(name, NoPosition, sflags)
+ val sym = getOwner(jflags).newValue(name.toTermName, NoPosition, sflags)
val isEnum = (jflags & JAVA_ACC_ENUM) != 0
sym setInfo {
@@ -629,7 +628,7 @@ abstract class ClassfileParser {
def parseMethod() {
val jflags = in.nextChar.toInt
- var sflags = toScalaMethodFlags(jflags)
+ val sflags = toScalaMethodFlags(jflags)
if (isPrivate(jflags) && !global.settings.optimise.value) {
val name = pool.getName(in.nextChar)
if (name == nme.CONSTRUCTOR)
@@ -640,7 +639,7 @@ abstract class ClassfileParser {
in.skip(4); skipAttributes()
} else {
val name = pool.getName(in.nextChar)
- val sym = getOwner(jflags).newMethod(name, NoPosition, sflags)
+ val sym = getOwner(jflags).newMethod(name.toTermName, NoPosition, sflags)
var info = pool.getType(sym, (in.nextChar))
if (name == nme.CONSTRUCTOR)
info match {
@@ -735,18 +734,14 @@ abstract class ClassfileParser {
}
accept('>')
assert(xs.length > 0, tp)
- newExistentialType(existentials.toList, typeRef(pre, classSym, xs.toList))
- } else if (classSym.isMonomorphicType) {
- tp
- } else {
- // raw type - existentially quantify all type parameters
- val eparams = typeParamsToExistentials(classSym, classSym.unsafeTypeParams)
- val t = typeRef(pre, classSym, eparams.map(_.tpeHK))
- val res = newExistentialType(eparams, t)
- if (settings.debug.value && settings.verbose.value)
- println("raw type " + classSym + " -> " + res)
- res
+ logResult("new existential")(newExistentialType(existentials.toList, typeRef(pre, classSym, xs.toList)))
}
+ // isMonomorphicType is false if the info is incomplete, as it usually is here
+ // so have to check unsafeTypeParams.isEmpty before worrying about raw type case below,
+ // or we'll create a boatload of needless existentials.
+ else if (classSym.isMonomorphicType || classSym.unsafeTypeParams.isEmpty) tp
+ // raw type - existentially quantify all type parameters
+ else logResult(s"raw type from $classSym")(definitions.unsafeClassExistentialType(classSym))
case tp =>
assert(sig.charAt(index) != '<', tp)
tp
@@ -754,7 +749,7 @@ abstract class ClassfileParser {
val classSym = classNameToSymbol(subName(c => c == ';' || c == '<'))
assert(!classSym.isOverloaded, classSym.alternatives)
- var tpe = processClassType(processInner(classSym.tpe))
+ var tpe = processClassType(processInner(classSym.tpe_*))
while (sig.charAt(index) == '.') {
accept('.')
val name = subName(c => c == ';' || c == '<' || c == '.').toTypeName
@@ -787,7 +782,7 @@ abstract class ClassfileParser {
index += 1
val restype = if (sym != null && sym.isClassConstructor) {
accept('V')
- clazz.tpe
+ clazz.tpe_*
} else
sig2type(tparams, skiptvs)
JavaMethodType(sym.newSyntheticValueParams(paramtypes.toList), restype)
@@ -819,14 +814,14 @@ abstract class ClassfileParser {
val tpname = subName(':'.==).toTypeName
val s = sym.newTypeParameter(tpname)
tparams = tparams + (tpname -> s)
- sig2typeBounds(tparams, true)
+ sig2typeBounds(tparams, skiptvs = true)
newTParams += s
}
index = start
while (sig.charAt(index) != '>') {
val tpname = subName(':'.==).toTypeName
val s = tparams(tpname)
- s.setInfo(sig2typeBounds(tparams, false))
+ s.setInfo(sig2typeBounds(tparams, skiptvs = false))
}
accept('>')
}
@@ -835,12 +830,12 @@ abstract class ClassfileParser {
sym.setInfo(new TypeParamsType(ownTypeParams))
val tpe =
if ((sym eq null) || !sym.isClass)
- sig2type(tparams, false)
+ sig2type(tparams, skiptvs = false)
else {
classTParams = tparams
val parents = new ListBuffer[Type]()
while (index < end) {
- parents += sig2type(tparams, false) // here the variance doesn't matter
+ parents += sig2type(tparams, skiptvs = false) // here the variance doesn't matter
}
ClassInfoType(parents.toList, instanceScope, sym)
}
@@ -875,7 +870,7 @@ abstract class ClassfileParser {
sym.setFlag(SYNTHETIC | ARTIFACT)
in.skip(attrLen)
case tpnme.BridgeATTR =>
- sym.setFlag(BRIDGE)
+ sym.setFlag(BRIDGE | ARTIFACT)
in.skip(attrLen)
case tpnme.DeprecatedATTR =>
val arg = Literal(Constant("see corresponding Javadoc for more information."))
@@ -1083,7 +1078,7 @@ abstract class ClassfileParser {
def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile, jflags: Int) {
val completer = new global.loaders.ClassfileLoader(file)
val name = entry.originalName
- var sflags = toScalaClassFlags(jflags)
+ val sflags = toScalaClassFlags(jflags)
val owner = getOwner(jflags)
val scope = getScope(jflags)
val innerClass = owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer
@@ -1136,7 +1131,7 @@ abstract class ClassfileParser {
case tpnme.ScalaSignatureATTR =>
isScala = true
val pbuf = new PickleBuffer(in.buf, in.bp, in.bp + attrLen)
- pbuf.readNat; pbuf.readNat;
+ pbuf.readNat(); pbuf.readNat()
if (pbuf.readNat == 0) // a scala signature attribute with no entries means that the actual scala signature
isScalaAnnot = true // is in a ScalaSignature annotation.
in.skip(attrLen)
@@ -1171,21 +1166,7 @@ abstract class ClassfileParser {
originalName + " in " + outerName + "(" + externalName +")"
}
- object innerClasses extends scala.collection.mutable.HashMap[Name, InnerClassEntry] {
- /** Return the Symbol of the top level class enclosing `name`,
- * or 'name's symbol if no entry found for `name`.
- */
- def topLevelClass(name: Name): Symbol = {
- val tlName = if (isDefinedAt(name)) {
- var entry = this(name)
- while (isDefinedAt(entry.outerName))
- entry = this(entry.outerName)
- entry.outerName
- } else
- name
- classNameToSymbol(tlName)
- }
-
+ object innerClasses extends mutable.HashMap[Name, InnerClassEntry] {
/** Return the class symbol for `externalName`. It looks it up in its outer class.
* Forces all outer class symbols to be completed.
*
@@ -1210,7 +1191,7 @@ abstract class ClassfileParser {
// if loading during initialization of `definitions` typerPhase is not yet set.
// in that case we simply load the member at the current phase
if (currentRun.typerPhase != null)
- beforeTyper(getMember(sym, innerName.toTypeName))
+ enteringTyper(getMember(sym, innerName.toTypeName))
else
getMember(sym, innerName.toTypeName)
@@ -1242,16 +1223,20 @@ abstract class ClassfileParser {
}
def skipAttributes() {
- val attrCount = in.nextChar
- for (i <- 0 until attrCount) {
- in.skip(2); in.skip(in.nextInt)
+ var attrCount: Int = in.nextChar
+ while (attrCount > 0) {
+ in skip 2
+ in skip in.nextInt
+ attrCount -= 1
}
}
def skipMembers() {
- val memberCount = in.nextChar
- for (i <- 0 until memberCount) {
- in.skip(6); skipAttributes()
+ var memberCount: Int = in.nextChar
+ while (memberCount > 0) {
+ in skip 6
+ skipAttributes()
+ memberCount -= 1
}
}
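For reference, the skipAttributes/skipMembers rewrite above walks the standard classfile layout: each attribute_info entry is a two-byte name index, a four-byte length, and then that many payload bytes, which is why the loop skips 2 bytes and then nextInt more per entry. Below is a minimal standalone sketch of the same skipping logic, assuming a simplified big-endian byte cursor (the ByteCursor name and helpers are illustrative, not the compiler's AbstractFileReader API):

  object SkipAttributesSketch {
    // Simplified big-endian cursor over a classfile byte buffer (illustrative only).
    final class ByteCursor(buf: Array[Byte]) {
      var bp = 0
      def nextChar: Int = { val v = ((buf(bp) & 0xff) << 8) | (buf(bp + 1) & 0xff); bp += 2; v } // u2
      def nextInt: Int  = { val hi = nextChar; val lo = nextChar; (hi << 16) | lo }              // u4
      def skip(n: Int): Unit = bp += n
    }

    // Skip `attributes_count` attribute_info entries: u2 name index, u4 length, then `length` bytes.
    def skipAttributes(in: ByteCursor): Unit = {
      var attrCount = in.nextChar
      while (attrCount > 0) {
        in.skip(2)          // attribute_name_index
        in.skip(in.nextInt) // attribute_length bytes of payload
        attrCount -= 1
      }
    }
  }

The explicit counter-and-while form mirrors the patch's replacement of the `for (i <- 0 until attrCount)` comprehension, sidestepping the Range/closure overhead of the comprehension in this frequently executed parsing path.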
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index 13c0d8993a..7010c9e20a 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -9,9 +9,7 @@ package classfile
import scala.collection.{ mutable, immutable }
import mutable.ListBuffer
-import backend.icode._
import ClassfileConstants._
-import scala.reflect.internal.Flags._
/** ICode reader from Java bytecode.
*
@@ -33,7 +31,6 @@ abstract class ICodeReader extends ClassfileParser {
* for non-static members.
*/
def readClass(cls: Symbol): (IClass, IClass) = {
- var classFile: io.AbstractFile = null;
cls.info // ensure accurate type information
isScalaModule = cls.isModule && !cls.isJavaDefined
@@ -58,11 +55,9 @@ abstract class ICodeReader extends ClassfileParser {
override def parseClass() {
this.instanceCode = new IClass(clazz)
this.staticCode = new IClass(staticModule)
- val jflags = in.nextChar
- val isAttribute = (jflags & JAVA_ACC_ANNOTATION) != 0
- val sflags = toScalaClassFlags(jflags) // what, this is never used??
- val c = pool getClassSymbol in.nextChar
+ in.nextChar
+ pool getClassSymbol in.nextChar
parseInnerClasses()
in.skip(2) // super class
@@ -70,13 +65,13 @@ abstract class ICodeReader extends ClassfileParser {
val fieldCount = in.nextChar
for (i <- 0 until fieldCount) parseField()
val methodCount = in.nextChar
- for (i <- 0 until methodCount) parseMethod();
+ for (i <- 0 until methodCount) parseMethod()
instanceCode.methods = instanceCode.methods.reverse
staticCode.methods = staticCode.methods.reverse
}
override def parseField() {
- val (jflags, sym) = parseMember(true)
+ val (jflags, sym) = parseMember(field = true)
getCode(jflags) addField new IField(sym)
skipAttributes()
}
@@ -85,7 +80,7 @@ abstract class ICodeReader extends ClassfileParser {
val jflags = in.nextChar
val name = pool getName in.nextChar
val owner = getOwner(jflags)
- val dummySym = owner.newMethod(name, owner.pos, toScalaMethodFlags(jflags))
+ val dummySym = owner.newMethod(name.toTermName, owner.pos, toScalaMethodFlags(jflags))
try {
val ch = in.nextChar
@@ -95,11 +90,11 @@ abstract class ICodeReader extends ClassfileParser {
(jflags, NoSymbol)
else {
val owner = getOwner(jflags)
- var sym = owner.info.findMember(name, 0, 0, false).suchThat(old => sameType(old.tpe, tpe))
+ var sym = owner.info.findMember(name, 0, 0, stableOnly = false).suchThat(old => sameType(old.tpe, tpe))
if (sym == NoSymbol)
- sym = owner.info.findMember(newTermName(name + nme.LOCAL_SUFFIX_STRING), 0, 0, false).suchThat(_.tpe =:= tpe)
+ sym = owner.info.findMember(newTermName(name + nme.LOCAL_SUFFIX_STRING), 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
if (sym == NoSymbol) {
- sym = if (field) owner.newValue(name, owner.pos, toScalaFieldFlags(jflags)) else dummySym
+ sym = if (field) owner.newValue(name.toTermName, owner.pos, toScalaFieldFlags(jflags)) else dummySym
sym setInfoAndEnter tpe
log(s"ICodeReader could not locate ${name.decode} in $owner. Created ${sym.defString}.")
}
@@ -124,8 +119,8 @@ abstract class ICodeReader extends ClassfileParser {
}
override def parseMethod() {
- val (jflags, sym) = parseMember(false)
- var beginning = in.bp
+ val (jflags, sym) = parseMember(field = false)
+ val beginning = in.bp
try {
if (sym != NoSymbol) {
this.method = new IMethod(sym)
@@ -136,13 +131,13 @@ abstract class ICodeReader extends ClassfileParser {
val attributeCount = in.nextChar
for (i <- 0 until attributeCount) parseAttribute()
} else {
- debuglog("Skipping non-existent method.");
- skipAttributes();
+ debuglog("Skipping non-existent method.")
+ skipAttributes()
}
} catch {
case e: MissingRequirementError =>
- in.bp = beginning; skipAttributes
- debuglog("Skipping non-existent method. " + e.msg);
+ in.bp = beginning; skipAttributes()
+ debuglog("Skipping non-existent method. " + e.msg)
}
}
@@ -170,11 +165,11 @@ abstract class ICodeReader extends ClassfileParser {
}
else if (nme.isModuleName(name)) {
val strippedName = nme.stripModuleSuffix(name)
- forceMangledName(newTermName(strippedName.decode), true) orElse rootMirror.getModule(strippedName)
+ forceMangledName(newTermName(strippedName.decode), module = true) orElse rootMirror.getModuleByName(strippedName)
}
else {
- forceMangledName(name, false)
- afterFlatten(rootMirror.getClassByName(name.toTypeName))
+ forceMangledName(name, module = false)
+ exitingFlatten(rootMirror.getClassByName(name.toTypeName))
}
if (sym.isModule)
sym.moduleClass
@@ -222,7 +217,7 @@ abstract class ICodeReader extends ClassfileParser {
val instr = toUnsignedByte(in.nextByte)
instr match {
- case JVM.nop => parseInstruction
+ case JVM.nop => parseInstruction()
case JVM.aconst_null => code emit CONSTANT(Constant(null))
case JVM.iconst_m1 => code emit CONSTANT(Constant(-1))
case JVM.iconst_0 => code emit CONSTANT(Constant(0))
@@ -252,9 +247,9 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.aload =>
val local = in.nextByte.toInt; size += 1
if (local == 0 && !method.isStatic)
- code.emit(THIS(method.symbol.owner));
+ code.emit(THIS(method.symbol.owner))
else
- code.emit(LOAD_LOCAL(code.getLocal(local, ObjectReference)));
+ code.emit(LOAD_LOCAL(code.getLocal(local, ObjectReference)))
case JVM.iload_0 => code.emit(LOAD_LOCAL(code.getLocal(0, INT)))
case JVM.iload_1 => code.emit(LOAD_LOCAL(code.getLocal(1, INT)))
@@ -274,9 +269,9 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.dload_3 => code.emit(LOAD_LOCAL(code.getLocal(3, DOUBLE)))
case JVM.aload_0 =>
if (!method.isStatic)
- code.emit(THIS(method.symbol.owner));
+ code.emit(THIS(method.symbol.owner))
else
- code.emit(LOAD_LOCAL(code.getLocal(0, ObjectReference)));
+ code.emit(LOAD_LOCAL(code.getLocal(0, ObjectReference)))
case JVM.aload_1 => code.emit(LOAD_LOCAL(code.getLocal(1, ObjectReference)))
case JVM.aload_2 => code.emit(LOAD_LOCAL(code.getLocal(2, ObjectReference)))
case JVM.aload_3 => code.emit(LOAD_LOCAL(code.getLocal(3, ObjectReference)))
@@ -471,41 +466,41 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.return_ => code.emit(RETURN(UNIT))
case JVM.getstatic =>
- val field = pool.getMemberSymbol(in.nextChar, true); size += 2
+ val field = pool.getMemberSymbol(in.nextChar, static = true); size += 2
if (field.hasModuleFlag)
code emit LOAD_MODULE(field)
else
- code emit LOAD_FIELD(field, true)
+ code emit LOAD_FIELD(field, isStatic = true)
case JVM.putstatic =>
- val field = pool.getMemberSymbol(in.nextChar, true); size += 2
- code.emit(STORE_FIELD(field, true))
+ val field = pool.getMemberSymbol(in.nextChar, static = true); size += 2
+ code.emit(STORE_FIELD(field, isStatic = true))
case JVM.getfield =>
- val field = pool.getMemberSymbol(in.nextChar, false); size += 2
- code.emit(LOAD_FIELD(field, false))
+ val field = pool.getMemberSymbol(in.nextChar, static = false); size += 2
+ code.emit(LOAD_FIELD(field, isStatic = false))
case JVM.putfield =>
- val field = pool.getMemberSymbol(in.nextChar, false); size += 2
- code.emit(STORE_FIELD(field, false))
+ val field = pool.getMemberSymbol(in.nextChar, static = false); size += 2
+ code.emit(STORE_FIELD(field, isStatic = false))
case JVM.invokevirtual =>
- val m = pool.getMemberSymbol(in.nextChar, false); size += 2
+ val m = pool.getMemberSymbol(in.nextChar, static = false); size += 2
code.emit(CALL_METHOD(m, Dynamic))
case JVM.invokeinterface =>
- val m = pool.getMemberSymbol(in.nextChar, false); size += 4
+ val m = pool.getMemberSymbol(in.nextChar, static = false); size += 4
in.skip(2)
code.emit(CALL_METHOD(m, Dynamic))
case JVM.invokespecial =>
- val m = pool.getMemberSymbol(in.nextChar, false); size += 2
- val style = if (m.name == nme.CONSTRUCTOR || m.isPrivate) Static(true)
- else SuperCall(m.owner.name);
+ val m = pool.getMemberSymbol(in.nextChar, static = false); size += 2
+ val style = if (m.name == nme.CONSTRUCTOR || m.isPrivate) Static(onInstance = true)
+ else SuperCall(m.owner.name)
code.emit(CALL_METHOD(m, style))
case JVM.invokestatic =>
- val m = pool.getMemberSymbol(in.nextChar, true); size += 2
+ val m = pool.getMemberSymbol(in.nextChar, static = true); size += 2
if (isBox(m))
code.emit(BOX(toTypeKind(m.info.paramTypes.head)))
else if (isUnbox(m))
code.emit(UNBOX(toTypeKind(m.info.resultType)))
else
- code.emit(CALL_METHOD(m, Static(false)))
+ code.emit(CALL_METHOD(m, Static(onInstance = false)))
case JVM.new_ =>
code.emit(NEW(REFERENCE(pool.getClassSymbol(in.nextChar))))
@@ -586,7 +581,7 @@ abstract class ICodeReader extends ClassfileParser {
}
pc = 0
- while (pc < codeLength) parseInstruction
+ while (pc < codeLength) parseInstruction()
val exceptionEntries = in.nextChar.toInt
code.containsEHs = (exceptionEntries != 0)
@@ -637,9 +632,9 @@ abstract class ICodeReader extends ClassfileParser {
else instanceCode
class LinearCode {
- var instrs: ListBuffer[(Int, Instruction)] = new ListBuffer
- var jmpTargets: mutable.Set[Int] = perRunCaches.newSet[Int]()
- var locals: mutable.Map[Int, List[(Local, TypeKind)]] = perRunCaches.newMap()
+ val instrs: ListBuffer[(Int, Instruction)] = new ListBuffer
+ val jmpTargets: mutable.Set[Int] = perRunCaches.newSet[Int]()
+ val locals: mutable.Map[Int, List[(Local, TypeKind)]] = perRunCaches.newMap()
var containsDUPX = false
var containsNEW = false
@@ -669,7 +664,6 @@ abstract class ICodeReader extends ClassfileParser {
val blocks = makeBasicBlocks
var otherBlock: BasicBlock = NoBasicBlock
- var disableJmpTarget = false
for ((pc, instr) <- instrs.iterator) {
// Console.println("> " + pc + ": " + instr);
@@ -677,7 +671,7 @@ abstract class ICodeReader extends ClassfileParser {
otherBlock = blocks(pc)
if (!bb.closed && otherBlock != bb) {
bb.emit(JUMP(otherBlock))
- bb.close
+ bb.close()
// Console.println("\t> closing bb: " + bb)
}
bb = otherBlock
@@ -720,46 +714,44 @@ abstract class ICodeReader extends ClassfileParser {
val tfa = new analysis.MethodTFA() {
import analysis._
- import analysis.typeFlowLattice.IState
/** Abstract interpretation for one instruction. */
override def mutatingInterpret(out: typeFlowLattice.Elem, i: Instruction): typeFlowLattice.Elem = {
- val bindings = out.vars
val stack = out.stack
import stack.push
i match {
case DUP_X1 =>
val (one, two) = stack.pop2
- push(one); push(two); push(one);
+ push(one); push(two); push(one)
case DUP_X2 =>
val (one, two, three) = stack.pop3
- push(one); push(three); push(two); push(one);
+ push(one); push(three); push(two); push(one)
case DUP2_X1 =>
val (one, two) = stack.pop2
if (one.isWideType) {
- push(one); push(two); push(one);
+ push(one); push(two); push(one)
} else {
val three = stack.pop
- push(two); push(one); push(three); push(two); push(one);
+ push(two); push(one); push(three); push(two); push(one)
}
case DUP2_X2 =>
val (one, two) = stack.pop2
if (one.isWideType && two.isWideType) {
- push(one); push(two); push(one);
+ push(one); push(two); push(one)
} else if (one.isWideType) {
val three = stack.pop
assert(!three.isWideType, "Impossible")
- push(one); push(three); push(two); push(one);
+ push(one); push(three); push(two); push(one)
} else {
val three = stack.pop
if (three.isWideType) {
- push(two); push(one); push(one); push(three); push(two); push(one);
+ push(two); push(one); push(one); push(three); push(two); push(one)
} else {
val four = stack.pop
- push(two); push(one); push(four); push(one); push(three); push(two); push(one);
+ push(two); push(one); push(four); push(one); push(three); push(two); push(one)
}
}
@@ -772,7 +764,7 @@ abstract class ICodeReader extends ClassfileParser {
// method.dump
tfa.init(method)
- tfa.run
+ tfa.run()
for (bb <- linearizer.linearize(method)) {
var info = tfa.in(bb)
for (i <- bb.toList) {
@@ -787,7 +779,7 @@ abstract class ICodeReader extends ClassfileParser {
STORE_LOCAL(tmp2),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
case DUP_X2 =>
val one = info.stack.types(0)
@@ -800,30 +792,30 @@ abstract class ICodeReader extends ClassfileParser {
STORE_LOCAL(tmp2),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
else {
- val tmp3 = freshLocal(info.stack.types(2));
+ val tmp3 = freshLocal(info.stack.types(2))
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
STORE_LOCAL(tmp3),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp3),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
}
case DUP2_X1 =>
val one = info.stack.types(0)
val two = info.stack.types(1)
- val tmp1 = freshLocal(one);
- val tmp2 = freshLocal(two);
+ val tmp1 = freshLocal(one)
+ val tmp2 = freshLocal(two)
if (one.isWideType) {
assert(!two.isWideType, "Impossible")
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
} else {
val tmp3 = freshLocal(info.stack.types(2))
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
@@ -832,7 +824,7 @@ abstract class ICodeReader extends ClassfileParser {
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp3),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
}
case DUP2_X2 =>
@@ -845,21 +837,21 @@ abstract class ICodeReader extends ClassfileParser {
STORE_LOCAL(tmp2),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
} else if (one.isWideType) {
val three = info.stack.types(2)
assert(!two.isWideType && !three.isWideType, "Impossible")
- val tmp3 = freshLocal(three);
+ val tmp3 = freshLocal(three)
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
STORE_LOCAL(tmp3),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp3),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
} else {
val three = info.stack.types(2)
- val tmp3 = freshLocal(three);
+ val tmp3 = freshLocal(three)
if (three.isWideType) {
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
@@ -868,10 +860,10 @@ abstract class ICodeReader extends ClassfileParser {
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp3),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
} else {
val four = info.stack.types(3)
- val tmp4 = freshLocal(three);
+ val tmp4 = freshLocal(three)
assert(!four.isWideType, "Impossible")
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
@@ -882,7 +874,7 @@ abstract class ICodeReader extends ClassfileParser {
LOAD_LOCAL(tmp4),
LOAD_LOCAL(tmp3),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
}
}
case _ =>
@@ -897,11 +889,11 @@ abstract class ICodeReader extends ClassfileParser {
import opcodes._
val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis
rdef.init(method)
- rdef.run
+ rdef.run()
for (bb <- method.code.blocks ; (i, idx) <- bb.toList.zipWithIndex) i match {
case cm @ CALL_METHOD(m, Static(true)) if m.isClassConstructor =>
- def loop(bb0: BasicBlock, idx0: Int, depth: Int = 0): Unit = {
+ def loop(bb0: BasicBlock, idx0: Int, depth: Int): Unit = {
rdef.findDefs(bb0, idx0, 1, depth) match {
case ((bb1, idx1)) :: _ =>
bb1(idx1) match {
@@ -938,7 +930,7 @@ abstract class ICodeReader extends ClassfileParser {
locals.get(idx) match {
case Some(ls) =>
- val l = ls find { loc => loc._2 <:< kind }
+ val l = ls find { loc => loc._2 isAssignabledTo kind }
l match {
case Some((loc, _)) => loc
case None =>
@@ -949,8 +941,8 @@ abstract class ICodeReader extends ClassfileParser {
l
}
case None =>
- checkValidIndex
- val l = freshLocal(idx, kind, false)
+ checkValidIndex()
+ val l = freshLocal(idx, kind, isArg = false)
debuglog("Added new local for idx " + idx + ": " + kind)
locals += (idx -> List((l, kind)))
l
@@ -962,7 +954,7 @@ abstract class ICodeReader extends ClassfileParser {
/** Return a fresh Local variable for the given index.
*/
private def freshLocal(idx: Int, kind: TypeKind, isArg: Boolean) = {
- val sym = method.symbol.newVariable(newTermName("loc" + idx)).setInfo(kind.toType);
+ val sym = method.symbol.newVariable(newTermName("loc" + idx)).setInfo(kind.toType)
val l = new Local(sym, kind, isArg)
method.addLocal(l)
l
@@ -974,7 +966,7 @@ abstract class ICodeReader extends ClassfileParser {
* the original method. */
def freshLocal(kind: TypeKind): Local = {
count += 1
- freshLocal(maxLocals + count, kind, false)
+ freshLocal(maxLocals + count, kind, isArg = false)
}
/** add a method param with the given index. */
@@ -992,7 +984,8 @@ abstract class ICodeReader extends ClassfileParser {
jmpTargets += pc
}
- case class LJUMP(pc: Int) extends LazyJump(pc);
+ case class LJUMP(pc: Int) extends LazyJump(pc)
+
case class LCJUMP(success: Int, failure: Int, cond: TestOp, kind: TypeKind)
extends LazyJump(success) {
override def toString(): String = "LCJUMP (" + kind + ") " + success + " : " + failure
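The DUP_X1/DUP_X2/DUP2_X1/DUP2_X2 cases earlier in this file reproduce the JVM's operand-stack shuffles on the analyzer's type stack. As a quick cross-check of the single-slot forms (wide long/double values take the other branches via isWideType), here is a minimal sketch over a plain List whose head is the top of the stack; the string values are illustrative, not the compiler's TypeKind:

  object DupShuffleSketch {
    // DUP_X1: ..., v2, v1  ->  ..., v1, v2, v1   (list head = top of stack)
    def dupX1[A](stack: List[A]): List[A] = stack match {
      case one :: two :: rest => one :: two :: one :: rest
      case _                  => sys.error("stack underflow")
    }

    // DUP_X2 (all single-slot values): ..., v3, v2, v1  ->  ..., v1, v3, v2, v1
    def dupX2[A](stack: List[A]): List[A] = stack match {
      case one :: two :: three :: rest => one :: two :: three :: one :: rest
      case _                           => sys.error("stack underflow")
    }

    def main(args: Array[String]): Unit = {
      println(dupX1(List("v1", "v2")))        // List(v1, v2, v1)
      println(dupX2(List("v1", "v2", "v3")))  // List(v1, v2, v3, v1)
    }
  }

This matches the ordering the abstract interpreter produces with push(one); push(two); push(one) and push(one); push(three); push(two); push(one), since the last value pushed ends up on top of the stack.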
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index e8b0cd2696..140be0e17b 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -26,12 +26,8 @@ import Flags._
abstract class Pickler extends SubComponent {
import global._
- private final val showSig = false
-
val phaseName = "pickler"
- currentRun
-
def newPhase(prev: Phase): StdPhase = new PicklePhase(prev)
class PicklePhase(prev: Phase) extends StdPhase(prev) {
@@ -68,12 +64,8 @@ abstract class Pickler extends SubComponent {
return
}
- if (!t.isDef && t.hasSymbol && t.symbol.isTermMacro) {
- unit.error(t.pos, t.symbol.typeParams.length match {
- case 0 => "macro has not been expanded"
- case 1 => "this type parameter must be specified"
- case _ => "these type parameters must be specified"
- })
+ if (!t.isDef && t.hasSymbolField && t.symbol.isTermMacro) {
+ unit.error(t.pos, "macro has not been expanded")
return
}
}
@@ -138,11 +130,34 @@ abstract class Pickler extends SubComponent {
true
}
+ /** If the symbol is a type skolem, deskolemize and log it.
+ * If we fail to deskolemize, in a method like
+ * trait Trait[+A] { def f[CC[X]] : CC[A] }
+ * the applied type CC[A] will hold a different CC symbol
+ * than the type-constructor type-parameter CC.
+ */
+ private def deskolemize(sym: Symbol) = {
+ if (sym.isTypeSkolem) {
+ val sym1 = sym.deSkolemize
+ log({
+ val what0 = sym.defString
+ val what = sym1.defString match {
+ case `what0` => what0
+ case other => what0 + "->" + other
+ }
+ val where = sym.enclMethod.fullLocationString
+ s"deskolemizing $what in $where"
+ })
+ sym1
+ }
+ else sym
+ }
+
/** Store symbol in index. If symbol is local, also store everything it references.
- *
- * @param sym ...
*/
- def putSymbol(sym: Symbol) {
+ def putSymbol(sym0: Symbol) {
+ val sym = deskolemize(sym0)
+
if (putEntry(sym)) {
if (isLocal(sym)) {
putEntry(sym.name)
@@ -150,7 +165,7 @@ abstract class Pickler extends SubComponent {
putSymbol(sym.privateWithin)
putType(sym.info)
if (sym.thisSym.tpeHK != sym.tpeHK)
- putType(sym.typeOfThis);
+ putType(sym.typeOfThis)
putSymbol(sym.alias)
if (!sym.children.isEmpty) {
val (locals, globals) = sym.children partition (_.isLocalClass)
@@ -177,7 +192,7 @@ abstract class Pickler extends SubComponent {
*/
private def putType(tp: Type): Unit = if (putEntry(tp)) {
tp match {
- case NoType | NoPrefix /*| DeBruijnIndex(_, _) */ =>
+ case NoType | NoPrefix =>
;
case ThisType(sym) =>
putSymbol(sym)
@@ -217,8 +232,8 @@ abstract class Pickler extends SubComponent {
// val savedBoundSyms = boundSyms // boundSyms are known to be local based on the EXISTENTIAL flag (see isLocal)
// boundSyms = tparams ::: boundSyms
// try {
- putType(restpe);
-// } finally {
+ putType(restpe)
+ // } finally {
// boundSyms = savedBoundSyms
// }
putSymbols(tparams)
@@ -235,7 +250,7 @@ abstract class Pickler extends SubComponent {
private def putTree(tree: Tree): Unit = if (putEntry(tree)) {
if (tree != EmptyTree)
putType(tree.tpe)
- if (tree.hasSymbol)
+ if (tree.hasSymbolField)
putSymbol(tree.symbol)
tree match {
@@ -427,7 +442,7 @@ abstract class Pickler extends SubComponent {
* argument of some Annotation */
private def putMods(mods: Modifiers) = if (putEntry(mods)) {
// annotations in Modifiers are removed by the typechecker
- val Modifiers(flags, privateWithin, Nil) = mods
+ val Modifiers(_, privateWithin, Nil) = mods
putEntry(privateWithin)
}
@@ -495,7 +510,13 @@ abstract class Pickler extends SubComponent {
/** Write a reference to object, i.e., the object's number in the map index.
*/
- private def writeRef(ref: AnyRef) { writeNat(index(ref)) }
+ private def writeRef(ref0: AnyRef) {
+ val ref = ref0 match {
+ case sym: Symbol => deskolemize(sym)
+ case _ => ref0
+ }
+ writeNat(index(ref))
+ }
private def writeRefs(refs: List[AnyRef]) { refs foreach writeRef }
private def writeRefsWithLength(refs: List[AnyRef]) {
writeNat(refs.length)
@@ -568,7 +589,7 @@ abstract class Pickler extends SubComponent {
tag
case sym: ClassSymbol =>
writeSymInfo(sym)
- if (sym.thisSym.tpe != sym.tpe) writeRef(sym.typeOfThis)
+ if (sym.thisSym.tpe_* != sym.tpe_*) writeRef(sym.typeOfThis)
CLASSsym
case sym: TypeSymbol =>
writeSymInfo(sym)
@@ -609,8 +630,6 @@ abstract class Pickler extends SubComponent {
writeRef(restpe); writeRefs(tparams); POLYtpe
case ExistentialType(tparams, restpe) =>
writeRef(restpe); writeRefs(tparams); EXISTENTIALtpe
- // case DeBruijnIndex(l, i) =>
- // writeNat(l); writeNat(i); DEBRUIJNINDEXtpe
case c @ Constant(_) =>
if (c.tag == BooleanTag) writeLong(if (c.booleanValue) 1 else 0)
else if (ByteTag <= c.tag && c.tag <= LongTag) writeLong(c.longValue)
@@ -993,115 +1012,6 @@ abstract class Pickler extends SubComponent {
patchNat(startpos + 1, writeIndex - (startpos + 2))
}
- /** Print entry for diagnostics */
- def printEntryAtIndex(idx: Int) = printEntry(entries(idx))
- def printEntry(entry: AnyRef) {
- def printRef(ref: AnyRef) {
- print(index(ref)+
- (if (ref.isInstanceOf[Name]) "("+ref+") " else " "))
- }
- def printRefs(refs: List[AnyRef]) { refs foreach printRef }
- def printSymInfo(sym: Symbol) {
- var posOffset = 0
- printRef(sym.name)
- printRef(localizedOwner(sym))
- print(flagsToString(sym.flags & PickledFlags)+" ")
- if (sym.hasAccessBoundary) printRef(sym.privateWithin)
- printRef(sym.info)
- }
- def printBody(entry: AnyRef) = entry match {
- case name: Name =>
- print((if (name.isTermName) "TERMname " else "TYPEname ")+name)
- case NoSymbol =>
- print("NONEsym")
- case sym: Symbol if !isLocal(sym) =>
- if (sym.isModuleClass) {
- print("EXTMODCLASSref "); printRef(sym.name.toTermName)
- } else {
- print("EXTref "); printRef(sym.name)
- }
- if (!sym.owner.isRoot) printRef(sym.owner)
- case sym: ClassSymbol =>
- print("CLASSsym ")
- printSymInfo(sym)
- if (sym.thisSym.tpe != sym.tpe) printRef(sym.typeOfThis)
- case sym: TypeSymbol =>
- print(if (sym.isAbstractType) "TYPEsym " else "ALIASsym ")
- printSymInfo(sym)
- case sym: TermSymbol =>
- print(if (sym.isModule) "MODULEsym " else "VALsym ")
- printSymInfo(sym)
- if (sym.alias != NoSymbol) printRef(sym.alias)
- case NoType =>
- print("NOtpe")
- case NoPrefix =>
- print("NOPREFIXtpe")
- case ThisType(sym) =>
- print("THIStpe "); printRef(sym)
- case SingleType(pre, sym) =>
- print("SINGLEtpe "); printRef(pre); printRef(sym);
- case ConstantType(value) =>
- print("CONSTANTtpe "); printRef(value);
- case TypeRef(pre, sym, args) =>
- print("TYPEREFtpe "); printRef(pre); printRef(sym); printRefs(args);
- case TypeBounds(lo, hi) =>
- print("TYPEBOUNDStpe "); printRef(lo); printRef(hi);
- case tp @ RefinedType(parents, decls) =>
- print("REFINEDtpe "); printRef(tp.typeSymbol); printRefs(parents);
- case ClassInfoType(parents, decls, clazz) =>
- print("CLASSINFOtpe "); printRef(clazz); printRefs(parents);
- case mt @ MethodType(formals, restpe) =>
- print("METHODtpe"); printRef(restpe); printRefs(formals)
- case PolyType(tparams, restpe) =>
- print("POLYtpe "); printRef(restpe); printRefs(tparams);
- case ExistentialType(tparams, restpe) =>
- print("EXISTENTIALtpe "); printRef(restpe); printRefs(tparams);
- print("||| "+entry)
- // case DeBruijnIndex(l, i) =>
- // print("DEBRUIJNINDEXtpe "); print(l+" "+i)
- case c @ Constant(_) =>
- print("LITERAL ")
- if (c.tag == BooleanTag) print("Boolean "+(if (c.booleanValue) 1 else 0))
- else if (c.tag == ByteTag) print("Byte "+c.longValue)
- else if (c.tag == ShortTag) print("Short "+c.longValue)
- else if (c.tag == CharTag) print("Char "+c.longValue)
- else if (c.tag == IntTag) print("Int "+c.longValue)
- else if (c.tag == LongTag) print("Long "+c.longValue)
- else if (c.tag == FloatTag) print("Float "+c.floatValue)
- else if (c.tag == DoubleTag) print("Double "+c.doubleValue)
- else if (c.tag == StringTag) { print("String "); printRef(newTermName(c.stringValue)) }
- else if (c.tag == ClazzTag) { print("Class "); printRef(c.typeValue) }
- else if (c.tag == EnumTag) { print("Enum "); printRef(c.symbolValue) }
- case AnnotatedType(annots, tp, selfsym) =>
- if (settings.selfInAnnots.value) {
- print("ANNOTATEDWSELFtpe ")
- printRef(tp)
- printRef(selfsym)
- printRefs(annots)
- } else {
- print("ANNOTATEDtpe ")
- printRef(tp)
- printRefs(annots)
- }
- case (target: Symbol, AnnotationInfo(atp, args, Nil)) =>
- print("SYMANNOT ")
- printRef(target)
- printRef(atp)
- for (c <- args) printRef(c)
- case (target: Symbol, children: List[_]) =>
- print("CHILDREN ")
- printRef(target)
- for (c <- children) printRef(c.asInstanceOf[Symbol])
- case AnnotationInfo(atp, args, Nil) =>
- print("ANNOTINFO")
- printRef(atp)
- for (c <- args) printRef(c)
- case _ =>
- throw new FatalError("bad entry: " + entry + " " + entry.getClass)
- }
- printBody(entry); println()
- }
-
/** Write byte array */
def writeArray() {
assert(writeIndex == 0)
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala b/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala
deleted file mode 100644
index 40189b9444..0000000000
--- a/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala
+++ /dev/null
@@ -1,137 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2004-2013 LAMP/EPFL
- */
-
-
-package scala.tools.nsc
-package symtab
-package clr
-
-import java.io.File
-import java.util.{Comparator, StringTokenizer}
-import scala.util.Sorting
-import ch.epfl.lamp.compiler.msil._
-import scala.collection.{ mutable, immutable }
-import scala.reflect.internal.util.{Position, NoPosition}
-
-/**
- * Collects all types from all reference assemblies.
- */
-abstract class CLRTypes {
-
- val global: Global
- import global.Symbol
- import global.definitions
-
- //##########################################################################
-
- var BYTE: Type = _
- var UBYTE: Type = _
- var SHORT: Type = _
- var USHORT: Type = _
- var CHAR: Type = _
- var INT: Type = _
- var UINT: Type = _
- var LONG: Type = _
- var ULONG: Type = _
- var FLOAT: Type = _
- var DOUBLE: Type = _
- var BOOLEAN: Type = _
- var VOID: Type = _
- var ENUM: Type = _
- var DELEGATE: Type = _
-
- var OBJECT: Type = _
- var STRING: Type = _
- var STRING_ARRAY: Type = _
-
- var VALUE_TYPE: Type = _
-
- var SCALA_SYMTAB_ATTR: Type = _
- var SYMTAB_CONSTR: ConstructorInfo = _
- var SYMTAB_DEFAULT_CONSTR: ConstructorInfo = _
-
- var DELEGATE_COMBINE: MethodInfo = _
- var DELEGATE_REMOVE: MethodInfo = _
-
- val types: mutable.Map[Symbol,Type] = new mutable.HashMap
- val constructors: mutable.Map[Symbol,ConstructorInfo] = new mutable.HashMap
- val methods: mutable.Map[Symbol,MethodInfo] = new mutable.HashMap
- val fields: mutable.Map[Symbol, FieldInfo] = new mutable.HashMap
- val sym2type: mutable.Map[Type,Symbol] = new mutable.HashMap
- val addressOfViews = new mutable.HashSet[Symbol]
- val mdgptrcls4clssym: mutable.Map[ /*cls*/ Symbol, /*cls*/ Symbol] = new mutable.HashMap
-
- def isAddressOf(msym : Symbol) = addressOfViews.contains(msym)
-
- def isNonEnumValuetype(cls: Symbol) = {
- val msilTOpt = types.get(cls)
- val res = msilTOpt.isDefined && {
- val msilT = msilTOpt.get
- msilT.IsValueType && !msilT.IsEnum
- }
- res
- }
-
- def isValueType(cls: Symbol): Boolean = {
- val opt = types.get(cls)
- opt.isDefined && opt.get.IsValueType
- }
-
- def init() = try { // initialize
- // the MsilClasspath (nsc/util/Classpath.scala) initializes the msil-library by calling
- // Assembly.LoadFrom("mscorlib.dll"), so this type should be found
- Type.initMSCORLIB(getTypeSafe("System.String").Assembly)
-
- BYTE = getTypeSafe("System.SByte")
- UBYTE = getTypeSafe("System.Byte")
- CHAR = getTypeSafe("System.Char")
- SHORT = getTypeSafe("System.Int16")
- USHORT = getTypeSafe("System.UInt16")
- INT = getTypeSafe("System.Int32")
- UINT = getTypeSafe("System.UInt32")
- LONG = getTypeSafe("System.Int64")
- ULONG = getTypeSafe("System.UInt64")
- FLOAT = getTypeSafe("System.Single")
- DOUBLE = getTypeSafe("System.Double")
- BOOLEAN = getTypeSafe("System.Boolean")
- VOID = getTypeSafe("System.Void")
- ENUM = getTypeSafe("System.Enum")
- DELEGATE = getTypeSafe("System.MulticastDelegate")
-
- OBJECT = getTypeSafe("System.Object")
- STRING = getTypeSafe("System.String")
- STRING_ARRAY = getTypeSafe("System.String[]")
- VALUE_TYPE = getTypeSafe("System.ValueType")
-
- SCALA_SYMTAB_ATTR = getTypeSafe("scala.runtime.SymtabAttribute")
- val bytearray: Array[Type] = Array(Type.GetType("System.Byte[]"))
- SYMTAB_CONSTR = SCALA_SYMTAB_ATTR.GetConstructor(bytearray)
- SYMTAB_DEFAULT_CONSTR = SCALA_SYMTAB_ATTR.GetConstructor(Type.EmptyTypes)
-
- val delegate: Type = getTypeSafe("System.Delegate")
- val dargs: Array[Type] = Array(delegate, delegate)
- DELEGATE_COMBINE = delegate.GetMethod("Combine", dargs)
- DELEGATE_REMOVE = delegate.GetMethod("Remove", dargs)
- }
- catch {
- case e: RuntimeException =>
- Console.println(e.getMessage)
- throw e
- }
-
- //##########################################################################
- // type mapping and lookup
-
- def getType(name: String): Type = Type.GetType(name)
-
- def getTypeSafe(name: String): Type = {
- val t = Type.GetType(name)
- assert(t != null, name)
- t
- }
-
- def mkArrayType(elemType: Type): Type = getType(elemType.FullName + "[]")
-
- def isDelegateType(t: Type): Boolean = { t.BaseType() == DELEGATE }
-} // CLRTypes
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
deleted file mode 100644
index 5a0253c18b..0000000000
--- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
+++ /dev/null
@@ -1,850 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2004-2013 LAMP/EPFL
- */
-
-package scala.tools.nsc
-package symtab
-package clr
-
-import java.io.IOException
-import io.MsilFile
-import ch.epfl.lamp.compiler.msil.{Type => MSILType, Attribute => MSILAttribute, _}
-import scala.collection.{ mutable, immutable }
-import scala.reflect.internal.pickling.UnPickler
-import ch.epfl.lamp.compiler.msil.Type.TMVarUsage
-import scala.language.implicitConversions
-
-/**
- * @author Nikolay Mihaylov
- */
-abstract class TypeParser {
-
- val global: Global
-
- import global._
- import loaders.clrTypes
-
- //##########################################################################
-
- private var clazz: Symbol = _
- private var instanceDefs: Scope = _ // was members
- private var staticModule: Symbol = _ // was staticsClass
- private var staticDefs: Scope = _ // was statics
-
- protected def statics: Symbol = staticModule.moduleClass
-
- protected var busy: Boolean = false // lock to detect recursive reads
-
- private object unpickler extends UnPickler {
- val global: TypeParser.this.global.type = TypeParser.this.global
- }
-
- def parse(typ: MSILType, root: Symbol) {
-
- def handleError(e: Throwable) = {
- if (settings.debug.value) e.printStackTrace() //debug
- throw new IOException("type '" + typ.FullName + "' is broken\n(" + e.getMessage() + ")")
- }
- assert(!busy)
- busy = true
-
- if (root.isModule) {
- this.clazz = root.companionClass
- this.staticModule = root
- } else {
- this.clazz = root
- this.staticModule = root.companionModule
- }
- try {
- parseClass(typ)
- } catch {
- case e: FatalError => handleError(e)
- case e: RuntimeException => handleError(e)
- }
- busy = false
- }
-
- class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
- override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") }
- }
-
- /* the names `classTParams` and `newTParams` stem from the forJVM version (ClassfileParser.sigToType())
- * but there are differences that should be kept in mind.
- * forMSIL, a nested class knows nothing about any type-params in the nesting class,
- * therefore newTParams is redundant (other than for recording lexical order),
- * it always contains the same elements as classTParams.value */
- val classTParams = scala.collection.mutable.Map[Int,Symbol]() // TODO should this be a stack? (i.e., is it possible for >1 invocation to getCLRType on the same TypeParser instance be active )
- val newTParams = new scala.collection.mutable.ListBuffer[Symbol]()
- val methodTParams = scala.collection.mutable.Map[Int,Symbol]()
-
- private def sig2typeBounds(tvarCILDef: GenericParamAndConstraints): Type = {
- val ts = new scala.collection.mutable.ListBuffer[Type]
- for (cnstrnt <- tvarCILDef.Constraints) {
- ts += getCLRType(cnstrnt) // TODO we're definitely not at or after erasure, no need to call objToAny, right?
- }
- TypeBounds.upper(intersectionType(ts.toList, clazz))
- // TODO variance???
- }
-
- private def createViewFromTo(viewSuffix : String, fromTpe : Type, toTpe : Type,
- addToboxMethodMap : Boolean, isAddressOf : Boolean) : Symbol = {
- val flags = Flags.JAVA | Flags.STATIC | Flags.IMPLICIT; // todo: static? shouldn't be final instead?
- val viewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(List(fromTpe)), toTpe)
- val vmsym = createMethod(nme.view_ + viewSuffix, flags, viewMethodType, null, true);
- // !!! this used to mutate a mutable map in definitions, but that map became
- // immutable and this kept "working" with a no-op. So now it's commented out
- // since I retired the deprecated code which allowed for that bug.
- //
- // if (addToboxMethodMap) definitions.boxMethod(clazz) = vmsym
-
- if (isAddressOf) clrTypes.addressOfViews += vmsym
- vmsym
- }
-
- private def createDefaultConstructor(typ: MSILType) {
- val attrs = MethodAttributes.Public | MethodAttributes.RTSpecialName | MethodAttributes.SpecialName // TODO instance
- val declType= typ
- val method = new ConstructorInfo(declType, attrs, Array[MSILType]())
- val flags = Flags.JAVA
- val owner = clazz
- val methodSym = owner.newMethod(nme.CONSTRUCTOR, NoPosition, flags)
- val rettype = clazz.tpe
- val mtype = methodType(Array[MSILType](), rettype);
- val mInfo = mtype(methodSym)
- methodSym.setInfo(mInfo)
- instanceDefs.enter(methodSym);
- clrTypes.constructors(methodSym) = method
- }
-
- private def parseClass(typ: MSILType) {
-
- {
- val t4c = clrTypes.types.get(clazz)
- assert(t4c == None || t4c == Some(typ))
- }
- clrTypes.types(clazz) = typ
-
- {
- val c4t = clrTypes.sym2type.get(typ)
- assert(c4t == None || c4t == Some(clazz))
- }
- clrTypes.sym2type(typ) = clazz
-
- if (typ.IsDefined(clrTypes.SCALA_SYMTAB_ATTR, false)) {
- val attrs = typ.GetCustomAttributes(clrTypes.SCALA_SYMTAB_ATTR, false);
- assert (attrs.length == 1, attrs.length);
- val a = attrs(0).asInstanceOf[MSILAttribute];
- assert (a.getConstructor() == clrTypes.SYMTAB_CONSTR);
- val symtab = a.getConstructorArguments()(0).asInstanceOf[Array[Byte]]
- unpickler.unpickle(symtab, 0, clazz, staticModule, typ.FullName);
- val mClass = clrTypes.getType(typ.FullName + "$");
- if (mClass != null) {
- clrTypes.types(statics) = mClass;
- val moduleInstance = mClass.GetField("MODULE$");
- assert (moduleInstance != null, mClass);
- clrTypes.fields(statics) = moduleInstance;
- }
- return
- }
- val flags = translateAttributes(typ)
-
- var clazzBoxed : Symbol = NoSymbol
- var clazzMgdPtr : Symbol = NoSymbol
-
- val canBeTakenAddressOf = (typ.IsValueType || typ.IsEnum) && (typ.FullName != "System.Enum")
-
- if(canBeTakenAddressOf) {
- clazzBoxed = clazz.owner.newClass(clazz.name.toTypeName append newTypeName("Boxed"))
- clazzMgdPtr = clazz.owner.newClass(clazz.name.toTypeName append newTypeName("MgdPtr"))
- clrTypes.mdgptrcls4clssym(clazz) = clazzMgdPtr
- /* adding typMgdPtr to clrTypes.sym2type should happen early (before metadata for supertypes is parsed,
- before metadata for members are parsed) so that clazzMgdPtr can be found by getClRType. */
- val typMgdPtr = MSILType.mkByRef(typ)
- clrTypes.types(clazzMgdPtr) = typMgdPtr
- clrTypes.sym2type(typMgdPtr) = clazzMgdPtr
- /* clazzMgdPtr but not clazzBoxed is mapped by clrTypes.types into an msil.Type instance,
- because there's no metadata-level representation for a "boxed valuetype" */
- val instanceDefsMgdPtr = newScope
- val classInfoMgdPtr = ClassInfoType(definitions.anyvalparam, instanceDefsMgdPtr, clazzMgdPtr)
- clazzMgdPtr.setFlag(flags)
- clazzMgdPtr.setInfo(classInfoMgdPtr)
- }
-
-/* START CLR generics (snippet 1) */
- // first pass
- for (tvarCILDef <- typ.getSortedTVars() ) {
- val tpname = newTypeName(tvarCILDef.Name.replaceAll("!", "")) // TODO are really all type-params named in all assemblies out there? (NO)
- val tpsym = clazz.newTypeParameter(tpname)
- classTParams.put(tvarCILDef.Number, tpsym)
- newTParams += tpsym
- // TODO wouldn't the following also be needed later, i.e. during getCLRType
- tpsym.setInfo(definitions.AnyClass.tpe)
- }
- // second pass
- for (tvarCILDef <- typ.getSortedTVars() ) {
- val tpsym = classTParams(tvarCILDef.Number)
- tpsym.setInfo(sig2typeBounds(tvarCILDef)) // we never skip bounds unlike in forJVM
- }
-/* END CLR generics (snippet 1) */
- val ownTypeParams = newTParams.toList
-/* START CLR generics (snippet 2) */
- if (!ownTypeParams.isEmpty) {
- clazz.setInfo(new TypeParamsType(ownTypeParams))
- if(typ.IsValueType && !typ.IsEnum) {
- clazzBoxed.setInfo(new TypeParamsType(ownTypeParams))
- }
- }
-/* END CLR generics (snippet 2) */
- instanceDefs = newScope
- staticDefs = newScope
-
- val classInfoAsInMetadata = {
- val ifaces: Array[MSILType] = typ.getInterfaces()
- val superType = if (typ.BaseType() != null) getCLRType(typ.BaseType())
- else if (typ.IsInterface()) definitions.ObjectClass.tpe
- else definitions.AnyClass.tpe; // this branch activates for System.Object only.
- // parents (i.e., base type and interfaces)
- val parents = new scala.collection.mutable.ListBuffer[Type]()
- parents += superType
- for (iface <- ifaces) {
- parents += getCLRType(iface) // here the variance doesn't matter
- }
- // methods, properties, events, fields are entered in a moment
- if (canBeTakenAddressOf) {
- val instanceDefsBoxed = newScope
- ClassInfoType(parents.toList, instanceDefsBoxed, clazzBoxed)
- } else
- ClassInfoType(parents.toList, instanceDefs, clazz)
- }
-
- val staticInfo = ClassInfoType(List(), staticDefs, statics)
-
- clazz.setFlag(flags)
-
- if (canBeTakenAddressOf) {
- clazzBoxed.setInfo( if (ownTypeParams.isEmpty) classInfoAsInMetadata
- else genPolyType(ownTypeParams, classInfoAsInMetadata) )
- clazzBoxed.setFlag(flags)
- val rawValueInfoType = ClassInfoType(definitions.anyvalparam, instanceDefs, clazz)
- clazz.setInfo( if (ownTypeParams.isEmpty) rawValueInfoType
- else genPolyType(ownTypeParams, rawValueInfoType) )
- } else {
- clazz.setInfo( if (ownTypeParams.isEmpty) classInfoAsInMetadata
- else genPolyType(ownTypeParams, classInfoAsInMetadata) )
- }
-
- // TODO I don't remember if statics.setInfo and staticModule.setInfo should also know about type params
- statics.setFlag(Flags.JAVA)
- statics.setInfo(staticInfo)
- staticModule.setFlag(Flags.JAVA)
- staticModule.setInfo(statics.tpe)
-
-
- if (canBeTakenAddressOf) {
- // implicit conversions are owned by staticModule.moduleClass
- createViewFromTo("2Boxed", clazz.tpe, clazzBoxed.tpe, addToboxMethodMap = true, isAddressOf = false)
- // createViewFromTo("2Object", clazz.tpe, definitions.ObjectClass.tpe, addToboxMethodMap = true, isAddressOf = false)
- createViewFromTo("2MgdPtr", clazz.tpe, clazzMgdPtr.tpe, addToboxMethodMap = false, isAddressOf = true)
- // a return can't have type managed-pointer, thus a dereference-conversion is not needed
- // similarly, a method can't declare as return type "boxed valuetype"
- if (!typ.IsEnum) {
- // a synthetic default constructor for raw-type allows `new X' syntax
- createDefaultConstructor(typ)
- }
- }
-
- // import nested types
- for (ntype <- typ.getNestedTypes() if !(ntype.IsNestedPrivate || ntype.IsNestedAssembly || ntype.IsNestedFamANDAssem)
- || ntype.IsInterface /* TODO why shouldn't nested ifaces be type-parsed too? */ )
- {
- val loader = new loaders.MsilFileLoader(new MsilFile(ntype))
- val nclazz = statics.newClass(ntype.Name)
- val nmodule = statics.newModule(ntype.Name)
- nclazz.setInfo(loader)
- nmodule.setInfo(loader)
- staticDefs.enter(nclazz)
- staticDefs.enter(nmodule)
-
- assert(nclazz.companionModule == nmodule, nmodule)
- assert(nmodule.companionClass == nclazz, nclazz)
- }
-
- val fields = typ.getFields()
- for (field <- fields
- if !(field.IsPrivate() || field.IsAssembly() || field.IsFamilyAndAssembly)
- if (getCLRType(field.FieldType) != null)
- ) {
- assert (!field.FieldType.IsPointer && !field.FieldType.IsByRef, "CLR requirement")
- val flags = translateAttributes(field);
- val name = newTermName(field.Name);
- val fieldType =
- if (field.IsLiteral && !field.FieldType.IsEnum && isDefinedAtgetConstant(getCLRType(field.FieldType)))
- ConstantType(getConstant(getCLRType(field.FieldType), field.getValue))
- else
- getCLRType(field.FieldType)
- val owner = if (field.IsStatic()) statics else clazz;
- val sym = owner.newValue(name, NoPosition, flags).setInfo(fieldType);
- // TODO: set private within!!! -> look at typechecker/Namers.scala
- (if (field.IsStatic()) staticDefs else instanceDefs).enter(sym);
- clrTypes.fields(sym) = field;
- }
-
- for (constr <- typ.getConstructors() if !constr.IsStatic() && !constr.IsPrivate() &&
- !constr.IsAssembly() && !constr.IsFamilyAndAssembly() && !constr.HasPtrParamOrRetType())
- createMethod(constr);
-
- // initially also contains getters and setters of properties.
- val methodsSet = new mutable.HashSet[MethodInfo]();
- methodsSet ++= typ.getMethods();
-
- for (prop <- typ.getProperties) {
- val propType: Type = getCLSType(prop.PropertyType);
- if (propType != null) {
- val getter: MethodInfo = prop.GetGetMethod(true);
- val setter: MethodInfo = prop.GetSetMethod(true);
- var gparamsLength: Int = -1;
- if (!(getter == null || getter.IsPrivate || getter.IsAssembly
- || getter.IsFamilyAndAssembly || getter.HasPtrParamOrRetType))
- {
- assert(prop.PropertyType == getter.ReturnType);
- val gparams: Array[ParameterInfo] = getter.GetParameters();
- gparamsLength = gparams.length;
- val name: TermName = if (gparamsLength == 0) prop.Name else nme.apply;
- val flags = translateAttributes(getter);
- val owner: Symbol = if (getter.IsStatic) statics else clazz;
- val methodSym = owner.newMethod(name, NoPosition, flags)
- val mtype: Type = if (gparamsLength == 0) NullaryMethodType(propType) // .NET properties can't be polymorphic
- else methodType(getter, getter.ReturnType)(methodSym)
- methodSym.setInfo(mtype);
- methodSym.setFlag(Flags.ACCESSOR);
- (if (getter.IsStatic) staticDefs else instanceDefs).enter(methodSym)
- clrTypes.methods(methodSym) = getter;
- methodsSet -= getter;
- }
- if (!(setter == null || setter.IsPrivate || setter.IsAssembly
- || setter.IsFamilyAndAssembly || setter.HasPtrParamOrRetType))
- {
- val sparams: Array[ParameterInfo] = setter.GetParameters()
- if(getter != null)
- assert(getter.IsStatic == setter.IsStatic);
- assert(setter.ReturnType == clrTypes.VOID);
- if(getter != null)
- assert(sparams.length == gparamsLength + 1, "" + getter + "; " + setter);
-
- val name: TermName = if (gparamsLength == 0) nme.getterToSetter(prop.Name)
- else nme.update;
- val flags = translateAttributes(setter);
- val mtype = methodType(setter, definitions.UnitClass.tpe);
- val owner: Symbol = if (setter.IsStatic) statics else clazz;
- val methodSym = owner.newMethod(name, NoPosition, flags)
- methodSym.setInfo(mtype(methodSym))
- methodSym.setFlag(Flags.ACCESSOR);
- (if (setter.IsStatic) staticDefs else instanceDefs).enter(methodSym);
- clrTypes.methods(methodSym) = setter;
- methodsSet -= setter;
- }
- }
- }
-
-/* for (event <- typ.GetEvents) {
- // adding += and -= methods to add delegates to an event.
- // raising the event ist not possible from outside the class (this is so
- // generally in .net world)
- val adder: MethodInfo = event.GetAddMethod();
- val remover: MethodInfo = event.GetRemoveMethod();
- if (!(adder == null || adder.IsPrivate || adder.IsAssembly
- || adder.IsFamilyAndAssembly))
- {
- assert(adder.ReturnType == clrTypes.VOID);
- assert(adder.GetParameters().map(_.ParameterType).toList == List(event.EventHandlerType));
- val name = encode("+=");
- val flags = translateAttributes(adder);
- val mtype: Type = methodType(adder, adder.ReturnType);
- createMethod(name, flags, mtype, adder, adder.IsStatic)
- methodsSet -= adder;
- }
- if (!(remover == null || remover.IsPrivate || remover.IsAssembly
- || remover.IsFamilyAndAssembly))
- {
- assert(remover.ReturnType == clrTypes.VOID);
- assert(remover.GetParameters().map(_.ParameterType).toList == List(event.EventHandlerType));
- val name = encode("-=");
- val flags = translateAttributes(remover);
- val mtype: Type = methodType(remover, remover.ReturnType);
- createMethod(name, flags, mtype, remover, remover.IsStatic)
- methodsSet -= remover;
- }
- } */
-
-/* Adds view amounting to syntax sugar for a CLR implicit overload.
- The long-form syntax can also be supported if "methodsSet -= method" (last statement) is removed.
-
- /* remember, there's typ.getMethods and type.GetMethods */
- for (method <- typ.getMethods)
- if(!method.HasPtrParamOrRetType &&
- method.IsPublic && method.IsStatic && method.IsSpecialName &&
- method.Name == "op_Implicit") {
- // create a view: typ => method's return type
- val viewRetType: Type = getCLRType(method.ReturnType)
- val viewParamTypes: List[Type] = method.GetParameters().map(_.ParameterType).map(getCLSType).toList;
- /* The spec says "The operator method shall be defined as a static method on either the operand or return type."
- * We don't consider the declaring type for the purposes of definitions.functionType,
- * instead we regard op_Implicit's argument type and return type as defining the view's signature.
- */
- if (viewRetType != null && !viewParamTypes.contains(null)) {
- /* The check above applies e.g. to System.Decimal that has a conversion from UInt16, a non-CLS type, whose CLS-mapping returns null */
- val funType: Type = definitions.functionType(viewParamTypes, viewRetType);
- val flags = Flags.JAVA | Flags.STATIC | Flags.IMPLICIT; // todo: static? shouldn't be final instead?
- val viewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(viewParamTypes), funType)
- val vmsym = createMethod(nme.view_, flags, viewMethodType, method, true);
- methodsSet -= method;
- }
- }
-*/
-
- for (method <- methodsSet.iterator)
- if (!method.IsPrivate() && !method.IsAssembly() && !method.IsFamilyAndAssembly()
- && !method.HasPtrParamOrRetType)
- createMethod(method);
-
- // Create methods and views for delegate support
- if (clrTypes.isDelegateType(typ)) {
- createDelegateView(typ)
- createDelegateChainers(typ)
- }
-
- // for enumerations introduce comparison and bitwise logical operations;
- // the backend will recognize them and replace them with comparison or
- // bitwise logical operations on the primitive underlying type
-
- if (typ.IsEnum) {
- val ENUM_CMP_NAMES = List(nme.EQ, nme.NE, nme.LT, nme.LE, nme.GT, nme.GE);
- val ENUM_BIT_LOG_NAMES = List(nme.OR, nme.AND, nme.XOR);
-
- val flags = Flags.JAVA | Flags.FINAL
- for (cmpName <- ENUM_CMP_NAMES) {
- val enumCmp = clazz.newMethod(cmpName)
- val enumCmpType = JavaMethodType(enumCmp.newSyntheticValueParams(List(clazz.tpe)), definitions.BooleanClass.tpe)
- enumCmp.setFlag(flags).setInfo(enumCmpType)
- instanceDefs.enter(enumCmp)
- }
-
- for (bitLogName <- ENUM_BIT_LOG_NAMES) {
- val enumBitLog = clazz.newMethod(bitLogName)
- val enumBitLogType = JavaMethodType(enumBitLog.newSyntheticValueParams(List(clazz.tpe)), clazz.tpe /* was classInfo, infinite typer */)
- enumBitLog.setFlag(flags).setInfo(enumBitLogType)
- instanceDefs.enter(enumBitLog)
- }
- }
-
- } // parseClass
-
- private def populateMethodTParams(method: MethodBase, methodSym: MethodSymbol) : List[Symbol] = {
- if(!method.IsGeneric) Nil
- else {
- methodTParams.clear
- val newMethodTParams = new scala.collection.mutable.ListBuffer[Symbol]()
-
- // first pass
- for (mvarCILDef <- method.getSortedMVars() ) {
- val mtpname = newTypeName(mvarCILDef.Name.replaceAll("!", "")) // TODO are really all method-level-type-params named in all assemblies out there? (NO)
- val mtpsym = methodSym.newTypeParameter(mtpname)
- methodTParams.put(mvarCILDef.Number, mtpsym)
- newMethodTParams += mtpsym
- // TODO wouldn't the following also be needed later, i.e. during getCLRType
- mtpsym.setInfo(definitions.AnyClass.tpe)
- }
- // second pass
- for (mvarCILDef <- method.getSortedMVars() ) {
- val mtpsym = methodTParams(mvarCILDef.Number)
- mtpsym.setInfo(sig2typeBounds(mvarCILDef)) // we never skip bounds unlike in forJVM
- }
-
- newMethodTParams.toList
- }
- }
-
- private def createMethod(method: MethodBase) {
-
- val flags = translateAttributes(method);
- val owner = if (method.IsStatic()) statics else clazz;
- val methodSym = owner.newMethod(getName(method), NoPosition, flags)
- /* START CLR generics (snippet 3) */
- val newMethodTParams = populateMethodTParams(method, methodSym)
- /* END CLR generics (snippet 3) */
-
- val rettype = if (method.IsConstructor()) clazz.tpe
- else getCLSType(method.asInstanceOf[MethodInfo].ReturnType);
- if (rettype == null) return;
- val mtype = methodType(method, rettype);
- if (mtype == null) return;
-/* START CLR generics (snippet 4) */
- val mInfo = if (method.IsGeneric) genPolyType(newMethodTParams, mtype(methodSym))
- else mtype(methodSym)
-/* END CLR generics (snippet 4) */
-/* START CLR non-generics (snippet 4)
- val mInfo = mtype(methodSym)
- END CLR non-generics (snippet 4) */
- methodSym.setInfo(mInfo)
- (if (method.IsStatic()) staticDefs else instanceDefs).enter(methodSym);
- if (method.IsConstructor())
- clrTypes.constructors(methodSym) = method.asInstanceOf[ConstructorInfo]
- else clrTypes.methods(methodSym) = method.asInstanceOf[MethodInfo];
- }
-
- private def createMethod(name: TermName, flags: Long, args: Array[MSILType], retType: MSILType, method: MethodInfo, statik: Boolean): Symbol = {
- val mtype = methodType(args, getCLSType(retType))
- assert(mtype != null)
- createMethod(name, flags, mtype, method, statik)
- }
-
- private def createMethod(name: TermName, flags: Long, mtype: Symbol => Type, method: MethodInfo, statik: Boolean): Symbol = {
- val methodSym: Symbol = (if (statik) statics else clazz).newMethod(name)
- methodSym.setFlag(flags).setInfo(mtype(methodSym))
- (if (statik) staticDefs else instanceDefs).enter(methodSym)
- if (method != null)
- clrTypes.methods(methodSym) = method
- methodSym
- }
-
- private def createDelegateView(typ: MSILType) = {
- val invoke: MethodInfo = typ.GetMember("Invoke")(0).asInstanceOf[MethodInfo];
- val invokeRetType: Type = getCLRType(invoke.ReturnType);
- val invokeParamTypes: List[Type] =invoke.GetParameters().map(_.ParameterType).map(getCLSType).toList;
- val funType: Type = definitions.functionType(invokeParamTypes, invokeRetType);
-
- val typClrType: Type = getCLRType(typ);
- val flags = Flags.JAVA | Flags.STATIC | Flags.IMPLICIT; // todo: static? think not needed
-
- // create the forward view: delegate => function
- val delegateParamTypes: List[Type] = List(typClrType);
- // not ImplicitMethodType, this is for methods with implicit parameters (not implicit methods)
- val forwardViewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(delegateParamTypes), funType)
- val fmsym = createMethod(nme.view_, flags, forwardViewMethodType, null, true);
-
- // create the backward view: function => delegate
- val functionParamTypes: List[Type] = List(funType);
- val backwardViewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(functionParamTypes), typClrType)
- val bmsym = createMethod(nme.view_, flags, backwardViewMethodType, null, true);
- }
-
- private def createDelegateChainers(typ: MSILType) = {
- val flags: Long = Flags.JAVA | Flags.FINAL
- val args: Array[MSILType] = Array(typ)
-
- var s = createMethod(encode("+="), flags, args, clrTypes.VOID, clrTypes.DELEGATE_COMBINE, false);
- s = createMethod(encode("-="), flags, args, clrTypes.VOID, clrTypes.DELEGATE_REMOVE, false);
-
- s = createMethod(nme.PLUS, flags, args, typ, clrTypes.DELEGATE_COMBINE, false);
- s = createMethod(nme.MINUS, flags, args, typ, clrTypes.DELEGATE_REMOVE, false);
- }
-
- private def getName(method: MethodBase): TermName = {
-
- def operatorOverload(name : String, paramsArity : Int) : Option[Name] = paramsArity match {
- case 1 => name match {
- // PartitionI.10.3.1
- case "op_Decrement" => Some(encode("--"))
- case "op_Increment" => Some(encode("++"))
- case "op_UnaryNegation" => Some(nme.UNARY_-)
- case "op_UnaryPlus" => Some(nme.UNARY_+)
- case "op_LogicalNot" => Some(nme.UNARY_!)
- case "op_OnesComplement" => Some(nme.UNARY_~)
- /* op_True and op_False have no operator symbol assigned,
- Other methods that will have to be written in full are:
- op_AddressOf & (unary)
- op_PointerDereference * (unary) */
- case _ => None
- }
- case 2 => name match {
- // PartitionI.10.3.2
- case "op_Addition" => Some(nme.ADD)
- case "op_Subtraction" => Some(nme.SUB)
- case "op_Multiply" => Some(nme.MUL)
- case "op_Division" => Some(nme.DIV)
- case "op_Modulus" => Some(nme.MOD)
- case "op_ExclusiveOr" => Some(nme.XOR)
- case "op_BitwiseAnd" => Some(nme.AND)
- case "op_BitwiseOr" => Some(nme.OR)
- case "op_LogicalAnd" => Some(nme.ZAND)
- case "op_LogicalOr" => Some(nme.ZOR)
- case "op_LeftShift" => Some(nme.LSL)
- case "op_RightShift" => Some(nme.ASR)
- case "op_Equality" => Some(nme.EQ)
- case "op_GreaterThan" => Some(nme.GT)
- case "op_LessThan" => Some(nme.LT)
- case "op_Inequality" => Some(nme.NE)
- case "op_GreaterThanOrEqual" => Some(nme.GE)
- case "op_LessThanOrEqual" => Some(nme.LE)
-
- /* op_MemberSelection is reserved in Scala */
-
- /* The standard does not assign operator symbols to op_Assign, op_SignedRightShift, op_UnsignedRightShift,
- * and op_UnsignedRightShiftAssignment, so those names will be used instead to invoke those methods. */
-
- /*
- The remaining binary operators are not overloaded in C# and are therefore not in widespread use. They have to be written in full.
-
- op_RightShiftAssignment >>=
- op_MultiplicationAssignment *=
- op_PointerToMemberSelection ->*
- op_SubtractionAssignment -=
- op_ExclusiveOrAssignment ^=
- op_LeftShiftAssignment <<=
- op_ModulusAssignment %=
- op_AdditionAssignment +=
- op_BitwiseAndAssignment &=
- op_BitwiseOrAssignment |=
- op_Comma ,
- op_DivisionAssignment /=
- */
- case _ => None
- }
- case _ => None
- }
-
- if (method.IsConstructor()) return nme.CONSTRUCTOR;
- val name = method.Name;
- if (method.IsStatic()) {
- if(method.IsSpecialName) {
- val paramsArity = method.GetParameters().size
- // handle operator overload, otherwise handle as any static method
- val operName = operatorOverload(name, paramsArity)
- if (operName.isDefined) { return operName.get; }
- }
- return newTermName(name);
- }
- val params = method.GetParameters();
- name match {
- case "GetHashCode" if (params.length == 0) => nme.hashCode_;
- case "ToString" if (params.length == 0) => nme.toString_;
- case "Finalize" if (params.length == 0) => nme.finalize_;
- case "Equals" if (params.length == 1 && params(0).ParameterType == clrTypes.OBJECT) =>
- nme.equals_;
- case "Invoke" if (clrTypes.isDelegateType(method.DeclaringType)) => nme.apply;
- case _ => newTermName(name);
- }
- }
-
- //##########################################################################
-
- private def methodType(method: MethodBase, rettype: MSILType): Symbol => Type = {
- val rtype = getCLSType(rettype);
- if (rtype == null) null else methodType(method, rtype);
- }
-
- /** Return a method type for the given method. */
- private def methodType(method: MethodBase, rettype: Type): Symbol => Type =
- methodType(method.GetParameters().map(_.ParameterType), rettype);
-
- /** Return a method type for the provided argument types and return type. */
- private def methodType(argtypes: Array[MSILType], rettype: Type): Symbol => Type = {
- def paramType(typ: MSILType): Type =
- if (typ eq clrTypes.OBJECT) definitions.AnyClass.tpe // TODO a hack to compile scalalib, should be definitions.AnyRefClass.tpe
- else getCLSType(typ);
- val ptypes = argtypes.map(paramType).toList;
- if (ptypes.contains(null)) null
- else method => JavaMethodType(method.newSyntheticValueParams(ptypes), rettype);
- }
-
- //##########################################################################
-
- private def getClassType(typ: MSILType): Type = {
- assert(typ != null);
- val res = rootMirror.getClassByName(typ.FullName.replace('+', '.') : TypeName).tpe;
- //if (res.isError())
- // global.reporter.error("unknown class reference " + type.FullName);
- res
- }
-
- private def getCLSType(typ: MSILType): Type = { // getCLS returns non-null for types GenMSIL can handle, be they CLS-compliant or not
- if (typ.IsTMVarUsage())
- /* START CLR generics (snippet 5) */
- getCLRType(typ)
- /* END CLR generics (snippet 5) */
- /* START CLR non-generics (snippet 5)
- null
- END CLR non-generics (snippet 5) */
- else if ( /* TODO hack: if UBYTE is uncommented, "ambiguous reference to overloaded definition" ensues, for example for System.Math.Max(x, y) */
- typ == clrTypes.USHORT || typ == clrTypes.UINT || typ == clrTypes.ULONG
- /* || typ == clrTypes.UBYTE */
- || typ.IsNotPublic() || typ.IsNestedPrivate()
- || typ.IsNestedAssembly() || typ.IsNestedFamANDAssem()
- || typ.IsPointer()
- || (typ.IsArray() && getCLRType(typ.GetElementType()) == null) /* TODO hack: getCLR instead of getCLS */
- || (typ.IsByRef() && !typ.GetElementType().CanBeTakenAddressOf()))
- null
- else
- getCLRType(typ)
- }
-
- private def getCLRTypeIfPrimitiveNullOtherwise(typ: MSILType): Type =
- if (typ == clrTypes.OBJECT)
- definitions.ObjectClass.tpe;
- else if (typ == clrTypes.VALUE_TYPE)
- definitions.AnyValClass.tpe
- else if (typ == clrTypes.STRING)
- definitions.StringClass.tpe;
- else if (typ == clrTypes.VOID)
- definitions.UnitClass.tpe
- else if (typ == clrTypes.BOOLEAN)
- definitions.BooleanClass.tpe
- else if (typ == clrTypes.CHAR)
- definitions.CharClass.tpe
- else if ((typ == clrTypes.BYTE) || (typ == clrTypes.UBYTE)) // TODO U... is a hack to compile scalalib
- definitions.ByteClass.tpe
- else if ((typ == clrTypes.SHORT) || (typ == clrTypes.USHORT)) // TODO U... is a hack to compile scalalib
- definitions.ShortClass.tpe
- else if ((typ == clrTypes.INT) || (typ == clrTypes.UINT)) // TODO U... is a hack to compile scalalib
- definitions.IntClass.tpe
- else if ((typ == clrTypes.LONG) || (typ == clrTypes.ULONG)) // TODO U... is a hack to compile scalalib
- definitions.LongClass.tpe
- else if (typ == clrTypes.FLOAT)
- definitions.FloatClass.tpe
- else if (typ == clrTypes.DOUBLE)
- definitions.DoubleClass.tpe
- else null
-
-
- private def getCLRType(tMSIL: MSILType): Type = {
- var res = getCLRTypeIfPrimitiveNullOtherwise(tMSIL)
- if (res != null) res
- else if (tMSIL.isInstanceOf[ConstructedType]) {
- val ct = tMSIL.asInstanceOf[ConstructedType]
- /* START CLR generics (snippet 6) */
- val cttpArgs = ct.typeArgs.map(tmsil => getCLRType(tmsil)).toList
- appliedType(getCLRType(ct.instantiatedType), cttpArgs)
- /* END CLR generics (snippet 6) */
- /* START CLR non-generics (snippet 6)
- getCLRType(ct.instantiatedType)
- END CLR non-generics (snippet 6) */
- } else if (tMSIL.isInstanceOf[TMVarUsage]) {
- /* START CLR generics (snippet 7) */
- val tVarUsage = tMSIL.asInstanceOf[TMVarUsage]
- val tVarNumber = tVarUsage.Number
- if (tVarUsage.isTVar) classTParams(tVarNumber).typeConstructor // shouldn't fail, just return definitions.AnyClass.tpe at worst
- else methodTParams(tVarNumber).typeConstructor // shouldn't fail, just return definitions.AnyClass.tpe at worst
- /* END CLR generics (snippet 7) */
- /* START CLR non-generics (snippet 7)
- null // definitions.ObjectClass.tpe
- END CLR non-generics (snippet 7) */
- } else if (tMSIL.IsArray()) {
- var elemtp = getCLRType(tMSIL.GetElementType())
- // cut&pasted from ClassfileParser
- // make unbounded Array[T] where T is a type variable into Array[T with Object]
- // (this is necessary because such arrays have a representation which is incompatible
- // with arrays of primitive types).
- // TODO does that incompatibility also apply to .NET?
- if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< definitions.ObjectClass.tpe))
- elemtp = intersectionType(List(elemtp, definitions.ObjectClass.tpe))
- appliedType(definitions.ArrayClass.tpe, List(elemtp))
- } else {
- res = clrTypes.sym2type.get(tMSIL) match {
- case Some(sym) => sym.tpe
- case None => if (tMSIL.IsByRef && tMSIL.GetElementType.IsValueType) {
- val addressed = getCLRType(tMSIL.GetElementType)
- val clasym = addressed.typeSymbolDirect // TODO should be .typeSymbol?
- clasym.info.load(clasym)
- val secondAttempt = clrTypes.sym2type.get(tMSIL)
- secondAttempt match { case Some(sym) => sym.tpe
- case None => null
- }
- } else getClassType(tMSIL)
- }
- if (res == null)
- null // TODO new RuntimeException()
- else res
- }
- }
-
- // the values are Java-Box-Classes (e.g. Integer, Boolean, Character)
- // java.lang.Number to get the value (if a number, not for boolean, character)
- // see ch.epfl.lamp.compiler.msil.util.PEStream.java
- def getConstant(constType: Type, value: Object): Constant = {
- val typeClass = constType.typeSymbol
- if (typeClass == definitions.BooleanClass)
- Constant(value.asInstanceOf[java.lang.Boolean].booleanValue)
- else if (typeClass == definitions.ByteClass)
- Constant(value.asInstanceOf[java.lang.Number].byteValue)
- else if (typeClass == definitions.ShortClass)
- Constant(value.asInstanceOf[java.lang.Number].shortValue)
- else if (typeClass == definitions.CharClass)
- Constant(value.asInstanceOf[java.lang.Character].charValue)
- else if (typeClass == definitions.IntClass)
- Constant(value.asInstanceOf[java.lang.Number].intValue)
- else if (typeClass == definitions.LongClass)
- Constant(value.asInstanceOf[java.lang.Number].longValue)
- else if (typeClass == definitions.FloatClass)
- Constant(value.asInstanceOf[java.lang.Number].floatValue)
- else if (typeClass == definitions.DoubleClass)
- Constant(value.asInstanceOf[java.lang.Number].doubleValue)
- else if (typeClass == definitions.StringClass)
- Constant(value.asInstanceOf[java.lang.String])
- else
- abort("illegal value: " + value + ", class-symbol: " + typeClass)
- }
-
- def isDefinedAtgetConstant(constType: Type): Boolean = {
- val typeClass = constType.typeSymbol
- if ( (typeClass == definitions.BooleanClass)
- || (typeClass == definitions.ByteClass)
- || (typeClass == definitions.ShortClass)
- || (typeClass == definitions.CharClass)
- || (typeClass == definitions.IntClass)
- || (typeClass == definitions.LongClass)
- || (typeClass == definitions.FloatClass)
- || (typeClass == definitions.DoubleClass)
- || (typeClass == definitions.StringClass)
- )
- true
- else
- false
- }
-
- private def translateAttributes(typ: MSILType): Long = {
- var flags: Long = Flags.JAVA;
- if (typ.IsNotPublic() || typ.IsNestedPrivate()
- || typ.IsNestedAssembly() || typ.IsNestedFamANDAssem())
- flags = flags | Flags.PRIVATE;
- else if (typ.IsNestedFamily() || typ.IsNestedFamORAssem())
- flags = flags | Flags.PROTECTED;
- if (typ.IsAbstract())
- flags = flags | Flags.ABSTRACT;
- if (typ.IsSealed())
- flags = flags | Flags.FINAL;
- if (typ.IsInterface())
- flags = flags | Flags.INTERFACE | Flags.TRAIT | Flags.ABSTRACT;
-
- flags
- }
-
- private def translateAttributes(field: FieldInfo): Long = {
- var flags: Long = Flags.JAVA;
- if (field.IsPrivate() || field.IsAssembly() || field.IsFamilyAndAssembly())
- flags = flags | Flags.PRIVATE;
- else if (field.IsFamily() || field.IsFamilyOrAssembly())
- flags = flags | Flags.PROTECTED;
- if (field.IsInitOnly() || field.IsLiteral())
- flags = flags | Flags.FINAL;
- else
- flags = flags | Flags.MUTABLE;
- if (field.IsStatic)
- flags = flags | Flags.STATIC
-
- flags
- }
-
- private def translateAttributes(method: MethodBase): Long = {
- var flags: Long = Flags.JAVA;
- if (method.IsPrivate() || method.IsAssembly() || method.IsFamilyAndAssembly())
- flags = flags | Flags.PRIVATE;
- else if (method.IsFamily() || method.IsFamilyOrAssembly())
- flags = flags | Flags.PROTECTED;
- if (method.IsAbstract())
- flags = flags | Flags.DEFERRED;
- if (method.IsStatic)
- flags = flags | Flags.STATIC
-
- flags
- }
-}
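
The getName method deleted above maps CLR special-name methods (op_Addition, op_UnaryNegation, ...) onto Scala operator names, following the Partition I sections cited in its comments. A minimal standalone sketch of the same mapping, using plain strings instead of the compiler's nme constants (the object and method names here are made up for illustration):

    // Illustrative sketch: CLR operator method name -> Scala operator symbol,
    // keyed by parameter arity, mirroring the match in the removed getName.
    object ClrOperatorNames {
      private val unary = Map(
        "op_UnaryNegation"  -> "unary_-",
        "op_UnaryPlus"      -> "unary_+",
        "op_LogicalNot"     -> "unary_!",
        "op_OnesComplement" -> "unary_~")

      private val binary = Map(
        "op_Addition"   -> "+",  "op_Subtraction" -> "-", "op_Multiply"    -> "*",
        "op_Division"   -> "/",  "op_Modulus"     -> "%", "op_Equality"    -> "==",
        "op_Inequality" -> "!=", "op_LessThan"    -> "<", "op_GreaterThan" -> ">")

      // Returns the Scala operator name, if the CLR name has one for that arity.
      def scalaName(clrName: String, paramsArity: Int): Option[String] = paramsArity match {
        case 1 => unary.get(clrName)
        case 2 => binary.get(clrName)
        case _ => None
      }
    }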
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index bacd8c39e1..5fbc15f858 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -8,8 +8,6 @@ package transform
import symtab._
import Flags._
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
abstract class AddInterfaces extends InfoTransform { self: Erasure =>
import global._ // the global environment
@@ -94,7 +92,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
impl.typeOfThis = iface.typeOfThis
impl.thisSym setName iface.thisSym.name
}
- impl.sourceFile = iface.sourceFile
+ impl.associatedFile = iface.sourceFile
if (inClass)
iface.owner.info.decls enter impl
@@ -111,7 +109,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
def implClass(iface: Symbol): Symbol = {
iface.info
- implClassMap.getOrElse(iface, atPhase(implClassPhase) {
+ implClassMap.getOrElse(iface, enteringPhase(implClassPhase) {
if (iface.implClass eq NoSymbol)
debuglog(s"${iface.fullLocationString} has no implClass yet, creating it now.")
else
@@ -196,7 +194,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
case PolyType(_, restpe) =>
implType(restpe)
}
- implSym setInfo implType(beforeErasure(iface.info))
+ implSym setInfo implType(enteringErasure(iface.info))
}
override def load(clazz: Symbol) { complete(clazz) }
@@ -317,10 +315,10 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
// body until now, because the typer knows that Any has no
// constructor and won't accept a call to super.init.
assert((clazz isSubClass AnyValClass) || clazz.info.parents.isEmpty, clazz)
- Block(List(Apply(gen.mkSuperSelect, Nil)), expr)
+ Block(List(Apply(gen.mkSuperInitCall, Nil)), expr)
case Block(stats, expr) =>
- // needs `hasSymbol` check because `supercall` could be a block (named / default args)
+ // needs `hasSymbolField` check because `supercall` could be a block (named / default args)
val (presuper, supercall :: rest) = stats span (t => t.hasSymbolWhich(_ hasFlag PRESUPER))
treeCopy.Block(tree, presuper ::: (supercall :: mixinConstructorCalls ::: rest), expr)
}
@@ -352,7 +350,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
val mix1 = mix
if (mix == tpnme.EMPTY) mix
else {
- val ps = beforeErasure {
+ val ps = enteringErasure {
sym.info.parents dropWhile (p => p.symbol.name != mix)
}
assert(!ps.isEmpty, tree);
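
The hunks above (and in the files that follow) rename the phase-travel helpers: atPhase becomes enteringPhase, and beforeErasure/afterErasure become enteringErasure/exitingErasure. A rough sketch of the idiom behind those helpers, with the Phase type and the current-phase pointer abstracted away (the compiler's own implementation differs in detail):

    // Sketch only: run `op` with the symbol table as seen on entry to `ph`,
    // then restore the current phase.
    trait PhaseTravelSketch {
      type Phase
      var phase: Phase   // stand-in for the compiler's current-phase pointer

      def enteringPhaseSketch[T](ph: Phase)(op: => T): T = {
        val saved = phase
        phase = ph
        try op finally phase = saved
      }
    }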
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index 7a0b034fd0..a871c72fc2 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -46,7 +46,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
result
}
private def transformTemplate(tree: Tree) = {
- val Template(parents, self, body) = tree
+ val Template(_, _, body) = tree
clearStatics()
val newBody = transformTrees(body)
val templ = deriveTemplate(tree)(_ => transformTrees(newStaticMembers.toList) ::: newBody)
@@ -69,12 +69,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
case "mono-cache" => MONO_CACHE
case "poly-cache" => POLY_CACHE
}
-
- def shouldRewriteTry(tree: Try) = {
- val sym = tree.tpe.typeSymbol
- forMSIL && (sym != UnitClass) && (sym != NothingClass)
- }
-
private def typedWithPos(pos: Position)(tree: Tree) =
localTyper.typedPos(pos)(tree)
@@ -120,7 +114,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
}
def addStaticMethodToClass(forBody: (Symbol, Symbol) => Tree): Symbol = {
- val methSym = currentClass.newMethod(mkTerm(nme.reflMethodName), ad.pos, STATIC | SYNTHETIC)
+ val methSym = currentClass.newMethod(mkTerm(nme.reflMethodName.toString), ad.pos, STATIC | SYNTHETIC)
val params = methSym.newSyntheticValueParams(List(ClassClass.tpe))
methSym setInfoAndEnter MethodType(params, MethodClass.tpe)
@@ -133,7 +127,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
ArrayValue(TypeTree(ClassClass.tpe), paramTypes map LIT)
/* ... */
- def reflectiveMethodCache(method: String, paramTypes: List[Type]): Symbol = dispatchType match {
+ def reflectiveMethodCache(method: String, paramTypes: List[Type]): Symbol = dispatchType() match {
case NO_CACHE =>
/* Implementation of the cache is as follows for method "def xyz(a: A, b: B)":
@@ -238,13 +232,13 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val methodSym = reflMethodSym.newVariable(mkTerm("method"), ad.pos) setInfo MethodClass.tpe
BLOCK(
- VAR(methodCache) === getPolyCache,
+ VAL(methodCache) === getPolyCache,
IF (REF(methodCache) OBJ_EQ NULL) THEN BLOCK(
REF(methodCache) === NEW(TypeTree(EmptyMethodCacheClass.tpe)),
REF(reflPolyCacheSym) === gen.mkSoftRef(REF(methodCache))
) ENDIF,
- VAR(methodSym) === (REF(methodCache) DOT methodCache_find)(REF(forReceiverSym)),
+ VAL(methodSym) === (REF(methodCache) DOT methodCache_find)(REF(forReceiverSym)),
IF (REF(methodSym) OBJ_NE NULL) .
THEN (Return(REF(methodSym)))
ELSE {
@@ -362,7 +356,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
// reflective method call machinery
val invokeName = MethodClass.tpe member nme.invoke_ // scala.reflect.Method.invoke(...)
def cache = REF(reflectiveMethodCache(ad.symbol.name.toString, paramTypes)) // cache Symbol
- def lookup = Apply(cache, List(qual1() GETCLASS)) // get Method object from cache
+ def lookup = Apply(cache, List(qual1() GETCLASS())) // get Method object from cache
def invokeArgs = ArrayValue(TypeTree(ObjectClass.tpe), params) // args for invocation
def invocation = (lookup DOT invokeName)(qual1(), invokeArgs) // .invoke(qual1, ...)
@@ -555,10 +549,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* constructor. */
case Template(parents, self, body) =>
localTyper = typer.atOwner(tree, currentClass)
- if (forMSIL) savingStatics( transformTemplate(tree) )
- else transformTemplate(tree)
+ transformTemplate(tree)
- case Literal(c) if (c.tag == ClazzTag) && !forMSIL=>
+ case Literal(c) if c.tag == ClazzTag =>
val tpe = c.typeValue
typedWithPos(tree.pos) {
if (isPrimitiveValueClass(tpe.typeSymbol)) {
@@ -571,24 +564,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
else tree
}
- /* MSIL requires that the stack is empty at the end of a try-block.
- * Hence, here we rewrite all try blocks whose result type is not {Unit, All} so that they
- * store their result in a local variable. The catch blocks are adjusted as well.
- * The try tree is substituted by a block whose result expression is a read of that variable. */
- case theTry @ Try(block, catches, finalizer) if shouldRewriteTry(theTry) =>
- def transformTry = {
- val tpe = theTry.tpe.widen
- val tempVar = currentOwner.newVariable(mkTerm(nme.EXCEPTION_RESULT_PREFIX), theTry.pos).setInfo(tpe)
- def assignBlock(rhs: Tree) = super.transform(BLOCK(Ident(tempVar) === transform(rhs)))
-
- val newBlock = assignBlock(block)
- val newCatches = for (CaseDef(pattern, guard, body) <- catches) yield
- (CASE(super.transform(pattern)) IF (super.transform(guard))) ==> assignBlock(body)
- val newTry = Try(newBlock, newCatches, super.transform(finalizer))
-
- typedWithPos(theTry.pos)(BLOCK(VAL(tempVar) === EmptyTree, newTry, Ident(tempVar)))
- }
- transformTry
/*
* This transformation should identify Scala symbol invocations in the tree and replace them
* with references to a static member. Also, whenever a class has at least a single symbol invocation
@@ -657,9 +632,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
// create a symbol for the static field
val stfieldSym = (
currentClass.newVariable(mkTerm("symbol$"), pos, PRIVATE | STATIC | SYNTHETIC | FINAL)
- setInfo SymbolClass.tpe
+ setInfoAndEnter SymbolClass.tpe
)
- currentClass.info.decls enter stfieldSym
// create field definition and initialization
val stfieldDef = theTyper.typedPos(pos)(VAL(stfieldSym) === rhs)
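
The reflectiveMethodCache code touched above caches java.lang.reflect.Method lookups for structural-type calls, with no-cache, mono-cache and poly-cache strategies selected by dispatchType. For orientation, a hand-written sketch of the mono-cache idea (the class and member names are illustrative, not what CleanUp actually emits):

    import java.lang.reflect.Method

    // Illustrative mono-cache: remember the last (receiver class -> Method) pair,
    // and fall back to a fresh reflective lookup when the receiver class changes.
    class MonoMethodCache(methodName: String, paramTypes: Array[Class[_]]) {
      private var cachedClass: Class[_] = null
      private var cachedMethod: Method  = null

      def find(receiverClass: Class[_]): Method = {
        if (receiverClass ne cachedClass) {
          cachedMethod = receiverClass.getMethod(methodName, paramTypes: _*)
          cachedClass  = receiverClass
        }
        cachedMethod
      }
    }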
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index 4891ef2fd1..a4a6c3ff31 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -24,8 +24,8 @@ abstract class Constructors extends Transform with ast.TreeDSL {
protected def newTransformer(unit: CompilationUnit): Transformer =
new ConstructorTransformer(unit)
- private val guardedCtorStats: mutable.Map[Symbol, List[Tree]] = perRunCaches.newMap[Symbol, List[Tree]]
- private val ctorParams: mutable.Map[Symbol, List[Symbol]] = perRunCaches.newMap[Symbol, List[Symbol]]
+ private val guardedCtorStats: mutable.Map[Symbol, List[Tree]] = perRunCaches.newMap[Symbol, List[Tree]]()
+ private val ctorParams: mutable.Map[Symbol, List[Symbol]] = perRunCaches.newMap[Symbol, List[Symbol]]()
class ConstructorTransformer(unit: CompilationUnit) extends Transformer {
@@ -60,7 +60,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
// The constructor parameter corresponding to an accessor
def parameter(acc: Symbol): Symbol =
- parameterNamed(nme.getterName(acc.originalName))
+ parameterNamed(nme.getterName(acc.originalName.toTermName))
// The constructor parameter with given name. This means the parameter
// has given name, or starts with given name, and continues with a `$` afterwards.
@@ -130,7 +130,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
if (from.name != nme.OUTER ||
from.tpe.typeSymbol.isPrimitiveValueClass) result
else localTyper.typedPos(to.pos) {
- IF (from OBJ_EQ NULL) THEN Throw(NullPointerExceptionClass.tpe) ELSE result
+ IF (from OBJ_EQ NULL) THEN Throw(NewFromConstructor(NPEConstructor)) ELSE result
}
}
@@ -188,7 +188,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
// Lazy vals don't get the assignment in the constructor.
if (!stat.symbol.tpe.isInstanceOf[ConstantType]) {
if (rhs != EmptyTree && !stat.symbol.isLazy) {
- val rhs1 = intoConstructor(stat.symbol, rhs);
+ val rhs1 = intoConstructor(stat.symbol, rhs)
(if (canBeMoved(stat)) constrPrefixBuf else constrStatBuf) += mkAssign(
stat.symbol, rhs1)
}
@@ -281,7 +281,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
specializedStats find {
case Assign(sel @ Select(This(_), _), rhs) =>
( (sel.symbol hasFlag SPECIALIZED)
- && (nme.unspecializedName(nme.localToGetter(sel.symbol.name)) == nme.localToGetter(sym.name))
+ && (nme.unspecializedName(nme.localToGetter(sel.symbol.name.toTermName)) == nme.localToGetter(sym.name.toTermName))
)
case _ => false
}
@@ -399,7 +399,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
def addGetter(sym: Symbol): Symbol = {
val getr = addAccessor(
- sym, nme.getterName(sym.name), getterFlags(sym.flags))
+ sym, nme.getterName(sym.name.toTermName), getterFlags(sym.flags))
getr setInfo MethodType(List(), sym.tpe)
defBuf += localTyper.typedPos(sym.pos)(DefDef(getr, Select(This(clazz), sym)))
getr
@@ -408,7 +408,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
def addSetter(sym: Symbol): Symbol = {
sym setFlag MUTABLE
val setr = addAccessor(
- sym, nme.getterToSetter(nme.getterName(sym.name)), setterFlags(sym.flags))
+ sym, nme.getterToSetter(nme.getterName(sym.name.toTermName)), setterFlags(sym.flags))
setr setInfo MethodType(setr.newSyntheticValueParams(List(sym.tpe)), UnitClass.tpe)
defBuf += localTyper.typed {
//util.trace("adding setter def for "+setr) {
@@ -422,7 +422,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
def ensureAccessor(sym: Symbol)(acc: => Symbol) =
if (sym.owner == clazz && !sym.isMethod && sym.isPrivate) { // there's an access to a naked field of the enclosing class
- var getr = acc
+ val getr = acc
getr makeNotPrivate clazz
getr
} else {
@@ -511,7 +511,6 @@ abstract class Constructors extends Transform with ast.TreeDSL {
sym = closureClass,
constrMods = Modifiers(0),
vparamss = List(List(outerFieldDef)),
- argss = ListOfNil,
body = List(applyMethodDef),
superPos = impl.pos)
}
@@ -529,7 +528,8 @@ abstract class Constructors extends Transform with ast.TreeDSL {
(pre ::: supercalls, rest)
}
- var (uptoSuperStats, remainingConstrStats) = splitAtSuper(constrStatBuf.toList)
+ val (uptoSuperStats, remainingConstrStats0) = splitAtSuper(constrStatBuf.toList)
+ var remainingConstrStats = remainingConstrStats0
/** XXX This is not correct: remainingConstrStats.nonEmpty excludes too much,
* but excluding it includes too much. The constructor sequence being mimicked
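
splitAtSuper, made immutable-by-default in the hunk above, separates a primary constructor's statements at the superclass constructor call. A generic sketch of that split, with the tree test abstracted into a predicate (the names are illustrative):

    object SplitAtSuperSketch {
      // Everything up to and including the super call(s) comes first, the rest follows;
      // `isSuperCall` stands in for the tree inspection the real code performs.
      def splitAtSuper[T](stats: List[T])(isSuperCall: T => Boolean): (List[T], List[T]) = {
        val (pre, rest0)       = stats span (s => !isSuperCall(s))
        val (supercalls, rest) = rest0 span isSuperCall
        (pre ::: supercalls, rest)
      }
    }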
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index cb5268c422..55b9ce1be9 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -10,6 +10,7 @@ import scala.reflect.internal.ClassfileConstants._
import scala.collection.{ mutable, immutable }
import symtab._
import Flags._
+import scala.reflect.internal.Mode._
abstract class Erasure extends AddInterfaces
with scala.reflect.internal.transform.Erasure
@@ -20,6 +21,7 @@ abstract class Erasure extends AddInterfaces
import global._
import definitions._
import CODE._
+ import treeInfo._
val phaseName: String = "erasure"
@@ -49,7 +51,7 @@ abstract class Erasure extends AddInterfaces
if (sym == ArrayClass) args foreach traverse
else if (sym.isTypeParameterOrSkolem || sym.isExistentiallyBound || !args.isEmpty) result = true
else if (sym.isClass) traverse(rebindInnerClass(pre, sym)) // #2585
- else if (!sym.owner.isPackageClass) traverse(pre)
+ else if (!sym.isTopLevel) traverse(pre)
case PolyType(_, _) | ExistentialType(_, _) =>
result = true
case RefinedType(parents, _) =>
@@ -100,7 +102,7 @@ abstract class Erasure extends AddInterfaces
* unboxing some primitive types and further simplifications as they are done in jsig.
*/
val prepareSigMap = new TypeMap {
- def squashBoxed(tp: Type): Type = tp.normalize match {
+ def squashBoxed(tp: Type): Type = tp.dealiasWiden match {
case t @ RefinedType(parents, decls) =>
val parents1 = parents mapConserve squashBoxed
if (parents1 eq parents) tp
@@ -113,7 +115,7 @@ abstract class Erasure extends AddInterfaces
if (boxedClass contains t.typeSymbol) ObjectClass.tpe
else tp
}
- def apply(tp: Type): Type = tp.normalize match {
+ def apply(tp: Type): Type = tp.dealiasWiden match {
case tp1 @ TypeBounds(lo, hi) =>
val lo1 = squashBoxed(apply(lo))
val hi1 = squashBoxed(apply(hi))
@@ -144,7 +146,7 @@ abstract class Erasure extends AddInterfaces
}
case tp1 @ MethodType(params, restpe) =>
val params1 = mapOver(params)
- val restpe1 = if (restpe.normalize.typeSymbol == UnitClass) UnitClass.tpe else apply(restpe)
+ val restpe1 = if (restpe.typeSymbol == UnitClass) UnitClass.tpe else apply(restpe)
if ((params1 eq params) && (restpe1 eq restpe)) tp1
else MethodType(params1, restpe1)
case tp1 @ RefinedType(parents, decls) =>
@@ -162,8 +164,8 @@ abstract class Erasure extends AddInterfaces
}
}
- private def hiBounds(bounds: TypeBounds): List[Type] = bounds.hi.normalize match {
- case RefinedType(parents, _) => parents map (_.normalize)
+ private def hiBounds(bounds: TypeBounds): List[Type] = bounds.hi.dealiasWiden match {
+ case RefinedType(parents, _) => parents map (_.dealiasWiden)
case tp => tp :: Nil
}
@@ -172,7 +174,7 @@ abstract class Erasure extends AddInterfaces
/** The Java signature of type 'info', for symbol sym. The symbol is used to give the right return
* type for constructors.
*/
- def javaSig(sym0: Symbol, info: Type): Option[String] = beforeErasure {
+ def javaSig(sym0: Symbol, info: Type): Option[String] = enteringErasure {
val isTraitSignature = sym0.enclClass.isTrait
def superSig(parents: List[Type]) = {
@@ -206,7 +208,7 @@ abstract class Erasure extends AddInterfaces
// Anything which could conceivably be a module (i.e. isn't known to be
// a type parameter or similar) must go through here or the signature is
// likely to end up with Foo<T>.Empty where it needs Foo<T>.Empty$.
- def fullNameInSig(sym: Symbol) = "L" + beforeIcode(sym.javaBinaryName)
+ def fullNameInSig(sym: Symbol) = "L" + enteringIcode(sym.javaBinaryName)
def jsig(tp0: Type, existentiallyBound: List[Symbol] = Nil, toplevel: Boolean = false, primitiveOK: Boolean = true): String = {
val tp = tp0.dealias
@@ -340,15 +342,14 @@ abstract class Erasure extends AddInterfaces
case _ => tp.deconst
}
}
-
+
// Each primitive value class has its own getClass for ultra-precise class object typing.
private lazy val primitiveGetClassMethods = Set[Symbol](Any_getClass, AnyVal_getClass) ++ (
ScalaValueClasses map (_.tpe member nme.getClass_)
)
-
+
// ## requires a little translation
private lazy val poundPoundMethods = Set[Symbol](Any_##, Object_##)
-
// Methods on Any/Object which we rewrite here while we still know what
// is a primitive and what arrived boxed.
private lazy val interceptedMethods = poundPoundMethods ++ primitiveGetClassMethods
@@ -357,41 +358,10 @@ abstract class Erasure extends AddInterfaces
override def newTyper(context: Context) = new Eraser(context)
- private def safeToRemoveUnbox(cls: Symbol): Boolean =
- (cls == definitions.NullClass) || isBoxedValueClass(cls)
-
- /** An extractor object for unboxed expressions (maybe subsumed by posterasure?) */
- object Unboxed {
- def unapply(tree: Tree): Option[Tree] = tree match {
- case Apply(fn, List(arg)) if isUnbox(fn.symbol) && safeToRemoveUnbox(arg.tpe.typeSymbol) =>
- Some(arg)
- case Apply(
- TypeApply(
- cast @ Select(
- Apply(
- sel @ Select(arg, acc),
- List()),
- asinstanceof),
- List(tpt)),
- List())
- if cast.symbol == Object_asInstanceOf &&
- tpt.tpe.typeSymbol.isDerivedValueClass &&
- sel.symbol == tpt.tpe.typeSymbol.derivedValueClassUnbox =>
- Some(arg)
- case _ =>
- None
- }
- }
-
- /** An extractor object for boxed expressions (maybe subsumed by posterasure?) */
- object Boxed {
- def unapply(tree: Tree): Option[Tree] = tree match {
- case Apply(Select(New(tpt), nme.CONSTRUCTOR), List(arg)) if (tpt.tpe.typeSymbol.isDerivedValueClass) =>
- Some(arg)
- case LabelDef(name, params, Boxed(rhs)) =>
- Some(treeCopy.LabelDef(tree, name, params, rhs) setType rhs.tpe)
- case _ =>
- None
+ private def isSafelyRemovableUnbox(fn: Tree, arg: Tree): Boolean = {
+ isUnbox(fn.symbol) && {
+ val cls = arg.tpe.typeSymbol
+ (cls == definitions.NullClass) || isBoxedValueClass(cls)
}
}
@@ -404,7 +374,7 @@ abstract class Erasure extends AddInterfaces
val bridgeTarget = mutable.HashMap[Symbol, Symbol]()
var bridges = List[Tree]()
- val opc = beforeExplicitOuter {
+ val opc = enteringExplicitOuter {
new overridingPairs.Cursor(root) {
override def parents = List(root.info.firstParent)
override def exclude(sym: Symbol) = !sym.isMethod || sym.isPrivate || super.exclude(sym)
@@ -416,10 +386,10 @@ abstract class Erasure extends AddInterfaces
val member = opc.overriding
val other = opc.overridden
//println("bridge? " + member + ":" + member.tpe + member.locationString + " to " + other + ":" + other.tpe + other.locationString)//DEBUG
- if (beforeExplicitOuter(!member.isDeferred))
+ if (enteringExplicitOuter(!member.isDeferred))
checkPair(member, other)
- opc.next
+ opc.next()
}
(bridges, toBeRemoved)
}
@@ -446,11 +416,11 @@ abstract class Erasure extends AddInterfaces
sm"""bridge generated for member ${fulldef(member)}
|which overrides ${fulldef(other)}
|clashes with definition of $what;
- |both have erased type ${afterPostErasure(bridge.tpe)}""")
+ |both have erased type ${exitingPostErasure(bridge.tpe)}""")
}
for (bc <- root.baseClasses) {
if (settings.debug.value)
- afterPostErasure(println(
+ exitingPostErasure(println(
sm"""check bridge overrides in $bc
|${bc.info.nonPrivateDecl(bridge.name)}
|${site.memberType(bridge)}
@@ -459,13 +429,13 @@ abstract class Erasure extends AddInterfaces
def overriddenBy(sym: Symbol) =
sym.matchingSymbol(bc, site).alternatives filter (sym => !sym.isBridge)
- for (overBridge <- afterPostErasure(overriddenBy(bridge))) {
+ for (overBridge <- exitingPostErasure(overriddenBy(bridge))) {
if (overBridge == member) {
clashError("the member itself")
} else {
val overMembers = overriddenBy(member)
if (!overMembers.exists(overMember =>
- afterPostErasure(overMember.tpe =:= overBridge.tpe))) {
+ exitingPostErasure(overMember.tpe =:= overBridge.tpe))) {
clashError(fulldef(overBridge))
}
}
@@ -476,7 +446,7 @@ abstract class Erasure extends AddInterfaces
def checkPair(member: Symbol, other: Symbol) {
val otpe = specialErasure(root)(other.tpe)
- val bridgeNeeded = afterErasure (
+ val bridgeNeeded = exitingErasure (
!member.isMacro &&
!(other.tpe =:= member.tpe) &&
!(deconstMap(other.tpe) =:= deconstMap(member.tpe)) &&
@@ -489,7 +459,7 @@ abstract class Erasure extends AddInterfaces
if (!bridgeNeeded)
return
- val newFlags = (member.flags | BRIDGE) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED)
+ val newFlags = (member.flags | BRIDGE | ARTIFACT) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED)
val bridge = other.cloneSymbolImpl(root, newFlags) setPos root.pos
debuglog("generating bridge from %s (%s): %s to %s: %s".format(
@@ -504,9 +474,9 @@ abstract class Erasure extends AddInterfaces
if (!(member.tpe exists (_.typeSymbol.isDerivedValueClass)) ||
checkBridgeOverrides(member, other, bridge)) {
- afterErasure(root.info.decls enter bridge)
+ exitingErasure(root.info.decls enter bridge)
if (other.owner == root) {
- afterErasure(root.info.decls.unlink(other))
+ exitingErasure(root.info.decls.unlink(other))
toBeRemoved += other
}
@@ -515,7 +485,7 @@ abstract class Erasure extends AddInterfaces
}
}
- def makeBridgeDefDef(bridge: Symbol, member: Symbol, other: Symbol) = afterErasure {
+ def makeBridgeDefDef(bridge: Symbol, member: Symbol, other: Symbol) = exitingErasure {
// type checking ensures we can safely call `other`, but unless `member.tpe <:< other.tpe`,
// calling `member` is not guaranteed to succeed in general, there's
// nothing we can do about this, except for an unapply: when this subtype test fails,
@@ -526,10 +496,10 @@ abstract class Erasure extends AddInterfaces
def maybeWrap(bridgingCall: Tree): Tree = {
val guardExtractor = ( // can't statically know which member is going to be selected, so don't let this depend on member.isSynthetic
(member.name == nme.unapply || member.name == nme.unapplySeq)
- && !afterErasure((member.tpe <:< other.tpe))) // no static guarantees (TODO: is the subtype test ever true?)
+ && !exitingErasure((member.tpe <:< other.tpe))) // no static guarantees (TODO: is the subtype test ever true?)
import CODE._
- val _false = FALSE_typed
+ val _false = FALSE
val pt = member.tpe.resultType
lazy val zero =
if (_false.tpe <:< pt) _false
@@ -579,12 +549,7 @@ abstract class Erasure extends AddInterfaces
val tree1 = tree.tpe match {
case ErasedValueType(tref) =>
val clazz = tref.sym
- tree match {
- case Unboxed(arg) if arg.tpe.typeSymbol == clazz =>
- log("shortcircuiting unbox -> box "+arg); arg
- case _ =>
- New(clazz, cast(tree, underlyingOfValueClass(clazz)))
- }
+ New(clazz, cast(tree, underlyingOfValueClass(clazz)))
case _ =>
tree.tpe.typeSymbol match {
case UnitClass =>
@@ -600,7 +565,7 @@ abstract class Erasure extends AddInterfaces
* This is important for specialization: calls to the super constructor should not box/unbox specialized
* fields (see TupleX). (ID)
*/
- case Apply(boxFun, List(arg)) if isUnbox(tree.symbol) && safeToRemoveUnbox(arg.tpe.typeSymbol) =>
+ case Apply(boxFun, List(arg)) if isSafelyRemovableUnbox(tree, arg) =>
log(s"boxing an unbox: ${tree.symbol} -> ${arg.tpe}")
arg
case _ =>
@@ -635,24 +600,18 @@ abstract class Erasure extends AddInterfaces
case _ =>
val tree1 = pt match {
case ErasedValueType(tref) =>
- tree match {
- case Boxed(arg) if arg.tpe.isInstanceOf[ErasedValueType] =>
- log("shortcircuiting box -> unbox "+arg)
- arg
- case _ =>
- val clazz = tref.sym
- log("not boxed: "+tree)
- lazy val underlying = underlyingOfValueClass(clazz)
- val tree0 =
- if (tree.tpe.typeSymbol == NullClass &&
- isPrimitiveValueClass(underlying.typeSymbol)) {
- // convert `null` directly to underlying type, as going
- // via the unboxed type would yield a NPE (see SI-5866)
- unbox1(tree, underlying)
- } else
- Apply(Select(adaptToType(tree, clazz.tpe), clazz.derivedValueClassUnbox), List())
- cast(tree0, pt)
- }
+ val clazz = tref.sym
+ log("not boxed: "+tree)
+ lazy val underlying = underlyingOfValueClass(clazz)
+ val tree0 =
+ if (tree.tpe.typeSymbol == NullClass &&
+ isPrimitiveValueClass(underlying.typeSymbol)) {
+ // convert `null` directly to underlying type, as going
+ // via the unboxed type would yield a NPE (see SI-5866)
+ unbox1(tree, underlying)
+ } else
+ Apply(Select(adaptToType(tree, clazz.tpe), clazz.derivedValueClassUnbox), List())
+ cast(tree0, pt)
case _ =>
pt.typeSymbol match {
case UnitClass =>
@@ -731,19 +690,11 @@ abstract class Erasure extends AddInterfaces
case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List())
if tree.symbol == Any_asInstanceOf =>
val qual1 = typedQualifier(qual, NOmode, ObjectClass.tpe) // need to have an expected type, see #3037
- val qualClass = qual1.tpe.typeSymbol
-/*
- val targClass = targ.tpe.typeSymbol
- if (isNumericValueClass(qualClass) && isNumericValueClass(targClass))
- // convert numeric type casts
- atPos(tree.pos)(Apply(Select(qual1, "to" + targClass.name), List()))
- else
-*/
if (isPrimitiveValueType(targ.tpe) || isErasedValueType(targ.tpe)) {
val noNullCheckNeeded = targ.tpe match {
case ErasedValueType(tref) =>
- atPhase(currentRun.erasurePhase) {
+ enteringPhase(currentRun.erasurePhase) {
isPrimitiveValueClass(erasedValueClassArg(tref).typeSymbol)
}
case _ =>
@@ -751,7 +702,6 @@ abstract class Erasure extends AddInterfaces
}
if (noNullCheckNeeded) unbox(qual1, targ.tpe)
else {
- def nullConst = Literal(Constant(null)) setType NullClass.tpe
val untyped =
// util.trace("new asinstanceof test") {
gen.evalOnce(qual1, context.owner, context.unit) { qual =>
@@ -796,7 +746,7 @@ abstract class Erasure extends AddInterfaces
tree.symbol = NoSymbol
selectFrom(qual1)
} else if (isMethodTypeWithEmptyParams(qual1.tpe)) {
- assert(qual1.symbol.isStable, qual1.symbol);
+ assert(qual1.symbol.isStable, qual1.symbol)
val applied = Apply(qual1, List()) setPos qual1.pos setType qual1.tpe.resultType
adaptMember(selectFrom(applied))
} else if (!(qual1.isInstanceOf[Super] || (qual1.tpe.typeSymbol isSubClass tree.symbol.owner))) {
@@ -817,19 +767,19 @@ abstract class Erasure extends AddInterfaces
/** A replacement for the standard typer's adapt method.
*/
- override protected def adapt(tree: Tree, mode: Int, pt: Type, original: Tree = EmptyTree): Tree =
+ override protected def adapt(tree: Tree, mode: Mode, pt: Type, original: Tree = EmptyTree): Tree =
adaptToType(tree, pt)
/** A replacement for the standard typer's `typed1` method.
*/
- override def typed1(tree: Tree, mode: Int, pt: Type): Tree = {
+ override def typed1(tree: Tree, mode: Mode, pt: Type): Tree = {
val tree1 = try {
tree match {
case InjectDerivedValue(arg) =>
(tree.attachments.get[TypeRefAttachment]: @unchecked) match {
case Some(itype) =>
val tref = itype.tpe
- val argPt = atPhase(currentRun.erasurePhase)(erasedValueClassArg(tref))
+ val argPt = enteringPhase(currentRun.erasurePhase)(erasedValueClassArg(tref))
log(s"transforming inject $arg -> $tref/$argPt")
val result = typed(arg, mode, argPt)
log(s"transformed inject $arg -> $tref/$argPt = $result:${result.tpe}")
@@ -857,7 +807,7 @@ abstract class Erasure extends AddInterfaces
newCdef setType newCdef.body.tpe
}
def adaptBranch(branch: Tree): Tree =
- if (branch == EmptyTree) branch else adaptToType(branch, tree1.tpe);
+ if (branch == EmptyTree) branch else adaptToType(branch, tree1.tpe)
tree1 match {
case If(cond, thenp, elsep) =>
@@ -873,8 +823,7 @@ abstract class Erasure extends AddInterfaces
alt => alt == first || !(first.tpe looselyMatches alt.tpe)
}
if (tree.symbol ne sym1) {
- tree1.symbol = sym1
- tree1.tpe = sym1.tpe
+ tree1 setSymbol sym1 setType sym1.tpe
}
}
tree1
@@ -902,20 +851,20 @@ abstract class Erasure extends AddInterfaces
private def checkNoDoubleDefs(root: Symbol) {
def doubleDefError(sym1: Symbol, sym2: Symbol) {
// the .toString must also be computed at the earlier phase
- val tpe1 = afterRefchecks(root.thisType.memberType(sym1))
- val tpe2 = afterRefchecks(root.thisType.memberType(sym2))
+ val tpe1 = exitingRefchecks(root.thisType.memberType(sym1))
+ val tpe2 = exitingRefchecks(root.thisType.memberType(sym2))
if (!tpe1.isErroneous && !tpe2.isErroneous)
unit.error(
if (sym1.owner == root) sym1.pos else root.pos,
(if (sym1.owner == sym2.owner) "double definition:\n"
else if (sym1.owner == root) "name clash between defined and inherited member:\n"
else "name clash between inherited members:\n") +
- sym1 + ":" + afterRefchecks(tpe1.toString) +
+ sym1 + ":" + exitingRefchecks(tpe1.toString) +
(if (sym1.owner == root) "" else sym1.locationString) + " and\n" +
- sym2 + ":" + afterRefchecks(tpe2.toString) +
+ sym2 + ":" + exitingRefchecks(tpe2.toString) +
(if (sym2.owner == root) " at line " + (sym2.pos).line else sym2.locationString) +
"\nhave same type" +
- (if (afterRefchecks(tpe1 =:= tpe2)) "" else " after erasure: " + afterPostErasure(sym1.tpe)))
+ (if (exitingRefchecks(tpe1 =:= tpe2)) "" else " after erasure: " + exitingPostErasure(sym1.tpe)))
sym1.setInfo(ErrorType)
}
@@ -925,7 +874,7 @@ abstract class Erasure extends AddInterfaces
if (e.sym.isTerm) {
var e1 = decls.lookupNextEntry(e)
while (e1 ne null) {
- if (afterPostErasure(e1.sym.info =:= e.sym.info)) doubleDefError(e.sym, e1.sym)
+ if (exitingPostErasure(e1.sym.info =:= e.sym.info)) doubleDefError(e.sym, e1.sym)
e1 = decls.lookupNextEntry(e1)
}
}
@@ -933,16 +882,17 @@ abstract class Erasure extends AddInterfaces
}
val opc = new overridingPairs.Cursor(root) {
- override def exclude(sym: Symbol): Boolean =
- (!sym.isTerm || sym.isPrivate || super.exclude(sym)
- // specialized members have no type history before 'specialize', causing double def errors for curried defs
- || !sym.hasTypeAt(currentRun.refchecksPhase.id))
+ override def exclude(sym: Symbol): Boolean = (
+ !sym.isTerm || sym.isPrivate || super.exclude(sym)
+ // specialized members have no type history before 'specialize', causing double def errors for curried defs
+ || !sym.hasTypeAt(currentRun.refchecksPhase.id)
+ )
override def matches(sym1: Symbol, sym2: Symbol): Boolean =
- afterPostErasure(sym1.tpe =:= sym2.tpe)
+ exitingPostErasure(sym1.tpe =:= sym2.tpe)
}
while (opc.hasNext) {
- if (!afterRefchecks(
+ if (!exitingRefchecks(
root.thisType.memberType(opc.overriding) matches
root.thisType.memberType(opc.overridden))) {
debuglog("" + opc.overriding.locationString + " " +
@@ -951,7 +901,7 @@ abstract class Erasure extends AddInterfaces
opc.overridden.infosString)
doubleDefError(opc.overriding, opc.overridden)
}
- opc.next
+ opc.next()
}
}
@@ -961,8 +911,8 @@ abstract class Erasure extends AddInterfaces
for (member <- root.info.nonPrivateMember(other.name).alternatives) {
if (member != other &&
!(member hasFlag BRIDGE) &&
- afterErasure(member.tpe =:= other.tpe) &&
- !afterRefchecks(
+ exitingErasure(member.tpe =:= other.tpe) &&
+ !exitingRefchecks(
root.thisType.memberType(member) matches root.thisType.memberType(other))) {
debuglog("" + member.locationString + " " + member.infosString + other.locationString + " " + other.infosString);
doubleDefError(member, other)
@@ -1084,6 +1034,7 @@ abstract class Erasure extends AddInterfaces
} else if (fn.symbol == Any_isInstanceOf) {
preEraseIsInstanceOf
} else if (fn.symbol.owner.isRefinementClass && !fn.symbol.isOverridingSymbol) {
+ // !!! Another spot where we produce overloaded types (see test run/t6301)
ApplyDynamic(qualifier, args) setSymbol fn.symbol setPos tree.pos
} else if (fn.symbol.isMethodWithExtension && !fn.symbol.tpe.isErroneous) {
Apply(gen.mkAttributedRef(extensionMethods.extensionMethod(fn.symbol)), qualifier :: args)
@@ -1142,7 +1093,8 @@ abstract class Erasure extends AddInterfaces
SelectFromArray(qual, name, erasure(tree.symbol)(qual.tpe)).copyAttrs(fn),
args)
}
- } else if (args.isEmpty && interceptedMethods(fn.symbol)) {
+ }
+ else if (args.isEmpty && interceptedMethods(fn.symbol)) {
if (poundPoundMethods.contains(fn.symbol)) {
// This is unattractive, but without it we crash here on ().## because after
// erasure the ScalaRunTime.hash overload goes from Unit => Int to BoxedUnit => Int.
@@ -1154,13 +1106,24 @@ abstract class Erasure extends AddInterfaces
case s @ (ShortClass | ByteClass | CharClass) => numericConversion(qual, s)
case BooleanClass => If(qual, LIT(true.##), LIT(false.##))
case _ =>
- global.typer.typed(gen.mkRuntimeCall(nme.hash_, List(qual)))
+ // Since we are past typer, we need to avoid creating trees carrying
+ // overloaded types. This logic is custom (and technically incomplete,
+ // although serviceable) for def hash. What is really needed is for
+ // the overloading logic presently hidden away in a few different
+ // places to be properly exposed so we can just call "resolveOverload"
+ // after typer. Until then:
+ val alts = ScalaRunTimeModule.info.member(nme.hash_).alternatives
+ def alt1 = alts find (_.info.paramTypes.head =:= qual.tpe)
+ def alt2 = ScalaRunTimeModule.info.member(nme.hash_) suchThat (_.info.paramTypes.head.typeSymbol == AnyClass)
+ val newTree = gen.mkRuntimeCall(nme.hash_, qual :: Nil) setSymbol (alt1 getOrElse alt2)
+
+ global.typer.typed(newTree)
}
} else if (isPrimitiveValueClass(qual.tpe.typeSymbol)) {
// Rewrite 5.getClass to ScalaRunTime.anyValClass(5)
global.typer.typed(gen.mkRuntimeCall(nme.anyValClass, List(qual, typer.resolveClassTag(tree.pos, qual.tpe.widen))))
} else if (primitiveGetClassMethods.contains(fn.symbol)) {
- // if we got here then we're trying to send a primitive getClass method to either
+ // if we got here then we're trying to send a primitive getClass method to either
// a) an Any, in which case Object_getClass works because Any erases to Object. Or
//
// b) a non-primitive, e.g. because the qualifier's type is a refinement type where one parent
@@ -1271,13 +1234,12 @@ abstract class Erasure extends AddInterfaces
tree1 setType specialScalaErasure(tree1.tpe)
case ArrayValue(elemtpt, trees) =>
treeCopy.ArrayValue(
- tree1, elemtpt setType specialScalaErasure.applyInArray(elemtpt.tpe), trees map transform) setType null
+ tree1, elemtpt setType specialScalaErasure.applyInArray(elemtpt.tpe), trees map transform).clearType()
case DefDef(_, _, _, _, tpt, _) =>
- val result = super.transform(tree1) setType null
- tpt.tpe = specialErasure(tree1.symbol)(tree1.symbol.tpe).resultType
- result
+ try super.transform(tree1).clearType()
+ finally tpt setType specialErasure(tree1.symbol)(tree1.symbol.tpe).resultType
case _ =>
- super.transform(tree1) setType null
+ super.transform(tree1).clearType()
}
}
}
@@ -1289,11 +1251,11 @@ abstract class Erasure extends AddInterfaces
override def transform(tree: Tree): Tree = {
val tree1 = preTransformer.transform(tree)
// log("tree after pretransform: "+tree1)
- afterErasure {
+ exitingErasure {
val tree2 = mixinTransformer.transform(tree1)
// debuglog("tree after addinterfaces: \n" + tree2)
- newTyper(rootContext(unit, tree, true)).typed(tree2)
+ newTyper(rootContext(unit, tree, erasedTypes = true)).typed(tree2)
}
}
}
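
Much of the Erasure diff above concerns bridge generation (checkPair, makeBridgeDefDef), which is needed because an override and the member it overrides can erase to different signatures. A small source-level illustration of the situation the bridges cover (the types here are made up):

    trait Box[A] { def get: A }

    class StringBox(s: String) extends Box[String] {
      // After erasure this has signature `get: String`, while Box#get erases to
      // `get: Object`; the compiler synthesizes a bridge `get: Object` forwarding here.
      def get: String = s
    }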
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 2f28a16416..124dd6c995 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -10,7 +10,6 @@ import symtab._
import Flags.{ CASE => _, _ }
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
-import matching.{ Patterns, ParallelMatching }
import scala.tools.nsc.settings.ScalaVersion
/** This class ...
@@ -19,15 +18,12 @@ import scala.tools.nsc.settings.ScalaVersion
* @version 1.0
*/
abstract class ExplicitOuter extends InfoTransform
- with Patterns
- with ParallelMatching
with TypingTransformers
with ast.TreeDSL
{
import global._
import definitions._
import CODE._
- import Debug.TRACE
/** The following flags may be set by this phase: */
override def phaseNewFlags: Long = notPROTECTED
@@ -78,19 +74,11 @@ abstract class ExplicitOuter extends InfoTransform
class RemoveBindingsTransformer(toRemove: Set[Symbol]) extends Transformer {
override def transform(tree: Tree) = tree match {
- case Bind(_, body) if toRemove(tree.symbol) =>
- TRACE("Dropping unused binding: " + tree.symbol)
- super.transform(body)
+ case Bind(_, body) if toRemove(tree.symbol) => super.transform(body)
case _ => super.transform(tree)
}
}
- /** Issue a migration warning for instance checks which might be on an Array and
- * for which the type parameter conforms to Seq, because these answers changed in 2.8.
- */
- def isArraySeqTest(lhs: Type, rhs: Type) =
- (ArrayClass.tpe <:< lhs.widen) && (rhs.widen matchesPattern SeqClass.tpe)
-
def outerAccessor(clazz: Symbol): Symbol = {
val firstTry = clazz.info.decl(nme.expandedName(nme.OUTER, clazz))
if (firstTry != NoSymbol && firstTry.outerSource == clazz) firstTry
@@ -99,7 +87,7 @@ abstract class ExplicitOuter extends InfoTransform
def newOuterAccessor(clazz: Symbol) = {
val accFlags = SYNTHETIC | ARTIFACT | METHOD | STABLE | ( if (clazz.isTrait) DEFERRED else 0 )
val sym = clazz.newMethod(nme.OUTER, clazz.pos, accFlags)
- val restpe = if (clazz.isTrait) clazz.outerClass.tpe else clazz.outerClass.thisType
+ val restpe = if (clazz.isTrait) clazz.outerClass.tpe_* else clazz.outerClass.thisType
sym expandName clazz
sym.referenced = clazz
@@ -118,7 +106,7 @@ abstract class ExplicitOuter extends InfoTransform
* <ol>
* <li>
* Add an outer parameter to the formal parameters of a constructor
- * in a inner non-trait class;
+ * in an inner non-trait class;
* </li>
* <li>
* Add a protected $outer field to an inner class which is
@@ -166,16 +154,13 @@ abstract class ExplicitOuter extends InfoTransform
var decls1 = decls
if (isInner(clazz) && !clazz.isInterface) {
decls1 = decls.cloneScope
- val outerAcc = clazz.newMethod(nme.OUTER, clazz.pos) // 3
- outerAcc expandName clazz
-
- decls1 enter newOuterAccessor(clazz)
+ decls1 enter newOuterAccessor(clazz) // 3
if (hasOuterField(clazz)) //2
decls1 enter newOuterField(clazz)
}
if (!clazz.isTrait && !parents.isEmpty) {
for (mc <- clazz.mixinClasses) {
- val mixinOuterAcc: Symbol = afterExplicitOuter(outerAccessor(mc))
+ val mixinOuterAcc: Symbol = exitingExplicitOuter(outerAccessor(mc))
if (mixinOuterAcc != NoSymbol) {
if (decls1 eq decls) decls1 = decls.cloneScope
val newAcc = mixinOuterAcc.cloneSymbol(clazz, mixinOuterAcc.flags & ~DEFERRED)
@@ -256,11 +241,6 @@ abstract class ExplicitOuter extends InfoTransform
* <blockquote><pre>`base'.$outer$$C1 ... .$outer$$Cn</pre></blockquote>
* which refers to the outer instance of class `to` of
* value `base`. The result is typed but not positioned.
- *
- * @param base ...
- * @param from ...
- * @param to ...
- * @return ...
*/
protected def outerPath(base: Tree, from: Symbol, to: Symbol): Tree = {
//Console.println("outerPath from "+from+" to "+to+" at "+base+":"+base.tpe)
@@ -369,7 +349,7 @@ abstract class ExplicitOuter extends InfoTransform
*/
def outerAccessorDef: Tree = {
val outerAcc = outerAccessor(currentClass)
- var rhs: Tree =
+ val rhs: Tree =
if (outerAcc.isDeferred) EmptyTree
else This(currentClass) DOT outerField(currentClass)
@@ -405,74 +385,6 @@ abstract class ExplicitOuter extends InfoTransform
}
}
- // requires settings.XoldPatmat.value
- def matchTranslation(tree: Match) = {
- val Match(selector, cases) = tree
- var nselector = transform(selector)
-
- def makeGuardDef(vs: List[Symbol], guard: Tree) = {
- val gdname = unit.freshTermName("gd")
- val method = currentOwner.newMethod(gdname, tree.pos, SYNTHETIC)
- val params = method newSyntheticValueParams vs.map(_.tpe)
- method setInfo new MethodType(params, BooleanClass.tpe)
-
- localTyper typed {
- DEF(method) === guard.changeOwner(currentOwner -> method).substituteSymbols(vs, params)
- }
- }
-
- val nguard = new ListBuffer[Tree]
- val ncases =
- for (CaseDef(pat, guard, body) <- cases) yield {
- // Strip out any unused pattern bindings up front
- val patternIdents = for (b @ Bind(_, _) <- pat) yield b.symbol
- val references: Set[Symbol] = Set(guard, body) flatMap { t => for (id @ Ident(name) <- t) yield id.symbol }
- val (used, unused) = patternIdents partition references
- val strippedPat = if (unused.isEmpty) pat else new RemoveBindingsTransformer(unused.toSet) transform pat
-
- val gdcall =
- if (guard == EmptyTree) EmptyTree
- else {
- val guardDef = makeGuardDef(used, guard)
- nguard += transform(guardDef) // building up list of guards
-
- localTyper typed (Ident(guardDef.symbol) APPLY (used map Ident))
- }
-
- (CASE(transform(strippedPat)) IF gdcall) ==> transform(body)
- }
-
- val (checkExhaustive, requireSwitch) = nselector match {
- case Typed(nselector1, tpt) =>
- val unchecked = tpt.tpe hasAnnotation UncheckedClass
- if (unchecked)
- nselector = nselector1
-
- // Don't require a tableswitch if there are 1-2 casedefs
- // since the matcher intentionally emits an if-then-else.
- (!unchecked, treeInfo.isSwitchAnnotation(tpt.tpe) && ncases.size > 2)
- case _ =>
- (true, false)
- }
-
- val t = atPos(tree.pos) {
- val context = MatrixContext(currentUnit, transform, localTyper, currentOwner, tree.tpe)
- val t_untyped = handlePattern(nselector, ncases, checkExhaustive, context)
-
- /* if @switch annotation is present, verify the resulting tree is a Match */
- if (requireSwitch) t_untyped match {
- case Block(_, Match(_, _)) => // ok
- case _ =>
- unit.error(tree.pos, "could not emit switch for @switch annotated match")
- }
-
- localTyper.typed(t_untyped, context.matchResultType)
- }
-
- if (nguard.isEmpty) t
- else Block(nguard.toList, t) setType t.tpe
- }
-
/** The main transformation method */
override def transform(tree: Tree): Tree = {
val sym = tree.symbol
@@ -557,14 +469,10 @@ abstract class ExplicitOuter extends InfoTransform
})
super.transform(treeCopy.Apply(tree, sel, outerVal :: args))
- // entry point for pattern matcher translation
- case m: Match if settings.XoldPatmat.value => // the new pattern matcher runs in its own phase right after typer
- matchTranslation(m)
-
// for the new pattern matcher
// base.<outer>.eq(o) --> base.$outer().eq(o) if there's an accessor, else the whole tree becomes TRUE
// TODO remove the synthetic `<outer>` method from outerFor??
- case Apply(eqsel@Select(eqapp@Apply(sel@Select(base, nme.OUTER_SYNTH), Nil), eq), args) if !settings.XoldPatmat.value =>
+ case Apply(eqsel@Select(eqapp@Apply(sel@Select(base, nme.OUTER_SYNTH), Nil), eq), args) =>
val outerFor = sel.symbol.owner.toInterface // TODO: toInterface necessary?
val acc = outerAccessor(outerFor)
@@ -573,24 +481,17 @@ abstract class ExplicitOuter extends InfoTransform
// at least don't crash... this duplicates maybeOmittable from constructors
(acc.owner.isEffectivelyFinal && !acc.isOverridingSymbol)) {
unit.uncheckedWarning(tree.pos, "The outer reference in this type test cannot be checked at run time.")
- return transform(TRUE) // urgh... drop condition if there's no accessor (or if it may disappear after constructors)
+ transform(TRUE) // urgh... drop condition if there's no accessor (or if it may disappear after constructors)
} else {
// println("(base, acc)= "+(base, acc))
val outerSelect = localTyper typed Apply(Select(base, acc), Nil)
// achieves the same as: localTyper typed atPos(tree.pos)(outerPath(base, base.tpe.typeSymbol, outerFor.outerClass))
// println("(b, tpsym, outerForI, outerFor, outerClass)= "+ (base, base.tpe.typeSymbol, outerFor, sel.symbol.owner, outerFor.outerClass))
// println("outerSelect = "+ outerSelect)
- return transform(treeCopy.Apply(tree, treeCopy.Select(eqsel, outerSelect, eq), args))
+ transform(treeCopy.Apply(tree, treeCopy.Select(eqsel, outerSelect, eq), args))
}
case _ =>
- if (settings.Xmigration.value < ScalaVersion.twoDotEight) tree match {
- case TypeApply(fn @ Select(qual, _), args) if fn.symbol == Object_isInstanceOf || fn.symbol == Any_isInstanceOf =>
- if (isArraySeqTest(qual.tpe, args.head.tpe))
- unit.warning(tree.pos, "An Array will no longer match as Seq[_].")
- case _ => ()
- }
-
val x = super.transform(tree)
if (x.tpe eq null) x
else x setType transformInfo(currentOwner, x.tpe)
@@ -599,7 +500,7 @@ abstract class ExplicitOuter extends InfoTransform
/** The transformation method for whole compilation units */
override def transformUnit(unit: CompilationUnit) {
- afterExplicitOuter(super.transformUnit(unit))
+ exitingExplicitOuter(super.transformUnit(unit))
}
}
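
ExplicitOuter, as the numbered comments in the hunks above note, adds an outer constructor parameter, a $outer field and an outer accessor to inner classes; the pattern-matcher hunk then rewrites base.<outer>.eq(o) to go through that accessor. A hand-written approximation of the resulting shape (the real names are synthetic and mangled; these are spelled out for readability):

    class Outer { class Inner }

    // Rough, hand-written approximation of what the transform produces:
    class OuterExpanded {
      class InnerExpanded(outerRef: OuterExpanded) {             // 1. outer constructor parameter
        private[this] val outerField: OuterExpanded = outerRef   // 2. the $outer field
        def outerAccessor: OuterExpanded = outerField            // 3. accessor used by outer pointer checks
      }
    }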
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index bc54054028..672d9d232a 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -8,9 +8,6 @@ package transform
import symtab._
import Flags._
import scala.collection.{ mutable, immutable }
-import scala.collection.mutable
-import scala.tools.nsc.util.FreshNameCreator
-import scala.runtime.ScalaRunTime.{ isAnyVal, isTuple }
/**
* Perform Step 1 in the inline classes SIP: Creates extension methods for all
@@ -23,7 +20,6 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
import global._ // the global environment
import definitions._ // standard classes and methods
- import typer.{ typed, atOwner } // methods to type trees
/** the following two members override abstract members in Transform */
val phaseName: String = "extmethods"
@@ -70,7 +66,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
}
/** Return the extension method that corresponds to given instance method `meth`. */
- def extensionMethod(imeth: Symbol): Symbol = atPhase(currentRun.refchecksPhase) {
+ def extensionMethod(imeth: Symbol): Symbol = enteringPhase(currentRun.refchecksPhase) {
val companionInfo = companionModuleForce(imeth.owner).info
val candidates = extensionNames(imeth) map (companionInfo.decl(_)) filter (_.exists)
val matching = candidates filter (alt => normalize(alt.tpe, imeth.owner) matches imeth.tpe)
@@ -87,7 +83,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
|
| ${candidates.map(c => c.name+":"+normalize(c.tpe, imeth.owner)).mkString("\n")}
|
- | Eligible Names: ${extensionNames(imeth).mkString(",")}"""")
+ | Eligible Names: ${extensionNames(imeth).mkString(",")}" """)
matching.head
}
@@ -185,6 +181,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
// bad: [B#16154 >: A#16149, A#16155 <: AnyRef#2189]($this#16156: Foo#6965[A#16155])(x#16157: B#16154)List#2457[B#16154]
// good: [B#16151 >: A#16149, A#16149 <: AnyRef#2189]($this#16150: Foo#6965[A#16149])(x#16153: B#16151)List#2457[B#16151]
}
+
override def transform(tree: Tree): Tree = {
tree match {
case Template(_, _, _) =>
@@ -206,7 +203,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
val companion = origThis.companionModule
def makeExtensionMethodSymbol = {
- val extensionName = extensionNames(origMeth).head
+ val extensionName = extensionNames(origMeth).head.toTermName
val extensionMeth = (
companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
setAnnotations origMeth.annotations
@@ -256,12 +253,13 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] =
super.transformStats(stats, exprOwner) map {
- case md @ ModuleDef(_, _, _) if extensionDefs contains md.symbol =>
- val defns = extensionDefs(md.symbol).toList map (member =>
- atOwner(md.symbol)(localTyper.typedPos(md.pos.focus)(member))
- )
- extensionDefs -= md.symbol
- deriveModuleDef(md)(tmpl => deriveTemplate(tmpl)(_ ++ defns))
+ case md @ ModuleDef(_, _, _) =>
+ val extraStats = extensionDefs remove md.symbol match {
+ case Some(defns) => defns.toList map (defn => atOwner(md.symbol)(localTyper.typedPos(md.pos.focus)(defn.duplicate)))
+ case _ => Nil
+ }
+ if (extraStats.isEmpty) md
+ else deriveModuleDef(md)(tmpl => deriveTemplate(tmpl)(_ ++ extraStats))
case stat =>
stat
}
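For orientation, this phase turns every eligible method of a value class into a static-style extension method on the companion, so call sites can avoid boxing the receiver. A hand-written approximation of the shape it produces (the name plusExtension and the separate MetersExtensions object are illustrative; the real method is added to the actual companion under a mangled name):

// User-level value class with an instance method.
class Meters(val value: Double) extends AnyVal {
  def plus(other: Meters): Meters = new Meters(value + other.value)
}

// Sketch of the generated extension method: the receiver becomes an explicit
// $this parameter and the original body forwards to it. Placed in a separate
// object here so it cannot clash with the compiler-generated one.
object MetersExtensions {
  def plusExtension($this: Meters, other: Meters): Meters =
    new Meters($this.value + other.value)
}

object MetersDemo {
  def main(args: Array[String]): Unit = {
    val m = new Meters(1.5)
    // A call like `m plus new Meters(2.5)` is rewritten to the analogous
    // extension call; here we invoke the sketch directly.
    println(MetersExtensions.plusExtension(m, new Meters(2.5)).value) // 4.0
  }
}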
diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala
index cd26f95958..44d39de205 100644
--- a/src/compiler/scala/tools/nsc/transform/Flatten.scala
+++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala
@@ -8,28 +8,23 @@ package transform
import symtab._
import Flags._
-import scala.collection.{ mutable, immutable }
import scala.collection.mutable.ListBuffer
abstract class Flatten extends InfoTransform {
import global._
- import definitions._
+ import treeInfo.isQualifierSafeToElide
/** the following two members override abstract members in Transform */
val phaseName: String = "flatten"
- /** Updates the owning scope with the given symbol; returns the old symbol.
+ /** Updates the owning scope with the given symbol, unlinking any others.
*/
- private def replaceSymbolInCurrentScope(sym: Symbol): Symbol = afterFlatten {
+ private def replaceSymbolInCurrentScope(sym: Symbol): Unit = exitingFlatten {
val scope = sym.owner.info.decls
- val old = scope lookup sym.name andAlso scope.unlink
+ val old = (scope lookupUnshadowedEntries sym.name).toList
+ old foreach (scope unlink _)
scope enter sym
-
- if (old eq NoSymbol)
- log(s"lifted ${sym.fullLocationString}")
- else
- log(s"lifted ${sym.fullLocationString} after unlinking existing $old from scope.")
-
+ log(s"lifted ${sym.fullLocationString}" + ( if (old.isEmpty) "" else s" after unlinking $old from scope." ))
old
}
@@ -53,7 +48,7 @@ abstract class Flatten extends InfoTransform {
clazz.isClass && !clazz.isPackageClass && {
// Cannot flatten here: class A[T] { object B }
// was "at erasurePhase.prev"
- beforeErasure(clazz.typeParams.isEmpty)
+ enteringErasure(clazz.typeParams.isEmpty)
}
}
@@ -67,11 +62,11 @@ abstract class Flatten extends InfoTransform {
val decls1 = scopeTransform(clazz) {
val decls1 = newScope
if (clazz.isPackageClass) {
- afterFlatten { decls foreach (decls1 enter _) }
+ exitingFlatten { decls foreach (decls1 enter _) }
}
else {
val oldowner = clazz.owner
- afterFlatten { oldowner.info }
+ exitingFlatten { oldowner.info }
parents1 = parents mapConserve (this)
for (sym <- decls) {
@@ -90,7 +85,7 @@ abstract class Flatten extends InfoTransform {
val restp1 = apply(restp)
if (restp1 eq restp) tp else copyMethodType(tp, params, restp1)
case PolyType(tparams, restp) =>
- val restp1 = apply(restp);
+ val restp1 = apply(restp)
if (restp1 eq restp) tp else PolyType(tparams, restp1)
case _ =>
mapOver(tp)
@@ -122,8 +117,14 @@ abstract class Flatten extends InfoTransform {
case ClassDef(_, _, _, _) if sym.isNestedClass =>
liftedDefs(sym.enclosingTopLevelClass.owner) += tree
EmptyTree
- case Select(qual, name) if (sym.isStaticModule && !sym.owner.isPackageClass) =>
- afterFlatten(atPos(tree.pos)(gen.mkAttributedRef(sym)))
+ case Select(qual, name) if sym.isStaticModule && !sym.isTopLevel =>
+ exitingFlatten {
+ atPos(tree.pos) {
+ val ref = gen.mkAttributedRef(sym)
+ if (isQualifierSafeToElide(qual)) ref
+ else Block(List(qual), ref).setType(tree.tpe) // need to execute the qualifier but refer directly to the lifted module.
+ }
+ }
case _ =>
tree
}
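The Block introduced in the Select case above exists to keep the qualifier's side effects while still pointing at the lifted module. A source-level sketch of the situation (names are illustrative):

// Why the qualifier of a nested-module selection cannot simply be dropped.
object Enclosing {
  object Nested { val tag = 42 }
}

object FlattenQualifierDemo {
  def enclosing(): Enclosing.type = { println("evaluating qualifier"); Enclosing }

  def main(args: Array[String]): Unit = {
    // `Nested` is a static module that flatten lifts to the top level, and the
    // selection is rewritten to a direct reference. Since `enclosing()` has a
    // side effect, the rewrite keeps it, conceptually as
    // { enclosing(); <lifted Nested> }.tag rather than just <lifted Nested>.tag.
    println(enclosing().Nested.tag)
  }
}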
diff --git a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
index b6dbacaa29..dc321e26ca 100644
--- a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
+++ b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
@@ -10,11 +10,11 @@ package transform
* An InfoTransform contains a compiler phase that transforms trees and symbol infos -- making sure they stay consistent.
* The symbol info is transformed assuming it is consistent right before this phase.
* The info transformation is triggered by Symbol::rawInfo, which caches the results in the symbol's type history.
- * This way sym.info (during an atPhase(p)) can look up what the symbol's info should look like at the beginning of phase p.
+ * This way sym.info (during an enteringPhase(p)) can look up what the symbol's info should look like at the beginning of phase p.
* (If the transformed info had not been stored yet, rawInfo will compute the info by composing the info-transformers
* of the most recent phase before p, up to the transformer of the phase right before p.)
*
- * Concretely, atPhase(p) { sym.info } yields the info *before* phase p has transformed it. Imagine you're a phase and it all makes sense.
+ * Concretely, enteringPhase(p) { sym.info } yields the info *before* phase p has transformed it. Imagine you're a phase and it all makes sense.
*/
trait InfoTransform extends Transform {
import global.{Symbol, Type, InfoTransformer, infoTransformers}
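The enteringPhase semantics in the comment can be pictured with a small toy model of a symbol's type history (purely illustrative; the real compiler chains TypeHistory entries on the symbol): each info transformer appends an entry, and looking an info up "entering" phase p picks the newest entry valid at the start of p.

// Toy model of phase-indexed info lookup (hypothetical data, not compiler API).
object PhaseInfoDemo {
  final case class Entry(validFromPhase: Int, info: String)

  // Newest first: each entry is the result of some info transformer and is
  // valid from the start of the phase after the one that produced it.
  val history = List(
    Entry(validFromPhase = 5, info = "erased info"),
    Entry(validFromPhase = 2, info = "uncurried info"),
    Entry(validFromPhase = 0, info = "typer info")
  )

  // enteringPhase(p) { sym.info }: the info as it looks before p has run.
  def infoEntering(phaseId: Int): String =
    history.find(_.validFromPhase <= phaseId).map(_.info).getOrElse("<no info>")

  def main(args: Array[String]): Unit = {
    println(infoEntering(1)) // typer info
    println(infoEntering(4)) // uncurried info
    println(infoEntering(7)) // erased info
  }
}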
diff --git a/src/compiler/scala/tools/nsc/transform/InlineErasure.scala b/src/compiler/scala/tools/nsc/transform/InlineErasure.scala
index 0af3cf732f..83dbc23014 100644
--- a/src/compiler/scala/tools/nsc/transform/InlineErasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/InlineErasure.scala
@@ -1,9 +1,11 @@
package scala.tools.nsc
package transform
-trait InlineErasure { self: Erasure =>
-
+trait InlineErasure {
+ self: Erasure =>
+
+/**
import global._
import definitions._
-
-} \ No newline at end of file
+ **/
+}
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index 631468dd0c..60815da967 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -143,7 +143,7 @@ abstract class LambdaLift extends InfoTransform {
ss addEntry sym
renamable addEntry sym
changedFreeVars = true
- debuglog("" + sym + " is free in " + enclosure);
+ debuglog("" + sym + " is free in " + enclosure)
if (sym.isVariable) sym setFlag CAPTURED
}
!enclosure.isClass
@@ -161,7 +161,7 @@ abstract class LambdaLift extends InfoTransform {
private val freeVarTraverser = new Traverser {
override def traverse(tree: Tree) {
try { //debug
- val sym = tree.symbol;
+ val sym = tree.symbol
tree match {
case ClassDef(_, _, _, _) =>
liftedDefs(tree.symbol) = Nil
@@ -245,10 +245,10 @@ abstract class LambdaLift extends InfoTransform {
freshen(sym.name + nme.NAME_JOIN_STRING + sym.owner.name + nme.NAME_JOIN_STRING)
} else {
// SI-5652 If the lifted symbol is accessed from an inner class, it will be made public. (where?)
- // Generating a a unique name, mangled with the enclosing class name, avoids a VerifyError
+ // Generating a unique name, mangled with the enclosing class name, avoids a VerifyError
// in the case that a sub-class happens to lift out a method with the *same* name.
- val name = freshen(sym.name + nme.NAME_JOIN_STRING)
- if (originalName.isTermName && !sym.enclClass.isImplClass && calledFromInner(sym)) nme.expandedName(name, sym.enclClass)
+ val name = freshen("" + sym.name + nme.NAME_JOIN_STRING)
+ if (originalName.isTermName && !sym.enclClass.isImplClass && calledFromInner(sym)) nme.expandedName(name.toTermName, sym.enclClass)
else name
}
}
@@ -290,7 +290,7 @@ abstract class LambdaLift extends InfoTransform {
proxies(owner) =
for (fv <- freeValues.toList) yield {
val proxyName = proxyNames.getOrElse(fv, fv.name)
- val proxy = owner.newValue(proxyName, owner.pos, newFlags) setInfo fv.info
+ val proxy = owner.newValue(proxyName.toTermName, owner.pos, newFlags) setInfo fv.info
if (owner.isClass) owner.info.decls enter proxy
proxy
}
@@ -456,7 +456,7 @@ abstract class LambdaLift extends InfoTransform {
}
case arg => arg
}
-
+
/** Wrap expr argument in new *Ref(..) constructor. But try/catch
* is a problem because a throw will clear the stack and post catch
* we would expect the partially-constructed object to be on the stack
@@ -464,11 +464,11 @@ abstract class LambdaLift extends InfoTransform {
* search for "leaf" result expressions where we know it's safe
* to put the new *Ref(..) constructor or, if all else fails, transform
* an expr to { val temp=expr; new *Ref(temp) }.
- * The reason we narrowly look for try/catch in captured var definitions
+ * The reason we narrowly look for try/catch in captured var definitions
* is because other try/catch expressions have already been lifted
* see SI-6863
*/
- def refConstr(expr: Tree): Tree = typer.typedPos(expr.pos) {expr match {
+ def refConstr(expr: Tree): Tree = typer.typedPos(expr.pos)(expr match {
// very simple expressions can be wrapped in a new *Ref(expr) because they can't have
// a try/catch in final expression position.
case Ident(_) | Apply(_, _) | Literal(_) | New(_) | Select(_, _) | Throw(_) | Assign(_, _) | ValDef(_, _, _, _) | Return(_) | EmptyTree =>
@@ -488,9 +488,9 @@ abstract class LambdaLift extends InfoTransform {
debuglog("assigning expr to temp: " + (expr.pos))
val tempSym = currentOwner.newValue(unit.freshTermName("temp"), expr.pos) setInfo expr.tpe
val tempDef = ValDef(tempSym, expr) setPos expr.pos
- val tempRef = Ident(tempSym) setPos expr.pos
+ val tempRef = Ident(tempSym) setPos expr.pos
Block(tempDef, New(sym.tpe, tempRef))
- }}
+ })
def refConstrCase(cdef: CaseDef): CaseDef =
CaseDef(cdef.pat, cdef.guard, refConstr(cdef.body))
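The refConstr logic above corresponds, at source level, to how a captured var is boxed in a heap cell so a nested function can mutate it; the temp-val fallback handles initializers that may contain a try/catch. A hand-rolled sketch using a toy box class (scala.runtime's *Ref classes play this role in real output):

// Boxing a captured var, with the { val temp = expr; new Ref(temp) } fallback.
object CapturedVarDemo {
  final class IntBox(var elem: Int)

  def risky(): Int = try 10 catch { case _: Exception => -1 }

  def main(args: Array[String]): Unit = {
    // `var x = risky(); val inc = () => x += 1` conceptually becomes: x lives
    // in a box the closure can reach; since the initializer has a try/catch,
    // it is bound to a temp first and only then wrapped in the box.
    val xBox = { val temp = risky(); new IntBox(temp) }
    val inc = () => xBox.elem += 1
    inc(); inc()
    println(xBox.elem) // 12
  }
}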
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
index 21213cf9d9..e6c9afb042 100644
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
@@ -68,7 +68,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
curTree = tree
tree match {
-
+
case Block(_, _) =>
val block1 = super.transform(tree)
val Block(stats, expr) = block1
@@ -79,7 +79,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
List(stat)
})
treeCopy.Block(block1, stats1, expr)
-
+
case DefDef(_, _, _, _, _, rhs) => atOwner(tree.symbol) {
val (res, slowPathDef) = if (!sym.owner.isClass && sym.isLazy) {
val enclosingClassOrDummyOrMethod = {
@@ -100,9 +100,9 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
val (rhs1, sDef) = mkLazyDef(enclosingClassOrDummyOrMethod, transform(rhs), idx, sym)
sym.resetFlag((if (lazyUnit(sym)) 0 else LAZY) | ACCESSOR)
(rhs1, sDef)
- } else
+ } else
(transform(rhs), EmptyTree)
-
+
val ddef1 = deriveDefDef(tree)(_ => if (LocalLazyValFinder.find(res)) typed(addBitmapDefs(sym, res)) else res)
if (slowPathDef != EmptyTree) Block(slowPathDef, ddef1) else ddef1
}
@@ -189,10 +189,10 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
case _ => prependStats(bmps, rhs)
}
}
-
+
def mkSlowPathDef(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
stats: List[Tree], retVal: Tree): Tree = {
- val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name), lzyVal.pos, STABLE | PRIVATE)
+ val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name.toTermName), lzyVal.pos, STABLE | PRIVATE)
defSym setInfo MethodType(List(), lzyVal.tpe.resultType)
defSym.owner = lzyVal.owner
debuglog(s"crete slow compute path $defSym with owner ${defSym.owner} for lazy val $lzyVal")
@@ -201,8 +201,8 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
val rhs: Tree = (gen.mkSynchronizedCheck(clazz, cond, syncBody, stats)).changeOwner(currentOwner -> defSym)
DEF(defSym).mkTree(addBitmapDefs(lzyVal, BLOCK(rhs, retVal))) setSymbol defSym
}
-
-
+
+
def mkFastPathBody(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
stats: List[Tree], retVal: Tree): (Tree, Tree) = {
val slowPathDef: Tree = mkSlowPathDef(clazz, lzyVal, cond, syncBody, stats, retVal)
@@ -221,7 +221,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
* As with normal lazy val members (see Mixin), the result will be a tree of the form
* { if ((bitmap&n & MASK) == 0) this.l$compute()
* else l$
- *
+ *
* def l$compute() = { synchronized(enclosing_class_or_dummy) {
* if ((bitmap$n & MASK) == 0) {
* l$ = <rhs>
@@ -278,7 +278,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
bmps(n)
else {
val sym = meth.newVariable(nme.newBitmapName(nme.BITMAP_NORMAL, n), meth.pos).setInfo(ByteClass.tpe)
- beforeTyper {
+ enteringTyper {
sym addAnnotation VolatileAttr
}
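The fast-path/slow-path shape sketched in the comment above can be written out by hand for a single lazy field, ignoring bit packing and the transient/volatile variations; this is an approximation of the double-checked pattern, not the phase's exact output:

// Hand-written double-checked lazy initialization with a bitmap bit.
class LazyCell(compute: () => String) {
  @volatile private[this] var bitmap0: Byte = 0
  private[this] var value0: String = _

  private def value$lzycompute(): String = {
    this.synchronized {
      if ((bitmap0 & 0x01) == 0) {          // re-check under the lock
        value0 = compute()
        bitmap0 = (bitmap0 | 0x01).toByte   // mark the field as initialized
      }
    }
    value0
  }

  // fast path: only take the slow path while the bit is still clear
  def value: String = if ((bitmap0 & 0x01) == 0) value$lzycompute() else value0
}

object LazyCellDemo {
  def main(args: Array[String]): Unit = {
    val cell = new LazyCell(() => { println("computing once"); "hello" })
    println(cell.value)
    println(cell.value)
  }
}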
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index c9c68d080d..988e80aa77 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -27,7 +27,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
private val treatedClassInfos = perRunCaches.newMap[Symbol, Type]() withDefaultValue NoType
/** Map a lazy, mixedin field accessor to it's trait member accessor */
- private val initializer = perRunCaches.newMap[Symbol, Symbol]
+ private val initializer = perRunCaches.newMap[Symbol, Symbol]()
// --------- helper functions -----------------------------------------------
@@ -68,7 +68,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* maps all other types to themselves.
*/
private def toInterface(tp: Type): Type =
- beforeMixin(tp.typeSymbol.toInterface).tpe
+ enteringMixin(tp.typeSymbol.toInterface).tpe
private def isFieldWithBitmap(field: Symbol) = {
field.info // ensure that nested objects are transformed
@@ -102,7 +102,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
private val toInterfaceMap = new TypeMap {
def apply(tp: Type): Type = mapOver( tp match {
case TypeRef(pre, sym, args) if sym.isImplClass =>
- typeRef(pre, beforeMixin(sym.toInterface), args)
+ typeRef(pre, enteringMixin(sym.toInterface), args)
case _ => tp
})
}
@@ -119,14 +119,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* @param mixinClass The mixin class that produced the superaccessor
*/
private def rebindSuper(base: Symbol, member: Symbol, mixinClass: Symbol): Symbol =
- afterPickler {
+ exitingPickler {
var bcs = base.info.baseClasses.dropWhile(mixinClass != _).tail
var sym: Symbol = NoSymbol
debuglog("starting rebindsuper " + base + " " + member + ":" + member.tpe +
" " + mixinClass + " " + base.info.baseClasses + "/" + bcs)
while (!bcs.isEmpty && sym == NoSymbol) {
if (settings.debug.value) {
- val other = bcs.head.info.nonPrivateDecl(member.name);
+ val other = bcs.head.info.nonPrivateDecl(member.name)
debuglog("rebindsuper " + bcs.head + " " + other + " " + other.tpe +
" " + other.isDeferred)
}
@@ -148,7 +148,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
sym =>
isConcreteAccessor(sym) &&
!sym.hasFlag(MIXEDIN) &&
- matchesType(sym.tpe, member.tpe, true))
+ matchesType(sym.tpe, member.tpe, alwaysMatchSimple = true))
}
( bcs.head != member.owner
&& (hasOverridingAccessor(bcs.head) || isOverriddenAccessor(member, bcs.tail))
@@ -165,7 +165,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
addMember(clazz, cloneBeforeErasure(mixinClass, mixinMember, clazz))
def cloneBeforeErasure(mixinClass: Symbol, mixinMember: Symbol, clazz: Symbol): Symbol = {
- val newSym = beforeErasure {
+ val newSym = enteringErasure {
// since we used `mixinMember` from the interface that represents the trait that's
// being mixed in, have to instantiate the interface type params (that may occur in mixinMember's
// info) as they are seen from the class. We can't use the member that we get from the
@@ -197,9 +197,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - lazy fields don't get a setter.
*/
def addLateInterfaceMembers(clazz: Symbol) {
- def makeConcrete(member: Symbol) =
- member setPos clazz.pos resetFlag (DEFERRED | lateDEFERRED)
-
if (treatedClassInfos(clazz) != clazz.info) {
treatedClassInfos(clazz) = clazz.info
assert(phase == currentRun.mixinPhase, phase)
@@ -210,14 +207,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// println("creating new getter for "+ field +" : "+ field.info +" at "+ field.locationString+(field hasFlag MUTABLE))
val newFlags = field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED | ( if (field.isMutable) 0 else STABLE )
// TODO preserve pre-erasure info?
- clazz.newMethod(nme.getterName(field.name), field.pos, newFlags) setInfo MethodType(Nil, field.info)
+ clazz.newMethod(nme.getterName(field.name.toTermName), field.pos, newFlags) setInfo MethodType(Nil, field.info)
}
/** Create a new setter. Setters are never private or local. They are
* always accessors and deferred. */
def newSetter(field: Symbol): Symbol = {
//println("creating new setter for "+field+field.locationString+(field hasFlag MUTABLE))
- val setterName = nme.getterToSetter(nme.getterName(field.name))
+ val setterName = nme.getterToSetter(nme.getterName(field.name.toTermName))
val newFlags = field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED
val setter = clazz.newMethod(setterName, field.pos, newFlags)
// TODO preserve pre-erasure info?
@@ -245,7 +242,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
}
}
- debuglog("new defs of " + clazz + " = " + clazz.info.decls);
+ debuglog("new defs of " + clazz + " = " + clazz.info.decls)
}
}
@@ -276,7 +273,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val imember = member overriddenSymbol mixinInterface
imember overridingSymbol clazz match {
case NoSymbol =>
- if (clazz.info.findMember(member.name, 0, lateDEFERRED, false).alternatives contains imember)
+ if (clazz.info.findMember(member.name, 0, lateDEFERRED, stableOnly = false).alternatives contains imember)
cloneAndAddMixinMember(mixinInterface, imember).asInstanceOf[TermSymbol] setAlias member
case _ =>
}
@@ -292,7 +289,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
for (mixinMember <- mixinClass.info.decls) {
if (isConcreteAccessor(mixinMember)) {
if (isOverriddenAccessor(mixinMember, clazz.info.baseClasses))
- debugwarn("!!! is overridden val: "+mixinMember.fullLocationString)
+ devWarning(s"Overridden concrete accessor: ${mixinMember.fullLocationString}")
else {
// mixin field accessors
val mixedInAccessor = cloneAndAddMixinMember(mixinClass, mixinMember)
@@ -311,14 +308,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// mixinMember is a value of type unit. No field needed
;
case _ => // otherwise mixin a field as well
- // atPhase: the private field is moved to the implementation class by erasure,
+ // enteringPhase: the private field is moved to the implementation class by erasure,
// so it can no longer be found in the mixinMember's owner (the trait)
- val accessed = beforePickler(mixinMember.accessed)
+ val accessed = enteringPickler(mixinMember.accessed)
// #3857, need to retain info before erasure when cloning (since cloning only
// carries over the current entry in the type history)
- val sym = beforeErasure {
+ val sym = enteringErasure {
// so we have a type history entry before erasure
- clazz.newValue(nme.getterToLocal(mixinMember.name), mixinMember.pos).setInfo(mixinMember.tpe.resultType)
+ clazz.newValue(nme.getterToLocal(mixinMember.name.toTermName), mixinMember.pos).setInfo(mixinMember.tpe.resultType)
}
sym updateInfo mixinMember.tpe.resultType // info at current phase
@@ -379,35 +376,34 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
var parents1 = parents
var decls1 = decls
if (!clazz.isPackageClass) {
- afterMixin(clazz.owner.info)
+ exitingMixin(clazz.owner.info)
if (clazz.isImplClass) {
clazz setFlag lateMODULE
var sourceModule = clazz.owner.info.decls.lookup(sym.name.toTermName)
- if (sourceModule != NoSymbol) {
- sourceModule setPos sym.pos
- if (sourceModule.flags != MODULE) {
- log("!!! Directly setting sourceModule flags from %s to MODULE".format(flagsToString(sourceModule.flags)))
- sourceModule.flags = MODULE
- }
- }
- else {
+ if (sourceModule == NoSymbol) {
sourceModule = (
clazz.owner.newModuleSymbol(sym.name.toTermName, sym.pos, MODULE)
setModuleClass sym.asInstanceOf[ClassSymbol]
)
clazz.owner.info.decls enter sourceModule
}
+ else {
+ sourceModule setPos sym.pos
+ if (sourceModule.flags != MODULE) {
+ log("!!! Directly setting sourceModule flags from %s to MODULE".format(sourceModule.flagString))
+ sourceModule.flags = MODULE
+ }
+ }
sourceModule setInfo sym.tpe
// Companion module isn't visible for anonymous class at this point anyway
- assert(clazz.sourceModule != NoSymbol || clazz.isAnonymousClass,
- clazz + " has no sourceModule: sym = " + sym + " sym.tpe = " + sym.tpe)
+ assert(clazz.sourceModule != NoSymbol || clazz.isAnonymousClass, s"$clazz has no sourceModule: $sym ${sym.tpe}")
parents1 = List()
decls1 = newScopeWith(decls.toList filter isImplementedStatically: _*)
} else if (!parents.isEmpty) {
parents1 = parents.head :: (parents.tail map toInterface)
}
}
- //decls1 = atPhase(phase.next)(newScopeWith(decls1.toList: _*))//debug
+ //decls1 = enteringPhase(phase.next)(newScopeWith(decls1.toList: _*))//debug
if ((parents1 eq parents) && (decls1 eq decls)) tp
else ClassInfoType(parents1, decls1, clazz)
@@ -437,7 +433,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
tree match {
case Assign(lhs, rhs) => traverse(rhs) // assignments don't count
case _ =>
- if (tree.hasSymbol && tree.symbol != NoSymbol) {
+ if (tree.hasSymbolField && tree.symbol != NoSymbol) {
val sym = tree.symbol
if ((sym.hasAccessorFlag || (sym.isTerm && !sym.isMethod))
&& sym.isPrivate
@@ -515,7 +511,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - create a new method definition that also has a `self` parameter
* (which comes first) Iuli: this position is assumed by tail call elimination
* on a different receiver. Storing a new 'this' assumes it is located at
- * index 0 in the local variable table. See 'STORE_THIS' and GenJVM/GenMSIL.
+ * index 0 in the local variable table. See 'STORE_THIS' and GenASM.
* - Map implementation class types in type-apply's to their interfaces
* - Remove all fields in implementation classes
*/
@@ -524,7 +520,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
tree match {
case Template(parents, self, body) =>
localTyper = erasure.newTyper(rootContext.make(tree, currentOwner))
- afterMixin(currentOwner.owner.info)//todo: needed?
+ exitingMixin(currentOwner.owner.info)//todo: needed?
if (!currentOwner.isTrait && !isPrimitiveValueClass(currentOwner))
addMixedinMembers(currentOwner, unit)
@@ -543,17 +539,23 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
else EmptyTree
}
else {
- if (currentOwner.isTrait && sym.isSetter && !beforePickler(sym.isDeferred)) {
+ if (currentOwner.isTrait && sym.isSetter && !enteringPickler(sym.isDeferred)) {
sym.addAnnotation(TraitSetterAnnotationClass)
}
tree
}
+ // !!! What is this doing, and why is it only looking for exactly
+ // one type parameter? It would seem to be
+ // "Map implementation class types in type-apply's to their interfaces"
+ // from the comment on preTransform, but is there some way we should know
+ // that impl class types in type applies can only appear in single
+ // type parameter type constructors?
case Apply(tapp @ TypeApply(fn, List(arg)), List()) =>
if (arg.tpe.typeSymbol.isImplClass) {
val ifacetpe = toInterface(arg.tpe)
- arg.tpe = ifacetpe
- tapp.tpe = MethodType(List(), ifacetpe)
- tree.tpe = ifacetpe
+ arg setType ifacetpe
+ tapp setType MethodType(Nil, ifacetpe)
+ tree setType ifacetpe
}
tree
case ValDef(_, _, _, _) if currentOwner.isImplClass =>
@@ -590,8 +592,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
tree
}
- /** Create a static reference to given symbol <code>sym</code> of the
- * form <code>M.sym</code> where M is the symbol's implementation module.
+ /** Create a static reference to given symbol `sym` of the
+ * form `M.sym` where M is the symbol's implementation module.
*/
private def staticRef(sym: Symbol): Tree = {
sym.owner.info //todo: needed?
@@ -673,8 +675,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def addValDef(sym: Symbol, rhs: Tree = EmptyTree) = addDef(position(sym), ValDef(sym, rhs))
/** Add `newdefs` to `stats`, removing any abstract method definitions
- * in <code>stats</code> that are matched by some symbol defined in
- * <code>newDefs</code>.
+ * in `stats` that are matched by some symbol defined in
+ * `newDefs`.
*/
def add(stats: List[Tree], newDefs: List[Tree]) = {
val newSyms = newDefs map (_.symbol)
@@ -702,7 +704,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val rhs0 = (Super(clazz, tpnme.EMPTY) DOT stat.symbol.alias)(vparams map (v => Ident(v.symbol)): _*)
val rhs1 = localTyped(stat.pos, rhs0, stat.symbol.tpe.resultType)
- deriveDefDef(stat)(_ => beforeMixin(transform(rhs1)))
+ deriveDefDef(stat)(_ => enteringMixin(transform(rhs1)))
case _ =>
stat
}
@@ -713,7 +715,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
*/
def bitmapFor(clazz0: Symbol, offset: Int, field: Symbol): Symbol = {
val category = bitmapCategory(field)
- val bitmapName = nme.newBitmapName(category, offset / flagsPerBitmap(field))
+ val bitmapName = nme.newBitmapName(category, offset / flagsPerBitmap(field)).toTermName
val sym = clazz0.info.decl(bitmapName)
assert(!sym.isOverloaded, sym)
@@ -721,7 +723,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def createBitmap: Symbol = {
val bitmapKind = bitmapKindForCategory(category)
val sym = clazz0.newVariable(bitmapName, clazz0.pos) setInfo bitmapKind.tpe
- beforeTyper(sym addAnnotation VolatileAttr)
+ enteringTyper(sym addAnnotation VolatileAttr)
category match {
case nme.BITMAP_TRANSIENT | nme.BITMAP_CHECKINIT_TRANSIENT => sym addAnnotation TransientAttr
@@ -777,7 +779,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def mkSlowPathDef(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
stats: List[Tree], retVal: Tree, attrThis: Tree, args: List[Tree]): Symbol = {
- val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name), lzyVal.pos, PRIVATE)
+ val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name.toTermName), lzyVal.pos, PRIVATE)
val params = defSym newSyntheticValueParams args.map(_.symbol.tpe)
defSym setInfoAndEnter MethodType(params, lzyVal.tpe.resultType)
val rhs: Tree = (gen.mkSynchronizedCheck(attrThis, cond, syncBody, stats)).changeOwner(currentOwner -> defSym)
@@ -803,7 +805,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
*/
class TreeSymSubstituterWithCopying(from: List[Symbol], to: List[Symbol]) extends TreeSymSubstituter(from, to) {
override def transform(tree: Tree): Tree =
- if (tree.hasSymbol && from.contains(tree.symbol))
+ if (tree.hasSymbolField && from.contains(tree.symbol))
super.transform(tree.duplicate)
else super.transform(tree.duplicate)
@@ -819,8 +821,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* Private fields used only in this initializer are subsequently set to null.
*
* @param clazz The class symbol
+ * @param lzyVal The symbol of this lazy field
* @param init The tree which initializes the field ( f = <rhs> )
- * @param fieldSym The symbol of this lazy field
* @param offset The offset of this field in the flags bitmap
*
* The result will be a tree of the form
@@ -853,7 +855,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val bitmapSym = bitmapFor(clazz, offset, lzyVal)
val kind = bitmapKind(lzyVal)
val mask = maskForOffset(offset, lzyVal, kind)
- def cond = mkTest(clazz, mask, bitmapSym, true, kind)
+ def cond = mkTest(clazz, mask, bitmapSym, equalToZero = true, kind)
val nulls = lazyValNullables(lzyVal).toList sortBy (_.id) map nullify
def syncBody = init ::: List(mkSetFlag(clazz, offset, lzyVal, kind), UNIT)
@@ -870,7 +872,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val cond = Apply(Select(moduleVarRef, Object_eq), List(NULL))
mkFastPathBody(clazz, moduleSym, cond, List(assign), List(NULL), returnTree, attrThis, args)
case _ =>
- abort("Invalid getter " + rhs + " for module in class " + clazz)
+ abort(s"Invalid getter $rhs for module in $clazz")
}
def mkCheckedAccessor(clazz: Symbol, retVal: Tree, offset: Int, pos: Position, fieldSym: Symbol): Tree = {
@@ -878,11 +880,11 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val bitmapSym = bitmapFor(clazz, offset, sym)
val kind = bitmapKind(sym)
val mask = maskForOffset(offset, sym, kind)
- val msg = "Uninitialized field: " + unit.source + ": " + pos.line
+ val msg = s"Uninitialized field: ${unit.source}: ${pos.line}"
val result =
- IF (mkTest(clazz, mask, bitmapSym, false, kind)) .
+ IF (mkTest(clazz, mask, bitmapSym, equalToZero = false, kind)) .
THEN (retVal) .
- ELSE (THROW(UninitializedErrorClass, LIT(msg)))
+ ELSE (Throw(NewFromConstructor(UninitializedFieldConstructor, LIT(msg))))
typedPos(pos)(BLOCK(result, retVal))
}
@@ -978,12 +980,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def addInitBits(clazz: Symbol, rhs: Tree): Tree =
new AddInitBitsTransformer(clazz) transform rhs
- def isCheckInitField(field: Symbol) =
- needsInitFlag(field) && !field.isDeferred
-
- def superClassesToCheck(clazz: Symbol) =
- clazz.ancestors filterNot (_ hasFlag TRAIT | JAVA)
-
// begin addNewDefs
/** Fill the map from fields to offset numbers.
@@ -1044,16 +1040,17 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
// if class is not a trait add accessor definitions
else if (!clazz.isTrait) {
+ // This needs to be a def to avoid sharing trees
+ def accessedRef = accessedReference(sym)
if (sym.hasAccessorFlag && (!sym.isDeferred || sym.hasFlag(lateDEFERRED))) {
// add accessor definitions
addDefDef(sym, {
- val accessedRef = accessedReference(sym)
if (sym.isSetter) {
if (isOverriddenSetter(sym)) UNIT
else accessedRef match {
- case Literal(_) => accessedRef
- case _ =>
- val init = Assign(accessedRef, Ident(sym.firstParam))
+ case ref @ Literal(_) => ref
+ case ref =>
+ val init = Assign(ref, Ident(sym.firstParam))
val getter = sym.getter(clazz)
if (!needsInitFlag(getter)) init
@@ -1068,11 +1065,13 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
else if (sym.isModule && !(sym hasFlag LIFTED | BRIDGE)) {
// add modules
- val vdef = gen.mkModuleVarDef(sym)
- addDef(position(sym), vdef)
+ val vsym = sym.owner.newModuleVarSymbol(sym)
+ addDef(position(sym), ValDef(vsym))
- val rhs = gen.newModule(sym, vdef.symbol.tpe)
- val assignAndRet = gen.mkAssignAndReturn(vdef.symbol, rhs)
+ // !!! TODO - unravel the enormous duplication between this code and
+ // eliminateModuleDefs in RefChecks.
+ val rhs = gen.newModule(sym, vsym.tpe)
+ val assignAndRet = gen.mkAssignAndReturn(vsym, rhs)
val attrThis = gen.mkAttributedThis(clazz)
val rhs1 = mkInnerClassAccessorDoubleChecked(attrThis, assignAndRet, sym, List())
@@ -1090,7 +1089,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// add forwarders
assert(sym.alias != NoSymbol, sym)
// debuglog("New forwarder: " + sym.defString + " => " + sym.alias.defString)
- if (!sym.isTermMacro) addDefDef(sym, Apply(staticRef(sym.alias), gen.mkAttributedThis(clazz) :: sym.paramss.head.map(Ident)))
+ if (!sym.isMacro) addDefDef(sym, Apply(staticRef(sym.alias), gen.mkAttributedThis(clazz) :: sym.paramss.head.map(Ident)))
}
}
}
@@ -1135,7 +1134,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// change every node type that refers to an implementation class to its
// corresponding interface, unless the node's symbol is an implementation class.
if (tree.tpe.typeSymbol.isImplClass && ((sym eq null) || !sym.isImplClass))
- tree.tpe = toInterface(tree.tpe)
+ tree modifyType toInterface
tree match {
case templ @ Template(parents, self, body) =>
@@ -1151,9 +1150,9 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
qual
case Apply(Select(qual, _), args) =>
- /** Changes <code>qual.m(args)</code> where m refers to an implementation
+ /** Changes `qual.m(args)` where m refers to an implementation
* class method to Q.m(S, args) where Q is the implementation module of
- * <code>m</code> and S is the self parameter for the call, which
+ * `m` and S is the self parameter for the call, which
* is determined as follows:
* - if qual != super, qual itself
* - if qual == super, and we are in an implementation class,
@@ -1164,7 +1163,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def implSym = implClass(sym.owner).info.member(sym.name)
assert(target ne NoSymbol,
List(sym + ":", sym.tpe, sym.owner, implClass(sym.owner), implSym,
- beforePrevPhase(implSym.tpe), phase) mkString " "
+ enteringPrevPhase(implSym.tpe), phase) mkString " "
)
typedPos(tree.pos)(Apply(staticRef(target), transformSuper(qual) :: args))
}
@@ -1193,7 +1192,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
typedPos(tree.pos)((transformSuper(qual) DOT sym1)())
}
else {
- staticCall(beforePrevPhase(sym.overridingSymbol(implClass(sym.owner))))
+ staticCall(enteringPrevPhase(sym.overridingSymbol(implClass(sym.owner))))
}
}
else {
@@ -1211,7 +1210,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
tree
case Select(qual, name) if sym.owner.isImplClass && !isStaticOnly(sym) =>
- assert(!sym.isMethod, "no method allowed here: %s%s %s".format(sym, sym.isImplOnly, flagsToString(sym.flags)))
+ assert(!sym.isMethod, "no method allowed here: %s%s %s".format(sym, sym.isImplOnly, sym.flagString))
// refer to fields in some implementation class via an abstract
// getter in the interface.
val iface = toInterface(sym.owner.tpe).typeSymbol
@@ -1258,7 +1257,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val tree1 = super.transform(preTransform(tree))
// localTyper needed when not flattening inner classes. parts after an
// inner class will otherwise be typechecked with a wrong scope
- try afterMixin(postTransform(tree1))
+ try exitingMixin(postTransform(tree1))
finally localTyper = saved
}
}
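Much of this file materializes trait state in the inheriting class: concrete accessors and the backing field are added to the class, while the trait keeps only deferred accessors (the setter tagged with the trait-setter annotation, as in the code above). A hand-written approximation of that encoding, with illustrative names and leaving out the implementation class:

// What `trait Counter { var count: Int = 0 }` roughly decomposes into.
trait Counter {
  def count: Int              // deferred getter left in the interface
  def count_=(n: Int): Unit   // deferred setter, filled in by mixin
}

class CounterImpl extends Counter {
  private[this] var count0: Int = 0    // field added to the class
  def count: Int = count0
  def count_=(n: Int): Unit = count0 = n
}

object MixinDemo {
  def main(args: Array[String]): Unit = {
    val c = new CounterImpl
    c.count = 3
    println(c.count) // 3
  }
}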
diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
index 67be81bd3c..2610679542 100644
--- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
@@ -31,11 +31,11 @@ abstract class OverridingPairs {
private val self = base.thisType
/** Symbols to exclude: Here these are constructors, private locals,
- * and bridges. But it may be refined in subclasses.
+ * and hidden symbols, including bridges. But it may be refined in subclasses.
*
*/
protected def exclude(sym: Symbol): Boolean =
- sym.isConstructor || sym.isPrivateLocal || sym.hasFlag(BRIDGE)
+ sym.isConstructor || sym.isPrivateLocal || sym.isArtifact
/** The parents of base (may also be refined).
*/
@@ -86,10 +86,10 @@ abstract class OverridingPairs {
{ def fillDecls(bcs: List[Symbol], deferredflag: Int) {
if (!bcs.isEmpty) {
fillDecls(bcs.tail, deferredflag)
- var e = bcs.head.info.decls.elems;
+ var e = bcs.head.info.decls.elems
while (e ne null) {
if (e.sym.getFlag(DEFERRED) == deferredflag.toLong && !exclude(e.sym))
- decls enter e.sym;
+ decls enter e.sym
e = e.next
}
}
@@ -134,7 +134,7 @@ abstract class OverridingPairs {
private val subParents = new Array[BitSet](size)
{ for (i <- List.range(0, size))
- subParents(i) = new BitSet(size);
+ subParents(i) = new BitSet(size)
for (p <- parents) {
val pIndex = index(p.typeSymbol)
if (pIndex >= 0)
@@ -190,7 +190,7 @@ abstract class OverridingPairs {
if (nextEntry ne null) {
do {
do {
- nextEntry = decls.lookupNextEntry(nextEntry);
+ nextEntry = decls.lookupNextEntry(nextEntry)
/* DEBUG
if ((nextEntry ne null) &&
!(nextEntry.sym hasFlag PRIVATE) &&
@@ -208,19 +208,19 @@ abstract class OverridingPairs {
// overriding and nextEntry.sym
} while ((nextEntry ne null) && (hasCommonParentAsSubclass(overriding, nextEntry.sym)))
if (nextEntry ne null) {
- overridden = nextEntry.sym;
+ overridden = nextEntry.sym
//Console.println("yield: " + overriding + overriding.locationString + " / " + overridden + overridden.locationString);//DEBUG
} else {
do {
curEntry = curEntry.next
- } while ((curEntry ne null) && (visited contains curEntry));
+ } while ((curEntry ne null) && (visited contains curEntry))
nextEntry = curEntry
- next
+ next()
}
}
}
}
- next
+ next()
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/PostErasure.scala b/src/compiler/scala/tools/nsc/transform/PostErasure.scala
index 3ef32caa2c..2a86d711f1 100644
--- a/src/compiler/scala/tools/nsc/transform/PostErasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/PostErasure.scala
@@ -9,10 +9,10 @@ package transform
* performs peephole optimizations.
*/
trait PostErasure extends InfoTransform with TypingTransformers {
-
val global: Global
+
import global._
- import definitions._
+ import treeInfo._
val phaseName: String = "posterasure"
@@ -21,51 +21,33 @@ trait PostErasure extends InfoTransform with TypingTransformers {
object elimErasedValueType extends TypeMap {
def apply(tp: Type) = tp match {
- case ConstantType(Constant(tp: Type)) =>
- ConstantType(Constant(apply(tp)))
- case ErasedValueType(tref) =>
- atPhase(currentRun.erasurePhase)(erasure.erasedValueClassArg(tref))
- case _ => mapOver(tp)
+ case ConstantType(Constant(tp: Type)) => ConstantType(Constant(apply(tp)))
+ case ErasedValueType(tref) => enteringErasure(erasure.erasedValueClassArg(tref))
+ case _ => mapOver(tp)
}
}
def transformInfo(sym: Symbol, tp: Type) = elimErasedValueType(tp)
class PostErasureTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+ override def transform(tree: Tree) = {
+ def finish(res: Tree) = logResult(s"Posterasure reduction\n Old: $tree\n New")(res)
+
+ /** We use the name of the operation being performed and not the symbol
+ * itself because the symbol hails from the boxed class, and this transformation
+ * exists to operate directly on the values. So we are for instance looking
+ * up == on an lhs of type Int, whereas the symbol which has been passed in
+ * is from java.lang.Integer.
+ */
+ def binop(lhs: Tree, op: Symbol, rhs: Tree) =
+ finish(localTyper typed (Apply(Select(lhs, op.name) setPos tree.pos, rhs :: Nil) setPos tree.pos))
- override def transform(tree: Tree) =
super.transform(tree) setType elimErasedValueType(tree.tpe) match {
- case // new C(arg).underlying ==> arg
- Apply(sel @ Select(
- Apply(Select(New(tpt), nme.CONSTRUCTOR), List(arg)),
- acc), List())
- if atPhase(currentRun.erasurePhase) {
- tpt.tpe.typeSymbol.isDerivedValueClass &&
- sel.symbol == tpt.tpe.typeSymbol.derivedValueClassUnbox
- } =>
- if (settings.debug.value) log("Removing "+tree+" -> "+arg)
- arg
- case // new C(arg1) == new C(arg2) ==> arg1 == arg2
- Apply(sel @ Select(
- Apply(Select(New(tpt1), nme.CONSTRUCTOR), List(arg1)),
- cmp),
- List(Apply(Select(New(tpt2), nme.CONSTRUCTOR), List(arg2))))
- if atPhase(currentRun.erasurePhase) {
- tpt1.tpe.typeSymbol.isDerivedValueClass &&
- (sel.symbol == Object_== || sel.symbol == Object_!=) &&
- tpt2.tpe.typeSymbol == tpt1.tpe.typeSymbol
- } =>
- val result = Apply(Select(arg1, cmp) setPos sel.pos, List(arg2)) setPos tree.pos
- log("shortcircuiting equality "+tree+" -> "+result)
- localTyper.typed(result)
-
- case // arg.asInstanceOf[T] ==> arg if arg.tpe == T
- Apply(TypeApply(cast @ Select(arg, asinstanceof), List(tpt)), List())
- if cast.symbol == Object_asInstanceOf && arg.tpe =:= tpt.tpe => // !!! <:< ?
- if (settings.debug.value) log("Shortening "+tree+" -> "+arg)
- arg
- case tree1 =>
- tree1
+ case AsInstanceOf(v, tpe) if v.tpe <:< tpe => finish(v) // x.asInstanceOf[X] ==> x
+ case ValueClass.BoxAndUnbox(v) => finish(v) // (new B(v)).unbox ==> v
+ case ValueClass.BoxAndCompare(v1, op, v2) => binop(v1, op, v2) // new B(v1) == new B(v2) ==> v1 == v2
+ case tree => tree
}
+ }
}
}
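The three extractor cases above keep value-class values unboxed across the obvious round trips. In source terms (illustrative only; AsInstanceOf, ValueClass.BoxAndUnbox and ValueClass.BoxAndCompare are tree patterns inside the compiler, not user API), they reduce shapes like the following:

// Redundant box/unbox shapes for a value class that posterasure reduces.
class Price(val cents: Long) extends AnyVal

object PostErasureDemo {
  def main(args: Array[String]): Unit = {
    val cents = 199L

    // (new B(v)).unbox ==> v : wrap then immediately unwrap
    val roundTrip: Long = new Price(cents).cents

    // new B(v1) == new B(v2) ==> v1 == v2 : compare the underlying values
    val sameValue = new Price(cents) == new Price(199L)

    println(roundTrip) // 199
    println(sameValue) // true
  }
}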
diff --git a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
index 44d8860916..cffb483072 100644
--- a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
+++ b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
@@ -11,9 +11,8 @@ package transform
abstract class SampleTransform extends Transform {
// inherits abstract value `global` and class `Phase` from Transform
- import global._ // the global environment
- import definitions._ // standard classes and methods
- import typer.{typed, atOwner} // methods to type trees
+ import global._ // the global environment
+ import typer.typed // method to type trees
/** the following two members override abstract members in Transform */
val phaseName: String = "sample-phase"
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 232148676c..0cd7f516ef 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -10,6 +10,7 @@ import scala.tools.nsc.symtab.Flags
import scala.collection.{ mutable, immutable }
import scala.language.postfixOps
import scala.language.existentials
+import scala.annotation.tailrec
/** Specialize code on types.
*
@@ -101,7 +102,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Concrete methods that use a specialized type, or override such methods. */
private val concreteSpecMethods = perRunCaches.newWeakSet[Symbol]()
- private def specializedTypes(tps: List[Symbol]) = tps filter (_.isSpecialized)
private def specializedOn(sym: Symbol): List[Symbol] = {
sym getAnnotation SpecializedClass match {
case Some(AnnotationInfo(_, Nil, _)) => specializableTypes.map(_.typeSymbol)
@@ -119,6 +119,22 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
+ @annotation.tailrec private def findSymbol[T](candidates: List[T], f: T => Symbol): Symbol = {
+ if (candidates.isEmpty) NoSymbol
+ else f(candidates.head) match {
+ case NoSymbol => findSymbol(candidates.tail, f)
+ case sym => sym
+ }
+ }
+ private def hasNewParents(tree: Tree) = {
+ val parents = tree.symbol.info.parents
+ val prev = enteringPrevPhase(tree.symbol.info.parents)
+ (parents != prev) && {
+ debuglog(s"$tree parents changed from: $prev to: $parents")
+ true
+ }
+ }
+
// If we replace `isBoundedGeneric` with (tp <:< AnyRefClass.tpe),
// then pos/spec-List.scala fails - why? Does this kind of check fail
// for similar reasons? Does `sym.isAbstractType` make a difference?
@@ -169,16 +185,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- /** Returns the generic class that was specialized to 'sClass', or
- * 'sClass' itself if sClass is not a specialized subclass.
- */
- def genericClass(sClass: Symbol): Symbol =
- if (sClass.isSpecialized) sClass.superClass
- else sClass
-
case class Overload(sym: Symbol, env: TypeEnv) {
override def toString = "specialized overload " + sym + " in " + env
- def matchesSym(other: Symbol) = sym.tpe =:= other.tpe
+ def matchesSym(sym1: Symbol) = sym.info =:= sym1.info
def matchesEnv(env1: TypeEnv) = TypeEnv.includes(env, env1)
}
private def newOverload(method: Symbol, specializedMethod: Symbol, env: TypeEnv) = {
@@ -207,8 +216,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* type bounds of other @specialized type parameters (and not in its result type).
*/
def degenerate = false
-
- def isAccessor = false
}
/** Symbol is a special overloaded method of 'original', in the environment env. */
@@ -226,11 +233,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def target = t
}
- /** Symbol is a specialized accessor for the `target` field. */
- case class SpecializedAccessor(target: Symbol) extends SpecializedInfo {
- override def isAccessor = true
+ /** Symbol is a special overload of the super accessor. */
+ case class SpecialSuperAccessor(t: Symbol) extends SpecializedInfo {
+ def target = t
}
+ /** Symbol is a specialized accessor for the `target` field. */
+ case class SpecializedAccessor(target: Symbol) extends SpecializedInfo { }
+
/** Symbol is a specialized method whose body should be the target's method body. */
case class Implementation(target: Symbol) extends SpecializedInfo
@@ -268,9 +278,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def specializedParams(sym: Symbol): List[Symbol] =
sym.info.typeParams filter (_.isSpecialized)
- def splitParams(tps: List[Symbol]) =
- tps partition (_.isSpecialized)
-
/** Given an original class symbol and a list of types its type parameters are instantiated at
* returns a list of type parameters that should remain in the TypeRef when instantiating a
* specialized type.
@@ -317,11 +324,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
private def specializedName(name: Name, types1: List[Type], types2: List[Type]): TermName = {
if (nme.INITIALIZER == name || (types1.isEmpty && types2.isEmpty))
- name
+ name.toTermName
else if (nme.isSetterName(name))
- nme.getterToSetter(specializedName(nme.setterToGetter(name), types1, types2))
+ nme.getterToSetter(specializedName(nme.setterToGetter(name.toTermName), types1, types2))
else if (nme.isLocalName(name))
- nme.getterToLocal(specializedName(nme.localToGetter(name), types1, types2))
+ nme.getterToLocal(specializedName(nme.localToGetter(name.toTermName), types1, types2))
else {
val (base, cs, ms) = nme.splitSpecializedName(name)
newTermName(base.toString + "$"
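The string assembled here is the familiar $mc...$sp suffix seen in bytecode and stack traces. A small way to observe it (the exact suffix characters depend on the specialized types and are an implementation detail of this mangling):

// Listing the mangled names of specialized variants via Java reflection.
class Box[@specialized(Int, Double) T](val value: T) {
  def get: T = value
}

object SpecializedNamesDemo {
  def main(args: Array[String]): Unit = {
    // Expected to include names shaped like get, get$mcD$sp, get$mcI$sp.
    classOf[Box[_]].getMethods.toList
      .map(_.getName)
      .filter(_.startsWith("get"))
      .sorted
      .foreach(println)
  }
}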
@@ -397,11 +404,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case _ => false
})
def specializedTypeVars(tpes: List[Type]): immutable.Set[Symbol] = {
- val buf = Set.newBuilder[Symbol]
- tpes foreach (tp => buf ++= specializedTypeVars(tp))
- buf.result
+ @tailrec def loop(result: immutable.Set[Symbol], xs: List[Type]): immutable.Set[Symbol] = {
+ if (xs.isEmpty) result
+ else loop(result ++ specializedTypeVars(xs.head), xs.tail)
+ }
+ loop(immutable.Set.empty, tpes)
}
- def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] = beforeTyper(specializedTypeVars(sym.info))
+ def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] = (
+ if (definitions.neverHasTypeParameters(sym)) immutable.Set.empty
+ else enteringTyper(specializedTypeVars(sym.info))
+ )
/** Return the set of @specialized type variables mentioned by the given type.
* It only counts type variables that appear:
@@ -412,7 +424,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def specializedTypeVars(tpe: Type): immutable.Set[Symbol] = tpe match {
case TypeRef(pre, sym, args) =>
if (sym.isAliasType)
- specializedTypeVars(tpe.normalize)
+ specializedTypeVars(tpe.dealiasWiden)
else if (sym.isTypeParameter && sym.isSpecialized || (sym.isTypeSkolem && sym.deSkolemize.isSpecialized))
Set(sym)
else if (sym == ArrayClass)
@@ -430,7 +442,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case AnnotatedType(_, tp, _) => specializedTypeVars(tp)
case TypeBounds(lo, hi) => specializedTypeVars(lo :: hi :: Nil)
case RefinedType(parents, _) => parents flatMap specializedTypeVars toSet
- case _ => Set()
+ case _ => immutable.Set.empty
}
/** Returns the type parameter in the specialized class `sClass` that corresponds to type parameter
@@ -525,7 +537,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def cloneInSpecializedClass(member: Symbol, flagFn: Long => Long, newName: Name = null) =
member.cloneSymbol(sClass, flagFn(member.flags | SPECIALIZED), newName)
- sClass.sourceFile = clazz.sourceFile
+ sClass.associatedFile = clazz.sourceFile
currentRun.symSource(sClass) = clazz.sourceFile // needed later on by mixin
val env = mapAnyRefsInSpecSym(env0, clazz, sClass)
@@ -537,7 +549,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
var newClassTParams: List[Symbol] = Nil // unspecialized type parameters of 'specializedClass' (cloned)
// has to be a val in order to be computed early. It is later called
- // within 'atPhase(next)', which would lead to an infinite cycle otherwise
+ // within 'enteringPhase(next)', which would lead to an infinite cycle otherwise
val specializedInfoType: Type = {
oldClassTParams = survivingParams(clazz.info.typeParams, env)
newClassTParams = produceTypeParameters(oldClassTParams, sClass, env) map subst(env)
@@ -557,7 +569,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
var res: List[Type] = Nil
// log(specializedClass + ": seeking specialized parents of class with parents: " + parents.map(_.typeSymbol))
for (p <- parents) {
- val stp = afterSpecialize(specializedType(p))
+ val stp = exitingSpecialize(specializedType(p))
if (stp != p)
if (p.typeSymbol.isTrait) res ::= stp
else if (currentRun.compiles(clazz))
@@ -567,7 +579,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
res
}
- var parents = List(applyContext(beforeTyper(clazz.tpe)))
+ var parents = List(applyContext(enteringTyper(clazz.tpe_*)))
// log("!!! Parents: " + parents + ", sym: " + parents.map(_.typeSymbol))
if (parents.head.typeSymbol.isTrait)
parents = parents.head.parents.head :: parents
@@ -589,7 +601,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
GenPolyType(newClassTParams, ClassInfoType(parents ::: extraSpecializedMixins, decls1, sClass))
}
- afterSpecialize(sClass setInfo specializedInfoType)
+ exitingSpecialize(sClass setInfo specializedInfoType)
val fullEnv = outerEnv ++ env
/** Enter 'sym' in the scope of the current specialized class. Its type is
@@ -683,7 +695,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def mkAccessor(field: Symbol, name: Name) = {
val newFlags = (SPECIALIZED | m.getter(clazz).flags) & ~(LOCAL | CASEACCESSOR | PARAMACCESSOR)
// we rely on the super class to initialize param accessors
- val sym = sClass.newMethod(name, field.pos, newFlags)
+ val sym = sClass.newMethod(name.toTermName, field.pos, newFlags)
info(sym) = SpecializedAccessor(field)
sym
}
@@ -702,7 +714,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// debuglog("m: " + m + " isLocal: " + nme.isLocalName(m.name) + " specVal: " + specVal.name + " isLocal: " + nme.isLocalName(specVal.name))
if (nme.isLocalName(m.name)) {
- val specGetter = mkAccessor(specVal, nme.localToGetter(specVal.name)) setInfo MethodType(Nil, specVal.info)
+ val specGetter = mkAccessor(specVal, nme.localToGetter(specVal.name.toTermName)) setInfo MethodType(Nil, specVal.info)
val origGetter = overrideIn(sClass, m.getter(clazz))
info(origGetter) = Forward(specGetter)
enterMember(specGetter)
@@ -777,7 +789,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (existing != NoSymbol)
clazz.owner.info.decls.unlink(existing)
- afterSpecialize(clazz.owner.info.decls enter spc) //!!! assumes fully specialized classes
+ exitingSpecialize(clazz.owner.info.decls enter spc) //!!! assumes fully specialized classes
}
if (subclasses.nonEmpty) clazz.resetFlag(FINAL)
cleanAnyRefSpecCache(clazz, decls1)
@@ -795,7 +807,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
private def normalizeMember(owner: Symbol, sym: Symbol, outerEnv: TypeEnv): List[Symbol] = {
sym :: (
- if (!sym.isMethod || beforeTyper(sym.typeParams.isEmpty)) Nil
+ if (!sym.isMethod || enteringTyper(sym.typeParams.isEmpty)) Nil
else {
// debuglog("normalizeMember: " + sym.fullNameAsName('.').decode)
var specializingOn = specializedParams(sym)
@@ -874,6 +886,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
val specMember = subst(outerEnv)(specializedOverload(owner, sym, spec))
+ owner.info.decls.enter(specMember)
typeEnv(specMember) = typeEnv(sym) ++ outerEnv ++ spec
wasSpecializedForTypeVars(specMember) ++= spec collect { case (s, tp) if s.tpe == tp => s }
@@ -902,10 +915,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
/** Return the specialized overload of `m`, in the given environment. */
- private def specializedOverload(owner: Symbol, sym: Symbol, env: TypeEnv): Symbol = {
+ private def specializedOverload(owner: Symbol, sym: Symbol, env: TypeEnv, nameSymbol: Symbol = NoSymbol): Symbol = {
val newFlags = (sym.flags | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR)
// this method properly duplicates the symbol's info
- ( sym.cloneSymbol(owner, newFlags, newName = specializedName(sym, env))
+ val specname = specializedName(nameSymbol orElse sym, env)
+ ( sym.cloneSymbol(owner, newFlags, newName = specname)
modifyInfo (info => subst(env, info.asSeenFrom(owner.thisType, sym.owner)))
)
}
@@ -952,7 +966,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
checkOverriddenTParams(overridden)
val env = unify(overridden.info, overriding.info, emptyEnv, false, true)
- def atNext = afterSpecialize(overridden.owner.info.decl(specializedName(overridden, env)))
+ def atNext = exitingSpecialize(overridden.owner.info.decl(specializedName(overridden, env)))
if (TypeEnv.restrict(env, stvars).nonEmpty && TypeEnv.isValid(env, overridden) && atNext != NoSymbol) {
debuglog(" " + pp(env) + " found " + atNext)
@@ -965,19 +979,37 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
(clazz.info.decls flatMap { overriding =>
needsSpecialOverride(overriding) match {
- case (NoSymbol, _) => None
+ case (NoSymbol, _) =>
+ if (overriding.isSuperAccessor) {
+ val alias = overriding.alias
+ debuglog("checking special overload for super accessor: %s, alias for %s".format(overriding.fullName, alias.fullName))
+ needsSpecialOverride(alias) match {
+ case nope @ (NoSymbol, _) => None
+ case (overridden, env) =>
+ val om = specializedOverload(clazz, overriding, env, overridden)
+ om.setName(nme.superName(om.name))
+ om.asInstanceOf[TermSymbol].setAlias(info(alias).target)
+ om.owner.info.decls.enter(om)
+ info(om) = SpecialSuperAccessor(om)
+ om.makeNotPrivate(om.owner)
+ newOverload(overriding, om, env)
+ Some(om)
+ }
+ } else None
case (overridden, env) =>
val om = specializedOverload(clazz, overridden, env)
+ clazz.info.decls.enter(om)
foreachWithIndex(om.paramss) { (params, i) =>
foreachWithIndex(params) { (param, j) =>
param.name = overriding.paramss(i)(j).name // SI-6555 Retain the parameter names from the subclass.
}
}
debuglog("specialized overload %s for %s in %s: %s".format(om, overriding.name.decode, pp(env), om.info))
+ if (overriding.isAbstractOverride) om.setFlag(ABSOVERRIDE)
typeEnv(om) = env
addConcreteSpecMethod(overriding)
info(om) = (
- if (overriding.isDeferred) { // abstract override
+ if (overriding.isDeferred) { // abstract override
debuglog("abstract override " + overriding.fullName + " with specialized " + om.fullName)
Forward(overriding)
}
@@ -998,7 +1030,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
)
newOverload(overriding, om, env)
- ifDebug(afterSpecialize(assert(
+ ifDebug(exitingSpecialize(assert(
overridden.owner.info.decl(om.name) != NoSymbol,
"Could not find " + om.name + " in " + overridden.owner.info.decls))
)
@@ -1027,7 +1059,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (isPrimitiveValueClass(tp2.typeSymbol) || isSpecializedAnyRefSubtype(tp2, sym1))
env + ((sym1, tp2))
else if (isSpecializedAnyRefSubtype(tp2, sym1))
- env + ((sym1, tp2)) // env + ((sym1, AnyRefClass.tpe))
+ env + ((sym1, tp2))
else if (strict)
unifyError(tp1, tp2)
else
@@ -1084,10 +1116,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- /** Apply type bindings in the given environment `env` to all declarations. */
- private def subst(env: TypeEnv, decls: List[Symbol]): List[Symbol] =
- decls map subst(env)
-
/** Apply the type environment 'env' to the given type. All type
* bindings are supposed to be to primitive types. A type variable
* that is annotated with 'uncheckedVariance' is mapped to the corresponding
@@ -1114,33 +1142,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private def subst(env: TypeEnv)(decl: Symbol): Symbol =
decl modifyInfo (info =>
- if (decl.isConstructor) MethodType(subst(env, info).params, decl.owner.tpe)
+ if (decl.isConstructor) MethodType(subst(env, info).params, decl.owner.tpe_*)
else subst(env, info)
)
- /** Checks if the type parameter symbol is not specialized
- * and is used as type parameters when extending a class with a specialized
- * type parameter.
- * At some point we may remove this restriction.
- *
- * Example:
- *
- * class Base[@specialized T]
- * class Derived[T] extends Base[T] // a non-specialized T is
- * // used as a type param for Base
- * // -> returning true
- */
- private def notSpecializedIn(tsym: Symbol, supertpe: Type) = supertpe match {
- case TypeRef(_, supersym, supertargs) =>
- val tspec = specializedOn(tsym).toSet
- for (supt <- supersym.typeParams) {
- val supspec = specializedOn(supt).toSet
- if (tspec != supspec && tspec.subsetOf(supspec))
- reporter.error(tsym.pos, "Type parameter has to be specialized at least for the same types as in the superclass. Missing types: " + (supspec.diff(tspec)).mkString(", "))
- }
- case _ => //log("nope")
- }
-
private def unspecializableClass(tp: Type) = (
definitions.isRepeatedParamType(tp) // ???
|| tp.typeSymbol.isJavaDefined
@@ -1156,7 +1161,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case cinfo @ ClassInfoType(parents, decls, clazz) if !unspecializableClass(cinfo) =>
val tparams = tpe.typeParams
if (tparams.isEmpty)
- afterSpecialize(parents map (_.typeSymbol.info))
+ exitingSpecialize(parents map (_.typeSymbol.info))
val parents1 = parents mapConserve specializedType
if (parents ne parents1) {
@@ -1177,7 +1182,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*
* A conflicting type environment could still be satisfiable.
*/
- def conflicting(env: TypeEnv) = !nonConflicting(env)
def nonConflicting(env: TypeEnv) = env forall { case (tvar, tpe) =>
(subst(env, tvar.info.bounds.lo) <:< tpe) && (tpe <:< subst(env, tvar.info.bounds.hi))
}
@@ -1247,9 +1251,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
class BodyDuplicator(_context: Context) extends super.BodyDuplicator(_context) {
override def castType(tree: Tree, pt: Type): Tree = {
- // log(" expected type: " + pt)
- // log(" tree type: " + tree.tpe)
- tree.tpe = if (tree.tpe != null) fixType(tree.tpe) else null
+ tree modifyType fixType
// log(" tree type: " + tree.tpe)
val ntree = if (tree.tpe != null && !(tree.tpe <:< pt)) {
val casttpe = CastMap(tree.tpe)
@@ -1257,8 +1259,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
else if (casttpe <:< CastMap(pt)) gen.mkCast(tree, pt)
else tree
} else tree
- ntree.tpe = null
- ntree
+
+ ntree.clearType()
}
}
@@ -1300,7 +1302,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (sym.isPrivate) debuglog(
"seeing private member %s, currentClass: %s, owner: %s, isAccessible: %b, isLocalName: %b".format(
sym, currentClass, sym.owner.enclClass, isAccessible(sym), nme.isLocalName(sym.name))
- )
+ )
if (shouldMakePublic(sym) && !isAccessible(sym)) {
debuglog("changing private flag of " + sym)
sym.makeNotPrivate(sym.owner)
@@ -1385,59 +1387,72 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def transform1(tree: Tree) = {
val symbol = tree.symbol
-
/** The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */
- def specSym(qual: Tree): Option[Symbol] = {
+ def specSym(qual: Tree): Symbol = {
val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
- debuglog("[specSym] checking for rerouting: %s with \n\tsym.tpe: %s, \n\ttree.tpe: %s \n\tenv: %s \n\tname: %s"
- .format(tree, symbol.tpe, tree.tpe, env, specializedName(symbol, env)))
- if (!env.isEmpty) { // a method?
- val specCandidates = qual.tpe.member(specializedName(symbol, env))
- val specMember = specCandidates suchThat { s =>
- doesConform(symbol, tree.tpe, qual.tpe.memberType(s), env)
- }
+ def isMatch(member: Symbol) = (
+ doesConform(symbol, tree.tpe, qual.tpe memberType member, env)
+ && TypeEnv.includes(typeEnv(member), env)
+ )
+ if (env.isEmpty) NoSymbol
+ else qual.tpe member specializedName(symbol, env) suchThat isMatch
+ }
- debuglog("[specSym] found: " + specCandidates.tpe + ", instantiated as: " + tree.tpe)
- debuglog("[specSym] found specMember: " + specMember)
- if (specMember ne NoSymbol)
- if (TypeEnv.includes(typeEnv(specMember), env)) Some(specMember)
- else {
- debuglog("wrong environments for specialized member: \n\ttypeEnv(%s) = %s\n\tenv = %s".format(specMember, typeEnv(specMember), env))
- None
- }
- else None
- } else None
+ def matchingSymbolInPrefix(pre: Type, member: Symbol, env: TypeEnv): Symbol = {
+ pre member specializedName(member, env) suchThat (_.tpe matches subst(env, member.tpe))
+ }
+
+ def transformSelect(sel: Select) = {
+ val Select(qual, name) = sel
+ debuglog(s"specializing Select(sym=${symbol.defString}, tree.tpe=${tree.tpe})")
+
+ val qual1 = transform(qual)
+ def copySelect = treeCopy.Select(tree, qual1, name)
+ def newSelect(member: Symbol) = atPos(tree.pos)(Select(qual1, member))
+ def typedOp(member: Symbol) = localTyper typedOperator newSelect(member)
+ def typedTree(member: Symbol) = localTyper typed newSelect(member)
+
+ val ignoreEnv = specializedTypeVars(symbol.info).isEmpty || name == nme.CONSTRUCTOR
+ if (ignoreEnv) overloads(symbol) find (_ matchesSym symbol) match {
+ case Some(Overload(member, _)) => typedOp(member)
+ case _ => copySelect
+ }
+ else {
+ val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
+ overloads(symbol) find (_ matchesEnv env) match {
+ case Some(Overload(member, _)) => typedOp(member)
+ case _ =>
+ matchingSymbolInPrefix(qual1.tpe, symbol, env) match {
+ case NoSymbol => copySelect
+ case member if member.isMethod => typedOp(member)
+ case member => typedTree(member)
+ }
+ }
+ }
}
curTree = tree
tree match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
def transformNew = {
- debuglog("Attempting to specialize new %s(%s)".format(tpt, args.mkString(", ")))
- val found = findSpec(tpt.tpe)
- if (found.typeSymbol ne tpt.tpe.typeSymbol) {
- // the ctor can be specialized
- debuglog("** instantiated specialized type: " + found)
- reportError {
- localTyper.typedPos(tree.pos)(New(found, transformTrees(args): _*))
- } {
- _ => super.transform(tree)
+ debuglog("Attempting to specialize new %s(%s)".format(tpt, args.mkString(", ")))
+ val found = specializedType(tpt.tpe)
+ if (found.typeSymbol ne tpt.tpe.typeSymbol) { // the ctor can be specialized
+ val inst = New(found, transformTrees(args): _*)
+ reportError(localTyper.typedPos(tree.pos)(inst))(_ => super.transform(tree))
}
- } else super.transform(tree)
+ else
+ super.transform(tree)
}
transformNew
- case Apply(sel @ Select(sup @ Super(qual, name), name1), args)
- if (sup.symbol.info.parents != beforePrevPhase(sup.symbol.info.parents)) =>
+ case Apply(sel @ Select(sup @ Super(qual, name), name1), args) if hasNewParents(sup) =>
def transformSuperApply = {
-
- def parents = sup.symbol.info.parents
- debuglog(tree + " parents changed from: " + beforePrevPhase(parents) + " to: " + parents)
-
- val res = localTyper.typed(
- Apply(Select(Super(qual, name) setPos sup.pos, name1) setPos sel.pos, transformTrees(args)) setPos tree.pos)
- debuglog("retyping call to super, from: " + symbol + " to " + res.symbol)
- res
+ val sup1 = Super(qual, name) setPos sup.pos
+ val tree1 = Apply(Select(sup1, name1) setPos sel.pos, transformTrees(args))
+ val res = localTyper.typedPos(tree.pos)(tree1)
+ debuglog(s"retyping call to super, from: $symbol to ${res.symbol}")
+ res
}
transformSuperApply
@@ -1448,7 +1463,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val qual1 = transform(qual)
// log(">>> TypeApply: " + tree + ", qual1: " + qual1)
specSym(qual1) match {
- case Some(specMember) =>
+ case NoSymbol =>
+ // See pos/exponential-spec.scala - can't call transform on the whole tree again.
+ treeCopy.TypeApply(tree, treeCopy.Select(sel, qual1, name), transformTrees(targs))
+ case specMember =>
debuglog("found " + specMember.fullName)
ifDebug(assert(symbol.info.typeParams.length == targs.length, symbol.info.typeParams + " / " + targs))
@@ -1458,7 +1476,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
// See SI-5583. Don't know why it happens now if it didn't before.
if (specMember.info.typeParams.isEmpty && residualTargs.nonEmpty) {
- log("!!! Type args to be applied, but symbol says no parameters: " + ((specMember.defString, residualTargs)))
+ devWarning("Type args to be applied, but symbol says no parameters: " + ((specMember.defString, residualTargs)))
localTyper.typed(sel)
}
else {
@@ -1470,11 +1488,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("rewrote " + tree + " to " + tree1)
localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method
}
-
- case None =>
- treeCopy.TypeApply(tree, treeCopy.Select(sel, qual1, name), super.transformTrees(targs))
- // See pos/exponential-spec.scala - can't call transform on the whole tree again.
- // super.transform(tree)
}
}
transformTypeApply
@@ -1484,36 +1497,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.\n" + pos.lineContent)
tree
- case Select(qual, name) if name != nme.CONSTRUCTOR && specializedTypeVars(symbol.info).nonEmpty =>
- debuglog("specializing Select %s [tree.tpe: %s]".format(symbol.defString, tree.tpe))
- val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
- if (env.isEmpty) super.transform(tree)
- else {
- val qual1 = transform(qual)
- def reselect(member: Symbol) = {
- val newSelect = atPos(tree.pos)(Select(qual1, member))
- if (member.isMethod) localTyper typedOperator newSelect
- else localTyper typed newSelect
- }
- overloads(symbol) find (_ matchesEnv env) match {
- case Some(Overload(member, _)) => reselect(member)
- case _ =>
- val specMember = qual1.tpe.member(specializedName(symbol, env)).suchThat(_.tpe matches subst(env, symbol.tpe))
- if (specMember ne NoSymbol)
- reselect(specMember)
- else
- treeCopy.Select(tree, qual1, name)
- }
- }
- case Select(qual, _) =>
- overloads(symbol) find (_ matchesSym symbol) match {
- case Some(Overload(member, _)) =>
- val newTree = Select(transform(qual), member)
- debuglog(s"** routing $tree to ${member.fullName} tree: $newTree")
- localTyper.typedOperator(atPos(tree.pos)(newTree))
- case None =>
- super.transform(tree)
- }
+ case sel @ Select(_, _) =>
+ transformSelect(sel)
case PackageDef(pid, stats) =>
tree.symbol.info // make sure specializations have been performed
@@ -1538,47 +1523,37 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
transformTemplate
case ddef @ DefDef(_, _, _, vparamss, _, _) if info.isDefinedAt(symbol) =>
- def transformDefDef = {
- // log("--> method: " + ddef + " in " + ddef.symbol.owner + ", " + info(symbol))
- def reportTypeError(body: =>Tree) = reportError(body)(_ => ddef)
-
+ def transformDefDef = {
if (symbol.isConstructor) {
-
- val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperSelect, vparamss, symbol.owner))
-
+ val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperInitCall, vparamss, symbol.owner))
if (symbol.isPrimaryConstructor)
localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant()))))
else // duplicate the original constructor
- reportTypeError(duplicateBody(ddef, info(symbol).target))
+ reportError(duplicateBody(ddef, info(symbol).target))(_ => ddef)
}
else info(symbol) match {
case Implementation(target) =>
assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName)
// we have an rhs, specialize it
- val tree1 = reportTypeError {
- duplicateBody(ddef, target)
- }
+ val tree1 = reportError(duplicateBody(ddef, target))(_ => ddef)
debuglog("implementation: " + tree1)
deriveDefDef(tree1)(transform)
case NormalizedMember(target) =>
- val constraints = satisfiabilityConstraints(typeEnv(symbol))
- log("constraints: " + constraints)
- if (target.isDeferred || constraints == None) {
- deriveDefDef(tree)(_ => localTyper typed gen.mkSysErrorCall("Fatal error in code generation: this should never be called."))
- } else {
- // we have an rhs, specialize it
- val tree1 = reportTypeError {
- duplicateBody(ddef, target, constraints.get)
- }
- debuglog("implementation: " + tree1)
- deriveDefDef(tree1)(transform)
+ logResult("constraints")(satisfiabilityConstraints(typeEnv(symbol))) match {
+ case Some(constraint) if !target.isDeferred =>
+ // we have an rhs, specialize it
+ val tree1 = reportError(duplicateBody(ddef, target, constraint))(_ => ddef)
+ debuglog("implementation: " + tree1)
+ deriveDefDef(tree1)(transform)
+ case _ =>
+ deriveDefDef(tree)(_ => localTyper typed gen.mkSysErrorCall("Fatal error in code generation: this should never be called."))
}
case SpecialOverride(target) =>
assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName)
//debuglog("moving implementation, body of target " + target + ": " + body(target))
- debuglog("%s is param accessor? %b".format(ddef.symbol, ddef.symbol.isParamAccessor))
+ log("%s is param accessor? %b".format(ddef.symbol, ddef.symbol.isParamAccessor))
// we have an rhs, specialize it
val tree1 = addBody(ddef, target)
(new ChangeOwnerTraverser(target, tree1.symbol))(tree1.rhs)
@@ -1626,6 +1601,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case Abstract(targ) =>
debuglog("abstract: " + targ)
localTyper.typed(deriveDefDef(tree)(rhs => rhs))
+
+ case SpecialSuperAccessor(targ) =>
+ debuglog("special super accessor: " + targ + " for " + tree)
+ localTyper.typed(deriveDefDef(tree)(rhs => rhs))
}
}
transformDefDef
@@ -1647,7 +1626,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
deriveValDef(newValDef)(transform)
}
transformValDef
-
case _ =>
super.transform(tree)
}
@@ -1684,7 +1662,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val symbol = tree.symbol
debuglog("specializing body of" + symbol.defString)
val DefDef(_, _, tparams, vparams :: Nil, tpt, _) = tree
-// val (_, origtparams) = splitParams(source.typeParams)
val env = typeEnv(symbol)
val boundTvars = env.keySet
val origtparams = source.typeParams.filter(tparam => !boundTvars(tparam) || !isPrimitiveValueType(env(tparam)))
@@ -1711,8 +1688,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
false) // don't make private fields public
val newBody = symSubstituter(body(source).duplicate)
- tpt.tpe = tpt.tpe.substSym(oldtparams, newtparams)
-
+ tpt modifyType (_.substSym(oldtparams, newtparams))
copyDefDef(tree)(vparamss = List(newSyms map ValDef), rhs = newBody)
}
@@ -1819,6 +1795,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* }}
*/
private def forwardCtorCall(pos: scala.reflect.internal.util.Position, receiver: Tree, paramss: List[List[ValDef]], clazz: Symbol): Tree = {
+ log(s"forwardCtorCall($pos, $receiver, $paramss, $clazz)")
/** A constructor parameter `f` initializes a specialized field
* iff:
@@ -1855,16 +1832,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
//! TODO: make sure the param types are seen from the right prefix
map2(fun.info.paramTypes, vparams)((tp, arg) => gen.maybeMkAsInstanceOf(Ident(arg), tp, arg.tpe))
)
- private def findSpec(tp: Type): Type = tp match {
- case TypeRef(pre, sym, _ :: _) => specializedType(tp)
- case _ => tp
- }
class SpecializationTransformer(unit: CompilationUnit) extends Transformer {
informProgress("specializing " + unit)
override def transform(tree: Tree) = {
val resultTree = if (settings.nospecialization.value) tree
- else afterSpecialize(specializeCalls(unit).transform(tree))
+ else exitingSpecialize(specializeCalls(unit).transform(tree))
// Remove the final modifier and @inline annotation from anything in the
    // original class (since it's being overridden in at least one subclass).
@@ -1884,11 +1857,4 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
resultTree
}
}
-
- def printSpecStats() {
- println(" concreteSpecMembers: %7d".format(concreteSpecMethods.size))
- println(" overloads: %7d".format(overloads.size))
- println(" typeEnv: %7d".format(typeEnv.size))
- println(" info: %7d".format(info.size))
- }
}
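For orientation between the two files, here is a hypothetical user-level sketch (Box and BoxDemo are invented names, not from this patch) of the kind of code the specialization phase rewrites: a @specialized(Int) parameter makes the compiler derive an Int-specialized variant of the class and its methods, and the call-rewriting logic patched above is what routes calls to those variants.

// Hypothetical example of code the specialization phase rewrites; for the Int
// instantiation the compiler derives a specialized subclass (named along the
// lines of Box$mcI$sp) plus specialized method variants.
class Box[@specialized(Int) T](val value: T) {
  def twice(f: T => T): T = f(f(value))
}

object BoxDemo {
  def main(args: Array[String]): Unit = {
    val b = new Box(20)        // picks up the Int-specialized variant
    println(b.twice(_ + 1))    // prints 22
  }
}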
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index a767850cba..2418698a18 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -17,7 +17,7 @@ import Flags.SYNTHETIC
abstract class TailCalls extends Transform {
import global._ // the global environment
import definitions._ // standard classes and methods
- import typer.{ typed, typedPos } // methods to type trees
+ import typer.typedPos // methods to type trees
val phaseName: String = "tailcalls"
@@ -31,7 +31,7 @@ abstract class TailCalls extends Transform {
class Phase(prev: scala.tools.nsc.Phase) extends StdPhase(prev) {
def apply(unit: global.CompilationUnit) {
if (!(settings.debuginfo.value == "notailcalls")) {
- newTransformer(unit).transformUnit(unit);
+ newTransformer(unit).transformUnit(unit)
}
}
}
@@ -82,7 +82,7 @@ abstract class TailCalls extends Transform {
* that label.
* </p>
* <p>
- * Assumes: <code>Uncurry</code> has been run already, and no multiple
+ * Assumes: `Uncurry` has been run already, and no multiple
     *    parameter lists exist.
* </p>
*/
@@ -147,10 +147,9 @@ abstract class TailCalls extends Transform {
}
def enclosingType = method.enclClass.typeOfThis
- def methodTypeParams = method.tpe.typeParams
def isEligible = method.isEffectivelyFinal
// @tailrec annotation indicates mandatory transformation
- def isMandatory = method.hasAnnotation(TailrecClass) && !forMSIL
+ def isMandatory = method.hasAnnotation(TailrecClass)
def isTransformed = isEligible && accessed(label)
def tailrecFailure() = unit.error(failPos, "could not optimize @tailrec annotated " + method + ": " + failReason)
@@ -230,7 +229,6 @@ abstract class TailCalls extends Transform {
}
else if (!matchesTypeArgs) failHere("it is called recursively with different type arguments")
else if (receiver == EmptyTree) rewriteTailCall(This(currentClass))
- else if (forMSIL) fail("it cannot be optimized on MSIL")
else if (!receiverIsSame) failHere("it changes type of 'this' on a polymorphic recursive call")
else rewriteTailCall(receiver)
}
@@ -394,7 +392,7 @@ abstract class TailCalls extends Transform {
finally maybeTail = saved
}
- def traverseNoTail(tree: Tree) = traverse(tree, false)
+ def traverseNoTail(tree: Tree) = traverse(tree, maybeTailNew = false)
def traverseTreesNoTail(trees: List[Tree]) = trees foreach traverseNoTail
override def traverse(tree: Tree) = tree match {
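As a reminder of what this phase operates on, a hypothetical sketch (TailRecDemo is an invented name, not part of the patch) of a method the tailcalls transform rewrites into a loop; with the MSIL checks removed above, @tailrec eligibility now depends only on the method being effectively final and the recursive call being in tail position.

import scala.annotation.tailrec

object TailRecDemo {
  // Eligible for the transform: effectively final, self-recursive call in tail position,
  // so the call is rewritten into a jump to a label rather than a real invocation.
  @tailrec
  def gcd(a: Int, b: Int): Int =
    if (b == 0) a else gcd(b, a % b)

  def main(args: Array[String]): Unit =
    println(gcd(1071, 462))   // prints 21
}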
diff --git a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
index c7bc16f249..3feadcd9b2 100644
--- a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
+++ b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
@@ -6,8 +6,6 @@
package scala.tools.nsc
package transform
-import scala.collection.{ mutable, immutable }
-
/** A base class for transforms.
* A transform contains a compiler phase which applies a tree transformer.
*/
@@ -19,17 +17,15 @@ trait TypingTransformers {
abstract class TypingTransformer(unit: CompilationUnit) extends Transformer {
var localTyper: analyzer.Typer =
if (phase.erasedTypes)
- erasure.newTyper(erasure.rootContext(unit, EmptyTree, true)).asInstanceOf[analyzer.Typer]
+ erasure.newTyper(erasure.rootContext(unit, EmptyTree, erasedTypes = true)).asInstanceOf[analyzer.Typer]
else
analyzer.newTyper(analyzer.rootContext(unit, EmptyTree, true))
protected var curTree: Tree = _
- protected def typedPos(pos: Position)(tree: Tree) = localTyper typed { atPos(pos)(tree) }
override final def atOwner[A](owner: Symbol)(trans: => A): A = atOwner(curTree, owner)(trans)
def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A = {
val savedLocalTyper = localTyper
-// println("transformer atOwner: " + owner + " isPackage? " + owner.isPackage)
localTyper = localTyper.atOwner(tree, if (owner.isModule) owner.moduleClass else owner)
val result = super.atOwner(owner)(trans)
localTyper = savedLocalTyper
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index e9f403aea0..94ca1206b9 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -61,24 +61,6 @@ abstract class UnCurry extends InfoTransform
// uncurry and uncurryType expand type aliases
- /** Traverse tree omitting local method definitions.
- * If a `return` is encountered, set `returnFound` to true.
- * Used for MSIL only.
- */
- private object lookForReturns extends Traverser {
- var returnFound = false
- override def traverse(tree: Tree): Unit = tree match {
- case Return(_) => returnFound = true
- case DefDef(_, _, _, _, _, _) => ;
- case _ => super.traverse(tree)
- }
- def found(tree: Tree) = {
- returnFound = false
- traverse(tree)
- returnFound
- }
- }
-
class UnCurryTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
private var needTryLift = false
private var inPattern = false
@@ -112,8 +94,6 @@ abstract class UnCurry extends InfoTransform
private lazy val serialVersionUIDAnnotation =
AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List())
- private var nprinted = 0
-
// I don't have a clue why I'm catching TypeErrors here, but it's better
// than spewing stack traces at end users for internal errors. Examples
// which hit at this point should not be hard to come by, but the immediate
@@ -134,7 +114,8 @@ abstract class UnCurry extends InfoTransform
def isByNameRef(tree: Tree) = (
tree.isTerm
&& !byNameArgs(tree)
- && tree.hasSymbolWhich(s => isByNameParamType(s.tpe))
+ && (tree.symbol ne null)
+ && (isByName(tree.symbol))
)
/** Uncurry a type of a tree node.
@@ -205,6 +186,9 @@ abstract class UnCurry extends InfoTransform
val keyDef = ValDef(key, New(ObjectClass.tpe))
val tryCatch = Try(body, pat -> rhs)
+ for (Try(t, catches, _) <- body ; cdef <- catches ; if treeInfo catchesThrowable cdef)
+ unit.warning(body.pos, "catch block may intercept non-local return from " + meth)
+
Block(List(keyDef), tryCatch)
}
}
@@ -228,8 +212,6 @@ abstract class UnCurry extends InfoTransform
* }
* new $anon()
*
- * If `settings.XoldPatmat.value`, also synthesized AbstractPartialFunction subclasses (see synthPartialFunction).
- *
*/
def transformFunction(fun: Function): Tree = {
fun.tpe match {
@@ -245,9 +227,6 @@ abstract class UnCurry extends InfoTransform
deEta(fun) match {
// nullary or parameterless
case fun1 if fun1 ne fun => fun1
- case _ if fun.tpe.typeSymbol == PartialFunctionClass =>
- // only get here when running under -Xoldpatmat
- synthPartialFunction(fun)
case _ =>
val parents = addSerializable(abstractFunctionForFunctionType(fun.tpe))
val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation serialVersionUIDAnnotation
@@ -277,137 +256,13 @@ abstract class UnCurry extends InfoTransform
localTyper.typedPos(fun.pos) {
Block(
- List(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, List(applyMethodDef), fun.pos)),
+ List(ClassDef(anonClass, NoMods, ListOfNil, List(applyMethodDef), fun.pos)),
Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
}
}
}
- /** Transform a function node (x => body) of type PartialFunction[T, R] where
- * body = expr match { case P_i if G_i => E_i }_i=1..n
- * to (assuming none of the cases is a default case):
- *
- * class $anon() extends AbstractPartialFunction[T, R] with Serializable {
- * def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = (expr: @unchecked) match {
- * case P_1 if G_1 => E_1
- * ...
- * case P_n if G_n => E_n
- * case _ => default(expr)
- * }
- * def isDefinedAt(x: T): boolean = (x: @unchecked) match {
- * case P_1 if G_1 => true
- * ...
- * case P_n if G_n => true
- * case _ => false
- * }
- * }
- * new $anon()
- *
- * If there's a default case, the original match is used for applyOrElse, and isDefinedAt returns `true`
- */
- def synthPartialFunction(fun: Function) = {
- if (!settings.XoldPatmat.value) debugwarn("Under the new pattern matching scheme, PartialFunction should have been synthesized during typers.")
-
- val targs = fun.tpe.typeArgs
- val (formals, restpe) = (targs.init, targs.last)
-
- val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation serialVersionUIDAnnotation
- val parents = addSerializable(appliedType(AbstractPartialFunctionClass, targs: _*))
- anonClass setInfo ClassInfoType(parents, newScope, anonClass)
-
- // duplicate before applyOrElseMethodDef is run so that it does not mess up our trees and label symbols (we have a fresh set)
- // otherwise `TreeSymSubstituter(fun.vparams map (_.symbol), params)` won't work as the subst has been run already
- val bodyForIDA = {
- val duped = fun.body.duplicate
- val oldParams = new mutable.ListBuffer[Symbol]()
- val newParams = new mutable.ListBuffer[Symbol]()
-
- val oldSyms0 =
- duped filter {
- case l@LabelDef(_, params, _) =>
- params foreach {p =>
- val oldSym = p.symbol
- p.symbol = oldSym.cloneSymbol
- oldParams += oldSym
- newParams += p.symbol
- }
- true
- case _ => false
- } map (_.symbol)
- val oldSyms = oldParams.toList ++ oldSyms0
- val newSyms = newParams.toList ++ (oldSyms0 map (_.cloneSymbol))
- // println("duping "+ oldSyms +" --> "+ (newSyms map (_.ownerChain)))
-
- val substLabels = new TreeSymSubstituter(oldSyms, newSyms)
-
- substLabels(duped)
- }
-
- // def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 =
- val applyOrElseMethodDef = {
- val methSym = anonClass.newMethod(fun.pos, nme.applyOrElse) setFlag (FINAL | OVERRIDE)
-
- val List(argtpe) = formals
- val A1 = methSym newTypeParameter(newTypeName("A1")) setInfo TypeBounds.upper(argtpe)
- val B1 = methSym newTypeParameter(newTypeName("B1")) setInfo TypeBounds.lower(restpe)
- val methFormals = List(A1.tpe, functionType(List(A1.tpe), B1.tpe))
- val params@List(x, default) = methSym newSyntheticValueParams methFormals
- methSym setInfoAndEnter polyType(List(A1, B1), MethodType(params, B1.tpe))
-
- val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), List(x))
- val body = localTyper.typedPos(fun.pos) { import CODE._
- def defaultAction(scrut: Tree) = REF(default) APPLY (REF(x))
-
- substParam(fun.body) match {
- case orig@Match(selector, cases) =>
- if (cases exists treeInfo.isDefaultCase) orig
- else {
- val defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, defaultAction(selector.duplicate))
- Match(/*gen.mkUnchecked*/(selector), cases :+ defaultCase)
- }
-
- }
- }
- body.changeOwner(fun.symbol -> methSym)
-
- val methDef = DefDef(methSym, body)
-
- // Have to repack the type to avoid mismatches when existentials
- // appear in the result - see SI-4869.
- methDef.tpt setType localTyper.packedType(body, methSym)
- methDef
- }
-
- val isDefinedAtMethodDef = {
- val methSym = anonClass.newMethod(nme.isDefinedAt, fun.pos, FINAL | SYNTHETIC)
- val params = methSym newSyntheticValueParams formals
- methSym setInfoAndEnter MethodType(params, BooleanClass.tpe)
-
- val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), params)
- def doSubst(x: Tree) = substParam(resetLocalAttrsKeepLabels(x)) // see pos/t1761 for why `resetLocalAttrs`, but must keep label symbols around
-
- val body = bodyForIDA match {
- case Match(selector, cases) =>
- if (cases exists treeInfo.isDefaultCase) TRUE_typed
- else
- doSubst(Match(/*gen.mkUnchecked*/(selector),
- (cases map (c => deriveCaseDef(c)(x => TRUE_typed))) :+ (
- DEFAULT ==> FALSE_typed)))
-
- }
- body.changeOwner(fun.symbol -> methSym)
-
- DefDef(methSym, body)
- }
-
- localTyper.typedPos(fun.pos) {
- Block(
- List(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, List(applyOrElseMethodDef, isDefinedAtMethodDef), fun.pos)),
- Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
- }
- }
-
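For reference, the expansion sketched in the doc comment just removed can be written by hand; the following is an illustrative sketch only (OldPatmatSketch and len are invented names, not code from the patch), showing the applyOrElse/isDefinedAt pair that the old -Xoldpatmat path used to synthesize and that the new pattern matcher now produces during typer.

object OldPatmatSketch {
  // Hand-written analogue of the expansion described in the removed comment,
  // for the partial function literal { case s: String => s.length }:
  // applyOrElse falls through to `default`, isDefinedAt mirrors the cases.
  val len: PartialFunction[Any, Int] =
    new scala.runtime.AbstractPartialFunction[Any, Int] {
      override def applyOrElse[A1 <: Any, B1 >: Int](x: A1, default: A1 => B1): B1 =
        x match {
          case s: String => s.length
          case _         => default(x)
        }
      def isDefinedAt(x: Any): Boolean = x match {
        case _: String => true
        case _         => false
      }
    }

  def main(args: Array[String]): Unit = {
    println(len.isDefinedAt("abc"))                // true
    println(len.applyOrElse(42, (_: Any) => -1))   // -1
  }
}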
def transformArgs(pos: Position, fun: Symbol, args: List[Tree], formals: List[Type]) = {
val isJava = fun.isJavaDefined
def transformVarargs(varargsElemType: Type) = {
@@ -416,7 +271,7 @@ abstract class UnCurry extends InfoTransform
// when calling into scala varargs, make sure it's a sequence.
def arrayToSequence(tree: Tree, elemtp: Type) = {
- afterUncurry {
+ exitingUncurry {
localTyper.typedPos(pos) {
val pt = arrayType(elemtp)
val adaptedTree = // might need to cast to Array[elemtp], as arrays are not covariant
@@ -446,7 +301,7 @@ abstract class UnCurry extends InfoTransform
case _ => EmptyTree
}
}
- afterUncurry {
+ exitingUncurry {
localTyper.typedPos(pos) {
gen.mkMethodCall(tree, toArraySym, Nil, List(traversableClassTag(tree.tpe)))
}
@@ -470,7 +325,7 @@ abstract class UnCurry extends InfoTransform
else arrayToSequence(mkArray, varargsElemType)
}
- afterUncurry {
+ exitingUncurry {
if (isJava && !isReferenceArray(suffix.tpe) && isArrayOfSymbol(fun.tpe.params.last.tpe, ObjectClass)) {
// The array isn't statically known to be a reference array, so call ScalaRuntime.toObjectArray.
suffix = localTyper.typedPos(pos) {
@@ -546,13 +401,6 @@ abstract class UnCurry extends InfoTransform
finally needTryLift = saved
}
- /** A try or synchronized needs to be lifted anyway for MSIL if it contains
- * return statements. These are disallowed in the CLR. By lifting
- * such returns will be converted to throws.
- */
- def shouldBeLiftedAnyway(tree: Tree) = false && // buggy, see #1981
- forMSIL && lookForReturns.found(tree)
-
/** Transform tree `t` to { def f = t; f } where `f` is a fresh name
*/
def liftTree(tree: Tree) = {
@@ -588,7 +436,7 @@ abstract class UnCurry extends InfoTransform
if (dd.symbol hasAnnotation VarargsClass) saveRepeatedParams(dd)
- withNeedLift(false) {
+ withNeedLift(needLift = false) {
if (dd.symbol.isClassConstructor) {
atOwner(sym) {
val rhs1 = (rhs: @unchecked) match {
@@ -612,11 +460,11 @@ abstract class UnCurry extends InfoTransform
case ValDef(_, _, _, rhs) =>
if (sym eq NoSymbol) throw new IllegalStateException("Encountered Valdef without symbol: "+ tree + " in "+ unit)
if (!sym.owner.isSourceMethod)
- withNeedLift(true) { super.transform(tree) }
+ withNeedLift(needLift = true) { super.transform(tree) }
else
super.transform(tree)
case UnApply(fn, args) =>
- val fn1 = withInPattern(false)(transform(fn))
+ val fn1 = withInPattern(value = false)(transform(fn))
val args1 = transformTrees(fn.symbol.name match {
case nme.unapply => args
case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, analyzer.unapplyTypeList(fn.pos, fn.symbol, fn.tpe, args.length))
@@ -625,24 +473,20 @@ abstract class UnCurry extends InfoTransform
treeCopy.UnApply(tree, fn1, args1)
case Apply(fn, args) =>
- if (fn.symbol == Object_synchronized && shouldBeLiftedAnyway(args.head))
- transform(treeCopy.Apply(tree, fn, List(liftTree(args.head))))
- else {
- val needLift = needTryLift || !fn.symbol.isLabel // SI-6749, no need to lift in args to label jumps.
- withNeedLift(needLift) {
- val formals = fn.tpe.paramTypes
- treeCopy.Apply(tree, transform(fn), transformTrees(transformArgs(tree.pos, fn.symbol, args, formals)))
- }
+ val needLift = needTryLift || !fn.symbol.isLabel // SI-6749, no need to lift in args to label jumps.
+ withNeedLift(needLift) {
+ val formals = fn.tpe.paramTypes
+ treeCopy.Apply(tree, transform(fn), transformTrees(transformArgs(tree.pos, fn.symbol, args, formals)))
}
case Assign(_: RefTree, _) =>
- withNeedLift(true) { super.transform(tree) }
+ withNeedLift(needLift = true) { super.transform(tree) }
case Assign(lhs, _) if lhs.symbol.owner != currentMethod || lhs.symbol.hasFlag(LAZY | ACCESSOR) =>
- withNeedLift(true) { super.transform(tree) }
+ withNeedLift(needLift = true) { super.transform(tree) }
case ret @ Return(_) if (isNonLocalReturn(ret)) =>
- withNeedLift(true) { super.transform(ret) }
+ withNeedLift(needLift = true) { super.transform(ret) }
case Try(_, Nil, _) =>
// try-finally does not need lifting: lifting is needed only for try-catch
@@ -652,11 +496,11 @@ abstract class UnCurry extends InfoTransform
super.transform(tree)
case Try(block, catches, finalizer) =>
- if (needTryLift || shouldBeLiftedAnyway(tree)) transform(liftTree(tree))
+ if (needTryLift) transform(liftTree(tree))
else super.transform(tree)
case CaseDef(pat, guard, body) =>
- val pat1 = withInPattern(true)(transform(pat))
+ val pat1 = withInPattern(value = true)(transform(pat))
treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
case fun @ Function(_, _) =>
@@ -677,11 +521,11 @@ abstract class UnCurry extends InfoTransform
tree1
}
)
- assert(result.tpe != null, result + " tpe is null")
+ assert(result.tpe != null, result.shortClass + " tpe is null:\n" + result)
result setType uncurryTreeType(result.tpe)
}
- def postTransform(tree: Tree): Tree = afterUncurry {
+ def postTransform(tree: Tree): Tree = exitingUncurry {
def applyUnary(): Tree = {
// TODO_NMT: verify that the inner tree of a type-apply also gets parens if the
// whole tree is a polymorphic nullary method application
@@ -708,35 +552,6 @@ abstract class UnCurry extends InfoTransform
def isDefaultCatch(cdef: CaseDef) = isThrowable(cdef.pat) && cdef.guard.isEmpty
- def postTransformTry(tree: Try) = {
- val body = tree.block
- val catches = tree.catches
- val finalizer = tree.finalizer
- if (opt.virtPatmat) {
- if (catches exists (cd => !treeInfo.isCatchCase(cd)))
- debugwarn("VPM BUG! illegal try/catch " + catches)
- tree
- } else if (catches forall treeInfo.isCatchCase) {
- tree
- } else {
- val exname = unit.freshTermName("ex$")
- val cases =
- if ((catches exists treeInfo.isDefaultCase) || isDefaultCatch(catches.last)) catches
- else catches :+ CaseDef(Ident(nme.WILDCARD), EmptyTree, Throw(Ident(exname)))
- val catchall =
- atPos(tree.pos) {
- CaseDef(
- Bind(exname, Ident(nme.WILDCARD)),
- EmptyTree,
- Match(Ident(exname), cases))
- }
- debuglog("rewrote try: " + catches + " ==> " + catchall);
- val catches1 = localTyper.typedCases(
- List(catchall), ThrowableClass.tpe, WildcardType)
- treeCopy.Try(tree, body, catches1, finalizer)
- }
- }
-
tree match {
/* Some uncurry post transformations add members to templates.
*
@@ -775,7 +590,9 @@ abstract class UnCurry extends InfoTransform
addJavaVarargsForwarders(dd, flatdd)
case tree: Try =>
- postTransformTry(tree)
+ if (tree.catches exists (cd => !treeInfo.isCatchCase(cd)))
+ devWarning("VPM BUG - illegal try/catch " + tree.catches)
+ tree
case Apply(Apply(fn, args), args1) =>
treeCopy.Apply(tree, fn, args ::: args1)
@@ -832,7 +649,7 @@ abstract class UnCurry extends InfoTransform
final case class Packed(param: ValDef, tempVal: ValDef) extends ParamTransform
def isDependent(dd: DefDef): Boolean =
- beforeUncurry {
+ enteringUncurry {
val methType = dd.symbol.info
methType.isDependentMethodType && mexists(methType.paramss)(_.info exists (_.isImmediatelyDependent))
}
@@ -915,10 +732,6 @@ abstract class UnCurry extends InfoTransform
if (!dd.symbol.hasAnnotation(VarargsClass) || !repeatedParams.contains(dd.symbol))
return flatdd
- def toSeqType(tp: Type): Type = {
- val arg = elementType(ArrayClass, tp)
- seqType(arg)
- }
def toArrayType(tp: Type): Type = {
val arg = elementType(SeqClass, tp)
// to prevent generation of an `Object` parameter from `Array[T]` parameter later
@@ -953,7 +766,7 @@ abstract class UnCurry extends InfoTransform
}
// create the symbol
- val forwsym = currentClass.newMethod(dd.name, dd.pos, VARARGS | SYNTHETIC | flatdd.symbol.flags) setInfo forwtype
+ val forwsym = currentClass.newMethod(dd.name.toTermName, dd.pos, VARARGS | SYNTHETIC | flatdd.symbol.flags) setInfo forwtype
// create the tree
val forwtree = theTyper.typedPos(dd.pos) {
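The varargs-forwarder change above is easiest to read against user code; a hypothetical example (VarargsDemo is an invented name, not from the patch) of a method for which uncurry emits the extra array-taking forwarder used by Java callers:

import scala.annotation.varargs

object VarargsDemo {
  // @varargs asks uncurry's addJavaVarargsForwarders to emit a bridge roughly
  // of the shape `def sum(xs: Array[Int]): Int` for Java interop.
  @varargs
  def sum(xs: Int*): Int = xs.sum

  def main(args: Array[String]): Unit =
    println(sum(1, 2, 3))   // prints 6
}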
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
index 22eabb6d6f..69d9987b05 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
@@ -113,7 +113,7 @@ trait Logic extends Debugging {
case object False extends Prop
// symbols are propositions
- abstract case class Sym(val variable: Var, val const: Const) extends Prop {
+ abstract case class Sym(variable: Var, const: Const) extends Prop {
private[this] val id = Sym.nextSymId
override def toString = variable +"="+ const +"#"+ id
@@ -212,7 +212,7 @@ trait Logic extends Debugging {
}
props foreach gatherEqualities.apply
- if (modelNull) vars foreach (_.registerNull)
+ if (modelNull) vars foreach (_.registerNull())
val pure = props map (p => eqFreePropToSolvable(rewriteEqualsToProp(p)))
@@ -321,7 +321,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
val staticTpCheckable: Type = checkableType(staticTp)
private[this] var _mayBeNull = false
- def registerNull(): Unit = { ensureCanModify; if (NullTp <:< staticTpCheckable) _mayBeNull = true }
+ def registerNull(): Unit = { ensureCanModify(); if (NullTp <:< staticTpCheckable) _mayBeNull = true }
def mayBeNull: Boolean = _mayBeNull
// case None => domain is unknown,
@@ -345,16 +345,16 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
} else
subConsts
- observed; allConsts
+ observed(); allConsts
}
// populate equalitySyms
// don't care about the result, but want only one fresh symbol per distinct constant c
- def registerEquality(c: Const): Unit = {ensureCanModify; symForEqualsTo getOrElseUpdate(c, Sym(this, c))}
+ def registerEquality(c: Const): Unit = {ensureCanModify(); symForEqualsTo getOrElseUpdate(c, Sym(this, c))}
// return the symbol that represents this variable being equal to the constant `c`, if it exists, otherwise False (for robustness)
// (registerEquality(c) must have been called prior, either when constructing the domain or from outside)
- def propForEqualsTo(c: Const): Prop = {observed; symForEqualsTo.getOrElse(c, False)}
+ def propForEqualsTo(c: Const): Prop = {observed(); symForEqualsTo.getOrElse(c, False)}
      // [implementation NOTE: don't access until all potential equalities have been registered using registerEquality]
      /** the information needed to construct the boolean proposition that encodes the equality proposition (V = C)
@@ -462,7 +462,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
lazy val symForStaticTp: Option[Sym] = symForEqualsTo.get(TypeConst(staticTpCheckable))
// don't access until all potential equalities have been registered using registerEquality
- private lazy val equalitySyms = {observed; symForEqualsTo.values.toList}
+ private lazy val equalitySyms = {observed(); symForEqualsTo.values.toList}
// don't call until all equalities have been registered and registerNull has been called (if needed)
def describe = {
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
index d9f93f27b6..3ee75df6c4 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
@@ -657,7 +657,7 @@ trait MatchAnalysis extends MatchApproximation {
cls match {
case ConsClass => ListExample(args())
- case _ if isTupleSymbol(cls) => TupleExample(args(true))
+ case _ if isTupleSymbol(cls) => TupleExample(args(brevity = true))
case _ => ConstructorExample(cls, args())
}
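The counter-example formatting touched above feeds exhaustivity warnings; a hypothetical user-level example (Shape, Circle, Square and ExhaustivityDemo are invented names, not from the patch) of the behaviour it supports:

sealed trait Shape
final case class Circle(r: Double) extends Shape
final case class Square(side: Double) extends Shape

object ExhaustivityDemo {
  def area(s: Shape): Double = s match {
    case Circle(r) => math.Pi * r * r
    // missing Square(_): the analysis warns that the match may not be exhaustive
  }
}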
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
index 57fab4eafa..416bdf50f0 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
@@ -74,7 +74,7 @@ trait MatchCodeGen extends Interface {
// the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly)
def _asInstanceOf(b: Symbol, tp: Type): Tree = if (b.info <:< tp) REF(b) else gen.mkCastPreservingAnnotations(REF(b), tp)
- def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, true, false)
+ def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, any = true, wrapInApply = false)
// duplicated out of frustration with cast generation
def mkZero(tp: Type): Tree = {
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
index 90c52e3eb6..23b33e9be6 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
@@ -19,7 +19,7 @@ trait MatchTranslation { self: PatternMatching =>
Alternative, Constant, EmptyTree, Select, Star, This, Throw, Typed, UnApply,
Type, MethodType, WildcardType, PolyType, ErrorType, NoType, TypeRef, typeRef,
Name, NoSymbol, Position, Tree, atPos, glb, rootMirror, treeInfo, nme, Transformer,
- elimAnonymousClass, asCompactDebugString, hasLength}
+ elimAnonymousClass, asCompactDebugString, hasLength, devWarning}
import global.definitions.{ThrowableClass, SeqClass, ScalaPackageClass, BooleanClass, UnitClass, RepeatedParamClass,
repeatedToSeq, isRepeatedParamType, getProductArgs}
import global.analyzer.{ErrorUtils, formalTypes}
@@ -71,7 +71,7 @@ trait MatchTranslation { self: PatternMatching =>
}
while (it.hasNext) {
- val cdef = it.next
+ val cdef = it.next()
// If a default case has been seen, then every succeeding case is unreachable.
if (vpat != null)
context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat))
@@ -173,7 +173,7 @@ trait MatchTranslation { self: PatternMatching =>
(caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution))
}
- for(cases <- emitTypeSwitch(bindersAndCases, pt).toList;
+ for(cases <- emitTypeSwitch(bindersAndCases, pt).toList
if cases forall treeInfo.isCatchCase; // must check again, since it's not guaranteed -- TODO: can we eliminate this? e.g., a type test could test for a trait or a non-trivial prefix, which are not handled by the back-end
cse <- cases) yield fixerUpper(matchOwner, pos)(cse).asInstanceOf[CaseDef]
}
@@ -671,4 +671,4 @@ trait MatchTranslation { self: PatternMatching =>
}
}
}
-}
\ No newline at end of file
+}
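The unreachability check in the loop above corresponds to user code like this hypothetical sketch (UnreachableDemo is an invented name, not from the patch):

object UnreachableDemo {
  def describe(x: Any): String = x match {
    case other     => "something: " + other
    case _: String => "a string"   // flagged as unreachable by the check above
  }
}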
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
index df4e699620..8be8b72130 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
@@ -48,9 +48,7 @@ trait PatternMatching extends Transform with TypingTransformers
val phaseName: String = "patmat"
- def newTransformer(unit: CompilationUnit): Transformer =
- if (opt.virtPatmat) new MatchTransformer(unit)
- else noopTransformer
+ def newTransformer(unit: CompilationUnit): Transformer = new MatchTransformer(unit)
class MatchTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
override def transform(tree: Tree): Tree = tree match {
@@ -105,11 +103,10 @@ trait Interface extends ast.TreeDSL {
import analyzer.Typer
// 2.10/2.11 compatibility
- protected final def dealiasWiden(tp: Type) = tp.dealias // 2.11: dealiasWiden
- protected final def mkTRUE = CODE.TRUE_typed // 2.11: CODE.TRUE
- protected final def mkFALSE = CODE.FALSE_typed // 2.11: CODE.FALSE
- protected final def hasStableSymbol(p: Tree) = p.hasSymbol && p.symbol.isStable // 2.11: p.hasSymbolField && p.symbol.isStable
- protected final def devWarning(str: String) = global.debugwarn(str) // 2.11: omit
+ protected final def dealiasWiden(tp: Type) = tp.dealiasWiden
+ protected final def mkTRUE = CODE.TRUE
+ protected final def mkFALSE = CODE.FALSE
+ protected final def hasStableSymbol(p: Tree) = p.hasSymbolField && p.symbol.isStable
object vpmName {
val one = newTermName("one")
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
index 843f831ea1..34cdbeba8e 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
@@ -71,7 +71,7 @@ trait Solving extends Logic {
val TrueF = formula()
val FalseF = formula(clause())
def lit(s: Sym) = formula(clause(Lit(s)))
- def negLit(s: Sym) = formula(clause(Lit(s, false)))
+ def negLit(s: Sym) = formula(clause(Lit(s, pos = false)))
def conjunctiveNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Formula = {
def distribute(a: Formula, b: Formula, budget: Int): Formula =
@@ -164,7 +164,7 @@ trait Solving extends Logic {
else Nil
}
val forced = unassigned flatMap { s =>
- force(Lit(s, true)) ++ force(Lit(s, false))
+ force(Lit(s, pos = true)) ++ force(Lit(s, pos = false))
}
debug.patmat("forced "+ forced)
val negated = negateModel(model)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index b50486306d..36121f2653 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -16,7 +16,6 @@ trait Analyzer extends AnyRef
with Typers
with Infer
with Implicits
- with Variances
with EtaExpansion
with SyntheticMethods
with Unapplies
@@ -88,22 +87,25 @@ trait Analyzer extends AnyRef
override def run() {
val start = if (Statistics.canEnable) Statistics.startTimer(typerNanos) else null
global.echoPhaseSummary(this)
- currentRun.units foreach applyPhase
- undoLog.clear()
- // need to clear it after as well or 10K+ accumulated entries are
- // uncollectable the rest of the way.
+ for (unit <- currentRun.units) {
+ applyPhase(unit)
+ undoLog.clear()
+ }
if (Statistics.canEnable) Statistics.stopTimer(typerNanos, start)
}
def apply(unit: CompilationUnit) {
try {
- unit.body = newTyper(rootContext(unit)).typed(unit.body)
+ val typer = newTyper(rootContext(unit))
+ unit.body = typer.typed(unit.body)
if (global.settings.Yrangepos.value && !global.reporter.hasErrors) global.validatePositions(unit.body)
for (workItem <- unit.toCheck) workItem()
- } finally {
+ if (settings.lint.value)
+ typer checkUnused unit
+ }
+ finally {
unit.toCheck.clear()
}
}
}
}
}
-
diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
index 28f620dbb5..4210d0b9fb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
@@ -33,7 +33,7 @@ trait AnalyzerPlugins { self: Analyzer =>
/**
* Let analyzer plugins change the expected type before type checking a tree.
*/
- def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type = pt
+ def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Mode): Type = pt
/**
* Let analyzer plugins modify the type that has been computed for a tree.
@@ -44,7 +44,7 @@ trait AnalyzerPlugins { self: Analyzer =>
* @param mode Mode that was used for typing `tree`
* @param pt Expected type that was used for typing `tree`
*/
- def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = tpe
+ def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type = tpe
/**
* Let analyzer plugins change the types assigned to definitions. For definitions that have
@@ -133,7 +133,7 @@ trait AnalyzerPlugins { self: Analyzer =>
* Decide whether this analyzer plugin can adapt a tree that has an annotated type to the
* given type tp, taking into account the given mode (see method adapt in trait Typers).
*/
- def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = false
+ def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Boolean = false
/**
* Adapt a tree that has an annotated type to the given type tp, taking into account the given
@@ -142,7 +142,7 @@ trait AnalyzerPlugins { self: Analyzer =>
* An implementation cannot rely on canAdaptAnnotations being called before. If the implementing
* class cannot do the adapting, it should return the tree unchanged.
*/
- def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = tree
+ def adaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Tree = tree
/**
* Modify the type of a return expression. By default, return expressions have type
@@ -169,13 +169,13 @@ trait AnalyzerPlugins { self: Analyzer =>
/** @see AnalyzerPlugin.pluginsPt */
- def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type =
+ def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Mode): Type =
if (analyzerPlugins.isEmpty) pt
else analyzerPlugins.foldLeft(pt)((pt, plugin) =>
if (!plugin.isActive()) pt else plugin.pluginsPt(pt, typer, tree, mode))
/** @see AnalyzerPlugin.pluginsTyped */
- def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
+ def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type = {
// support deprecated methods in annotation checkers
val annotCheckersTpe = addAnnotations(tree, tpe)
if (analyzerPlugins.isEmpty) annotCheckersTpe
@@ -196,7 +196,7 @@ trait AnalyzerPlugins { self: Analyzer =>
if (!plugin.isActive()) tpe else plugin.pluginsTypeSigAccessor(tpe, typer, tree, sym))
/** @see AnalyzerPlugin.canAdaptAnnotations */
- def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
+ def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Boolean = {
// support deprecated methods in annotation checkers
val annotCheckersExists = global.canAdaptAnnotations(tree, mode, pt)
annotCheckersExists || {
@@ -207,7 +207,7 @@ trait AnalyzerPlugins { self: Analyzer =>
}
/** @see AnalyzerPlugin.adaptAnnotations */
- def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = {
+ def adaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Tree = {
// support deprecated methods in annotation checkers
val annotCheckersTree = global.adaptAnnotations(tree, mode, pt)
if (analyzerPlugins.isEmpty) annotCheckersTree
diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
index d30b5c2601..88bfa6099d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
@@ -6,12 +6,8 @@
package scala.tools.nsc
package typechecker
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
-import scala.util.control.ControlThrowable
-import symtab.Flags._
-import scala.annotation.tailrec
import Checkability._
+import scala.language.postfixOps
/** On pattern matcher checkability:
*
@@ -66,6 +62,9 @@ trait Checkable {
bases foreach { bc =>
val tps1 = (from baseType bc).typeArgs
val tps2 = (tvarType baseType bc).typeArgs
+ if (tps1.size != tps2.size)
+ devWarning(s"Unequally sized type arg lists in propagateKnownTypes($from, $to): ($tps1, $tps2)")
+
(tps1, tps2).zipped foreach (_ =:= _)
// Alternate, variance respecting formulation causes
// neg/unchecked3.scala to fail (abstract types). TODO -
@@ -82,7 +81,7 @@ trait Checkable {
val resArgs = tparams zip tvars map {
case (_, tvar) if tvar.instValid => tvar.constr.inst
- case (tparam, _) => tparam.tpe
+ case (tparam, _) => tparam.tpeHK
}
appliedType(to, resArgs: _*)
}
@@ -112,7 +111,7 @@ trait Checkable {
private class CheckabilityChecker(val X: Type, val P: Type) {
def Xsym = X.typeSymbol
def Psym = P.typeSymbol
- def XR = propagateKnownTypes(X, Psym)
+ def XR = if (Xsym == AnyClass) classExistentialType(Psym) else propagateKnownTypes(X, Psym)
// sadly the spec says (new java.lang.Boolean(true)).isInstanceOf[scala.Boolean]
def P1 = X matchesPattern P
def P2 = !Psym.isPrimitiveValueClass && isNeverSubType(X, P)
@@ -134,7 +133,7 @@ trait Checkable {
else if (P3) RuntimeCheckable
else if (uncheckableType == NoType) {
// Avoid warning (except ourselves) if we can't pinpoint the uncheckable type
- debugwarn("Checkability checker says 'Uncheckable', but uncheckable type cannot be found:\n" + summaryString)
+ debuglog("Checkability checker says 'Uncheckable', but uncheckable type cannot be found:\n" + summaryString)
CheckabilityError
}
else Uncheckable
@@ -154,6 +153,7 @@ trait Checkable {
def neverSubClass = isNeverSubClass(Xsym, Psym)
def neverMatches = result == StaticallyFalse
def isUncheckable = result == Uncheckable
+ def isCheckable = !isUncheckable
def uncheckableMessage = uncheckableType match {
case NoType => "something"
case tp @ RefinedType(_, _) => "refinement " + tp
@@ -203,11 +203,12 @@ trait Checkable {
def isNeverSubClass(sym1: Symbol, sym2: Symbol) = areIrreconcilableAsParents(sym1, sym2)
private def isNeverSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol]): Boolean = /*logResult(s"isNeverSubArgs($tps1, $tps2, $tparams)")*/ {
- def isNeverSubArg(t1: Type, t2: Type, variance: Int) = {
- if (variance > 0) isNeverSubType(t2, t1)
- else if (variance < 0) isNeverSubType(t1, t2)
- else isNeverSameType(t1, t2)
- }
+ def isNeverSubArg(t1: Type, t2: Type, variance: Variance) = (
+ if (variance.isInvariant) isNeverSameType(t1, t2)
+ else if (variance.isCovariant) isNeverSubType(t2, t1)
+ else if (variance.isContravariant) isNeverSubType(t1, t2)
+ else false
+ )
exists3(tps1, tps2, tparams map (_.variance))(isNeverSubArg)
}
private def isNeverSameType(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
@@ -232,6 +233,17 @@ trait Checkable {
trait InferCheckable {
self: Inferencer =>
+ def isUncheckable(P0: Type) = !isCheckable(P0)
+
+ def isCheckable(P0: Type): Boolean = (
+ uncheckedOk(P0) || (P0.widen match {
+ case TypeRef(_, NothingClass | NullClass | AnyValClass, _) => false
+ case RefinedType(_, decls) if !decls.isEmpty => false
+ case p =>
+ new CheckabilityChecker(AnyClass.tpe, p) isCheckable
+ })
+ )
+
/** TODO: much better error positions.
* Kind of stuck right now because they just pass us the one tree.
* TODO: Eliminate inPattern, canRemedy, which have no place here.
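The isCheckable/isUncheckable helpers added above back the familiar unchecked warning; a hypothetical user-level example (UncheckedDemo is an invented name, not from the patch):

object UncheckedDemo {
  // The String element type is erased, so the checkability analysis classifies
  // this pattern as uncheckable and the compiler emits an unchecked warning.
  def allStrings(xs: Any): Boolean = xs match {
    case _: List[String] => true
    case _               => false
  }
}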
diff --git a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
index 89e2ee44be..65bfd8e34e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc
package typechecker
-
import java.lang.ArithmeticException
/** This class ...
@@ -18,7 +17,6 @@ abstract class ConstantFolder {
val global: Global
import global._
- import definitions._
/** If tree is a constant operation, replace with result. */
def apply(tree: Tree): Tree = fold(tree, tree match {
@@ -29,9 +27,6 @@ abstract class ConstantFolder {
/** If tree is a constant value that can be converted to type `pt`, perform
* the conversion.
- *
- * @param tree ...
- * @param pt ...
*/
def apply(tree: Tree, pt: Type): Tree = fold(apply(tree), tree.tpe match {
case ConstantType(x) => x convertTo pt
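
This hunk only trims the scaladoc, but it is worth recalling what `apply(tree, pt)` does: fold a constant expression and convert the folded literal to the expected type. A hedged sketch of the observable behavior (names are mine):

object ConstantFoldingDemo {
  final val Shift      = 2 + 3        // folded to the constant 5
  final val Mask: Long = 1 << Shift   // folded to 32, then converted to Long
}
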
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 4bf7f78167..0af75a2aad 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -6,14 +6,14 @@
package scala.tools.nsc
package typechecker
-import scala.collection.{ mutable, immutable }
import scala.reflect.internal.util.StringOps.{ countElementsAsString, countAsString }
-import symtab.Flags.{ PRIVATE, PROTECTED, IS_ERROR }
+import symtab.Flags.IS_ERROR
import scala.compat.Platform.EOL
import scala.reflect.runtime.ReflectionUtils
import scala.reflect.macros.runtime.AbortMacroException
import scala.util.control.NonFatal
import scala.tools.nsc.util.stackTraceString
+import scala.reflect.io.NoAbstractFile
trait ContextErrors {
self: Analyzer =>
@@ -153,11 +153,10 @@ trait ContextErrors {
// members present, then display along with the expected members. This is done here because
// this is the last point where we still have access to the original tree, rather than just
// the found/req types.
- val foundType: Type = req.normalize match {
+ val foundType: Type = req.dealiasWiden match {
case RefinedType(parents, decls) if !decls.isEmpty && found.typeSymbol.isAnonOrRefinementClass =>
- val retyped = typed (tree.duplicate setType null)
+ val retyped = typed (tree.duplicate.clearType())
val foundDecls = retyped.tpe.decls filter (sym => !sym.isConstructor && !sym.isSynthetic)
-
if (foundDecls.isEmpty || (found.typeSymbol eq NoSymbol)) found
else {
// The members arrive marked private, presumably because there was no
@@ -171,11 +170,11 @@ trait ContextErrors {
case _ =>
found
}
- assert(!found.isErroneous && !req.isErroneous, (found, req))
+ assert(!foundType.isErroneous && !req.isErroneous, (foundType, req))
- issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req))) )
+ issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(foundType, req, infer.isPossiblyMissingArgs(foundType, req))) )
if (settings.explaintypes.value)
- explainTypes(found, req)
+ explainTypes(foundType, req)
}
def WithFilterError(tree: Tree, ex: AbsTypeError) = {
@@ -184,14 +183,18 @@ trait ContextErrors {
}
def ParentTypesError(templ: Template, ex: TypeError) = {
- templ.tpe = null
- issueNormalTypeError(templ, ex.getMessage())
+ templ.clearType()
+ issueNormalTypeError(templ, ex.getMessage())
+ setError(templ)
}
// additional parentTypes errors
- def ConstrArgsInTraitParentTpeError(arg: Tree, parent: Symbol) =
+ def ConstrArgsInParentWhichIsTraitError(arg: Tree, parent: Symbol) =
issueNormalTypeError(arg, parent + " is a trait; does not take constructor arguments")
+ def ConstrArgsInParentOfTraitError(arg: Tree, parent: Symbol) =
+ issueNormalTypeError(arg, "parents of traits may not have parameters")
+
def MissingTypeArgumentsParentTpeError(supertpt: Tree) =
issueNormalTypeError(supertpt, "missing type arguments")
@@ -513,7 +516,7 @@ trait ContextErrors {
NormalTypeError(tree, fun.tpe+" does not take parameters")
// Dynamic
- def DynamicVarArgUnsupported(tree: Tree, name: String) =
+ def DynamicVarArgUnsupported(tree: Tree, name: Name) =
issueNormalTypeError(tree, name+ " does not support passing a vararg parameter")
def DynamicRewriteError(tree: Tree, err: AbsTypeError) = {
@@ -559,11 +562,13 @@ trait ContextErrors {
//adapt
def MissingArgsForMethodTpeError(tree: Tree, meth: Symbol) = {
- issueNormalTypeError(tree,
- "missing arguments for " + meth.fullLocationString + (
+ val message =
+ if (meth.isMacro) MacroPartialApplicationErrorMessage
+ else "missing arguments for " + meth.fullLocationString + (
if (meth.isConstructor) ""
else ";\nfollow this method with `_' if you want to treat it as a partially applied function"
- ))
+ )
+ issueNormalTypeError(tree, message)
setError(tree)
}
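
The hunk above routes macro symbols to a dedicated message while keeping the familiar hint for ordinary methods. A standalone illustration of the situation that hint refers to (names are mine):

object MissingArgsDemo {
  def greet(name: String): String = "hello, " + name

  // val f = greet          // error: missing arguments for method greet;
  //                        // follow this method with `_' if you want to
  //                        // treat it as a partially applied function
  val g: String => String = greet _   // explicit eta-expansion compiles
}
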
@@ -640,7 +645,7 @@ trait ContextErrors {
val addendums = List(
if (sym0.associatedFile eq sym1.associatedFile)
Some("conflicting symbols both originated in file '%s'".format(sym0.associatedFile.canonicalPath))
- else if ((sym0.associatedFile ne null) && (sym1.associatedFile ne null))
+ else if ((sym0.associatedFile ne NoAbstractFile) && (sym1.associatedFile ne NoAbstractFile))
Some("conflicting symbols originated in files '%s' and '%s'".format(sym0.associatedFile.canonicalPath, sym1.associatedFile.canonicalPath))
else None ,
if (isBug) Some("Note: this may be due to a bug in the compiler involving wildcards in package objects") else None
@@ -657,8 +662,8 @@ trait ContextErrors {
def CyclicAliasingOrSubtypingError(errPos: Position, sym0: Symbol) =
issueTypeError(PosAndMsgTypeError(errPos, "cyclic aliasing or subtyping involving "+sym0))
- def CyclicReferenceError(errPos: Position, lockedSym: Symbol) =
- issueTypeError(PosAndMsgTypeError(errPos, "illegal cyclic reference involving " + lockedSym))
+ def CyclicReferenceError(errPos: Position, tp: Type, lockedSym: Symbol) =
+ issueTypeError(PosAndMsgTypeError(errPos, s"illegal cyclic reference involving $tp and $lockedSym"))
// macro-related errors (also see MacroErrors below)
@@ -667,22 +672,30 @@ trait ContextErrors {
setError(tree)
}
+ def MacroTooManyArgumentListsError(expandee: Tree, fun: Symbol) = {
+ NormalTypeError(expandee, "too many argument lists for " + fun)
+ }
+
+ def MacroInvalidExpansionError(expandee: Tree, role: String, allowedExpansions: String) = {
+ issueNormalTypeError(expandee, s"macro in $role role can only expand into $allowedExpansions")
+ }
+
// same reason as for MacroBodyTypecheckException
case object MacroExpansionException extends Exception with scala.util.control.ControlThrowable
- private def macroExpansionError(expandee: Tree, msg: String = null, pos: Position = NoPosition) = {
+ protected def macroExpansionError(expandee: Tree, msg: String, pos: Position = NoPosition) = {
def msgForLog = if (msg != null && (msg contains "exception during macro expansion")) msg.split(EOL).drop(1).headOption.getOrElse("?") else msg
macroLogLite("macro expansion has failed: %s".format(msgForLog))
- val errorPos = if (pos != NoPosition) pos else (if (expandee.pos != NoPosition) expandee.pos else enclosingMacroPosition)
if (msg != null) context.error(pos, msg) // issueTypeError(PosAndMsgTypeError(..)) won't work => swallows positions
setError(expandee)
throw MacroExpansionException
}
+ def MacroPartialApplicationErrorMessage = "macros cannot be partially applied"
def MacroPartialApplicationError(expandee: Tree) = {
// macroExpansionError won't work => swallows positions, hence needed to do issueTypeError
// kinda contradictory to the comment in `macroExpansionError`, but this is how it works
- issueNormalTypeError(expandee, "macros cannot be partially applied")
+ issueNormalTypeError(expandee, MacroPartialApplicationErrorMessage)
setError(expandee)
throw MacroExpansionException
}
@@ -748,23 +761,26 @@ trait ContextErrors {
macroExpansionError(expandee, template(sym.name.nameKind).format(sym.name + " " + sym.origin, forgotten))
}
- def MacroExpansionIsNotExprError(expandee: Tree, expanded: Any) =
+ def MacroExpansionHasInvalidTypeError(expandee: Tree, expanded: Any) = {
+ val expected = "expr"
+ val isPathMismatch = expanded != null && expanded.isInstanceOf[scala.reflect.api.Exprs#Expr[_]]
macroExpansionError(expandee,
- "macro must return a compiler-specific expr; returned value is " + (
+ s"macro must return a compiler-specific $expected; returned value is " + (
if (expanded == null) "null"
- else if (expanded.isInstanceOf[Expr[_]]) " Expr, but it doesn't belong to this compiler's universe"
+ else if (isPathMismatch) s" $expected, but it doesn't belong to this compiler"
else " of " + expanded.getClass
))
-
- def MacroImplementationNotFoundError(expandee: Tree) = {
- val message =
- "macro implementation not found: " + expandee.symbol.name + " " +
- "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)" +
- (if (forScaladoc) ". When generating scaladocs for multiple projects at once, consider using -Ymacro-no-expand to disable macro expansions altogether."
- else "")
- macroExpansionError(expandee, message)
}
+
+ def MacroImplementationNotFoundError(expandee: Tree) =
+ macroExpansionError(expandee, macroImplementationNotFoundMessage(expandee.symbol.name))
}
+
+ /** This file will be the death of me. */
+ protected def macroImplementationNotFoundMessage(name: Name): String = (
+ s"""|macro implementation not found: $name
+ |(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)""".stripMargin
+ )
}
trait InferencerContextErrors {
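
The factored-out macroImplementationNotFoundMessage reflects a real restriction: a macro implementation must come from an earlier compilation run than its call sites. A hedged 2.10-style sketch (object and method names are mine); compiling code that expands Macros.hello in the same run as this definition produces the error above:

import scala.language.experimental.macros
import scala.reflect.macros.Context

object Macros {
  def hello: String = macro helloImpl

  def helloImpl(c: Context): c.Expr[String] = {
    import c.universe._
    c.Expr[String](Literal(Constant("hello from a macro")))
  }
}
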
@@ -806,7 +822,10 @@ trait ContextErrors {
)
}
- def AccessError(tree: Tree, sym: Symbol, pre: Type, owner0: Symbol, explanation: String) = {
+ def AccessError(tree: Tree, sym: Symbol, ctx: Context, explanation: String): AbsTypeError =
+ AccessError(tree, sym, ctx.enclClass.owner.thisType, ctx.enclClass.owner, explanation)
+
+ def AccessError(tree: Tree, sym: Symbol, pre: Type, owner0: Symbol, explanation: String): AbsTypeError = {
def errMsg = {
val location = if (sym.isClassConstructor) owner0 else pre.widen.directObjectString
@@ -1033,8 +1052,8 @@ trait ContextErrors {
val s1 = if (prevSym.isModule) "case class companion " else ""
val s2 = if (prevSym.isSynthetic) "(compiler-generated) " + s1 else ""
val s3 = if (prevSym.isCase) "case class " + prevSym.name else "" + prevSym
- val where = if (currentSym.owner.isPackageClass != prevSym.owner.isPackageClass) {
- val inOrOut = if (prevSym.owner.isPackageClass) "outside of" else "in"
+ val where = if (currentSym.isTopLevel != prevSym.isTopLevel) {
+ val inOrOut = if (prevSym.isTopLevel) "outside of" else "in"
" %s package object %s".format(inOrOut, ""+prevSym.effectiveOwner.name)
} else ""
@@ -1044,9 +1063,6 @@ trait ContextErrors {
def MaxParametersCaseClassError(tree: Tree) =
issueNormalTypeError(tree, "Implementation restriction: case classes cannot have more than " + definitions.MaxFunctionArity + " parameters.")
- def InheritsItselfError(tree: Tree) =
- issueNormalTypeError(tree, tree.tpe.typeSymbol+" inherits itself")
-
def MissingParameterOrValTypeError(vparam: Tree) =
issueNormalTypeError(vparam, "missing parameter type")
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index f2a2ef4d61..429bd7d682 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -6,9 +6,9 @@
package scala.tools.nsc
package typechecker
-import symtab.Flags._
-import scala.collection.mutable.{LinkedHashSet, Set}
+import scala.collection.mutable
import scala.annotation.tailrec
+import scala.reflect.internal.util.shortClassOfInstance
/**
* @author Martin Odersky
@@ -16,6 +16,7 @@ import scala.annotation.tailrec
*/
trait Contexts { self: Analyzer =>
import global._
+ import definitions.{ JavaLangPackage, ScalaPackage, PredefModule }
object NoContext extends Context {
outer = this
@@ -28,13 +29,17 @@ trait Contexts { self: Analyzer =>
override def toString = "NoContext"
}
private object RootImports {
- import definitions._
// Possible lists of root imports
val javaList = JavaLangPackage :: Nil
val javaAndScalaList = JavaLangPackage :: ScalaPackage :: Nil
val completeList = JavaLangPackage :: ScalaPackage :: PredefModule :: Nil
}
+ def ambiguousImports(imp1: ImportInfo, imp2: ImportInfo) =
+ LookupAmbiguous(s"it is imported twice in the same scope by\n$imp1\nand $imp2")
+ def ambiguousDefnAndImport(owner: Symbol, imp: ImportInfo) =
+ LookupAmbiguous(s"it is both defined in $owner and imported subsequently by \n$imp")
+
private lazy val startContext = {
NoContext.make(
Template(List(), emptyValDef, List()) setSymbol global.NoSymbol setType global.NoType,
@@ -42,6 +47,25 @@ trait Contexts { self: Analyzer =>
rootMirror.RootClass.info.decls)
}
+ private lazy val allUsedSelectors =
+ mutable.Map[ImportInfo, Set[ImportSelector]]() withDefaultValue Set()
+ private lazy val allImportInfos =
+ mutable.Map[CompilationUnit, List[ImportInfo]]() withDefaultValue Nil
+
+ def warnUnusedImports(unit: CompilationUnit) = {
+ for (imps <- allImportInfos.remove(unit)) {
+ for (imp <- imps.reverse.distinct) {
+ val used = allUsedSelectors(imp)
+ def isMask(s: ImportSelector) = s.name != nme.WILDCARD && s.rename == nme.WILDCARD
+
+ imp.tree.selectors filterNot (s => isMask(s) || used(s)) foreach { sel =>
+ unit.warning(imp posOf sel, "Unused import")
+ }
+ }
+ allUsedSelectors --= imps
+ }
+ }
+
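
The added warnUnusedImports walks every ImportInfo recorded for a unit and warns on selectors that were never consulted during lookup (masking renames `x => _` are exempt); in this changeset recording is gated on `settings.lint`. A hedged illustration of the user-visible effect (file and object names are mine):

// scalac -Xlint UnusedImportDemo.scala
// => warning: Unused import   (at the ListBuffer import)
import scala.collection.mutable.ListBuffer   // never referenced below
import scala.collection.mutable.ArrayBuffer

object UnusedImportDemo {
  val buf = ArrayBuffer(1, 2, 3)
}
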
var lastAccessCheckDetails: String = ""
/** List of symbols to import from in a root context. Typically that
@@ -61,10 +85,9 @@ trait Contexts { self: Analyzer =>
else RootImports.completeList
}
- def rootContext(unit: CompilationUnit): Context = rootContext(unit, EmptyTree, false)
- def rootContext(unit: CompilationUnit, tree: Tree): Context = rootContext(unit, tree, false)
+ def rootContext(unit: CompilationUnit): Context = rootContext(unit, EmptyTree, erasedTypes = false)
+ def rootContext(unit: CompilationUnit, tree: Tree): Context = rootContext(unit, tree, erasedTypes = false)
def rootContext(unit: CompilationUnit, tree: Tree, erasedTypes: Boolean): Context = {
- import definitions._
var sc = startContext
for (sym <- rootImports(unit)) {
sc = sc.makeNewImport(sym)
@@ -81,8 +104,8 @@ trait Contexts { self: Analyzer =>
var sc = startContext
while (sc != NoContext) {
sc.tree match {
- case Import(qual, _) => qual.tpe = singleType(qual.symbol.owner.thisType, qual.symbol)
- case _ =>
+ case Import(qual, _) => qual setType singleType(qual.symbol.owner.thisType, qual.symbol)
+ case _ =>
}
sc = sc.outer
}
@@ -113,7 +136,7 @@ trait Contexts { self: Analyzer =>
}
var enclMethod: Context = _ // The next outer context whose tree is a method
- var variance: Int = _ // Variance relative to enclosing class
+ var variance: Variance = Variance.Invariant // Variance relative to enclosing class
private var _undetparams: List[Symbol] = List() // Undetermined type parameters,
// not inherited to child contexts
var depth: Int = 0
@@ -143,13 +166,14 @@ trait Contexts { self: Analyzer =>
var typingIndentLevel: Int = 0
def typingIndent = " " * typingIndentLevel
- var buffer: Set[AbsTypeError] = _
- var warningsBuffer: Set[(Position, String)] = _
+ var buffer: mutable.Set[AbsTypeError] = _
+ var warningsBuffer: mutable.Set[(Position, String)] = _
def enclClassOrMethod: Context =
if ((owner eq NoSymbol) || (owner.isClass) || (owner.isMethod)) this
else outer.enclClassOrMethod
+ def enclosingCaseDef = nextEnclosing(_.tree.isInstanceOf[CaseDef])
def undetparamsString =
if (undetparams.isEmpty) ""
else undetparams.mkString("undetparams=", ", ", "")
@@ -184,25 +208,23 @@ trait Contexts { self: Analyzer =>
def setThrowErrors() = mode &= (~AllMask)
def setAmbiguousErrors(report: Boolean) = if (report) mode |= AmbiguousErrors else mode &= notThrowMask
- def updateBuffer(errors: Set[AbsTypeError]) = buffer ++= errors
+ def updateBuffer(errors: mutable.Set[AbsTypeError]) = buffer ++= errors
def condBufferFlush(removeP: AbsTypeError => Boolean) {
val elems = buffer.filter(removeP)
buffer --= elems
}
def flushBuffer() { buffer.clear() }
- def flushAndReturnBuffer(): Set[AbsTypeError] = {
+ def flushAndReturnBuffer(): mutable.Set[AbsTypeError] = {
val current = buffer.clone()
buffer.clear()
current
}
- def flushAndReturnWarningsBuffer(): Set[(Position, String)] = {
+ def flushAndReturnWarningsBuffer(): mutable.Set[(Position, String)] = {
val current = warningsBuffer.clone()
warningsBuffer.clear()
current
}
- def logError(err: AbsTypeError) = buffer += err
-
def withImplicitsEnabled[T](op: => T): T = {
val saved = implicitsEnabled
implicitsEnabled = true
@@ -288,27 +310,23 @@ trait Contexts { self: Analyzer =>
c.checking = this.checking
c.retyping = this.retyping
c.openImplicits = this.openImplicits
- c.buffer = if (this.buffer == null) LinkedHashSet[AbsTypeError]() else this.buffer // need to initialize
- c.warningsBuffer = if (this.warningsBuffer == null) LinkedHashSet[(Position, String)]() else this.warningsBuffer
+ c.buffer = if (this.buffer == null) mutable.LinkedHashSet[AbsTypeError]() else this.buffer // need to initialize
+ c.warningsBuffer = if (this.warningsBuffer == null) mutable.LinkedHashSet[(Position, String)]() else this.warningsBuffer
registerContext(c.asInstanceOf[analyzer.Context])
debuglog("[context] ++ " + c.unit + " / " + tree.summaryString)
c
}
- // TODO: remove? Doesn't seem to be used
- def make(unit: CompilationUnit): Context = {
- val c = make(unit, EmptyTree, owner, scope, imports)
- c.setReportErrors()
- c.implicitsEnabled = true
- c.macrosEnabled = true
- c
- }
-
def makeNewImport(sym: Symbol): Context =
makeNewImport(gen.mkWildcardImport(sym))
- def makeNewImport(imp: Import): Context =
- make(unit, imp, owner, scope, new ImportInfo(imp, depth) :: imports)
+ def makeNewImport(imp: Import): Context = {
+ val impInfo = new ImportInfo(imp, depth)
+ if (settings.lint.value && imp.pos.isDefined) // pos.isDefined excludes java.lang/scala/Predef imports
+ allImportInfos(unit) ::= impInfo
+
+ make(unit, imp, owner, scope, impInfo :: imports)
+ }
def make(tree: Tree, owner: Symbol, scope: Scope): Context =
if (tree == this.tree && owner == this.owner && scope == this.scope) this
@@ -331,7 +349,7 @@ trait Contexts { self: Analyzer =>
val c = make(newtree)
c.setBufferErrors()
c.setAmbiguousErrors(reportAmbiguousErrors)
- c.buffer = new LinkedHashSet[AbsTypeError]()
+ c.buffer = mutable.LinkedHashSet[AbsTypeError]()
c
}
@@ -388,8 +406,10 @@ trait Contexts { self: Analyzer =>
unit.error(pos, if (checking) "\n**** ERROR DURING INTERNAL CHECKING ****\n" + msg else msg)
@inline private def issueCommon(err: AbsTypeError)(pf: PartialFunction[AbsTypeError, Unit]) {
- debugwarn("issue error: " + err.errMsg)
- if (settings.Yissuedebug.value) (new Exception).printStackTrace()
+ if (settings.Yissuedebug.value) {
+ log("issue error: " + err.errMsg)
+ (new Exception).printStackTrace()
+ }
if (pf isDefinedAt err) pf(err)
else if (bufferErrors) { buffer += err }
else throw new TypeError(err.errPos, err.errMsg)
@@ -423,7 +443,7 @@ trait Contexts { self: Analyzer =>
else throw new TypeError(pos, msg1)
}
- def warning(pos: Position, msg: String): Unit = warning(pos, msg, false)
+ def warning(pos: Position, msg: String): Unit = warning(pos, msg, force = false)
def warning(pos: Position, msg: String, force: Boolean) {
if (reportErrors || force) unit.warning(pos, msg)
else if (bufferErrors) warningsBuffer += ((pos, msg))
@@ -436,16 +456,7 @@ trait Contexts { self: Analyzer =>
case _ => outer.isLocal()
}
- /** Fast path for some slow checks (ambiguous assignment in Refchecks, and
- * existence of __match for MatchTranslation in virtpatmat.) This logic probably
- * needs improvement.
- */
- def isNameInScope(name: Name) = (
- enclosingContextChain exists (ctx =>
- (ctx.scope.lookupEntry(name) != null)
- || (ctx.owner.rawInfo.member(name) != NoSymbol)
- )
- )
+ def isNameInScope(name: Name) = lookupSymbol(name, _ => true).isSuccess
// nextOuter determines which context is searched next for implicits
// (after `this`, which contributes `newImplicits` below.) In
@@ -480,17 +491,6 @@ trait Contexts { self: Analyzer =>
sub.isNonBottomSubClass(base) ||
sub.isModuleClass && sub.linkedClassOfClass.isNonBottomSubClass(base)
- /** Return closest enclosing context that defines a superclass of `clazz`, or a
- * companion module of a superclass of `clazz`, or NoContext if none exists */
- def enclosingSuperClassContext(clazz: Symbol): Context = {
- var c = this.enclClass
- while (c != NoContext &&
- !clazz.isNonBottomSubClass(c.owner) &&
- !(c.owner.isModuleClass && clazz.isNonBottomSubClass(c.owner.companionClass)))
- c = c.outer.enclClass
- c
- }
-
/** Return the closest enclosing context that defines a subclass of `clazz`
* or a companion object thereof, or `NoContext` if no such context exists.
*/
@@ -501,8 +501,7 @@ trait Contexts { self: Analyzer =>
c
}
- /** Is `sym` accessible as a member of tree `site` with type
- * `pre` in current context?
+ /** Is `sym` accessible as a member of `pre` in current context?
*/
def isAccessible(sym: Symbol, pre: Type, superAccess: Boolean = false): Boolean = {
lastAccessCheckDetails = ""
@@ -528,20 +527,6 @@ trait Contexts { self: Analyzer =>
} else (owner hasTransOwner ab)
}
-/*
- var c = this
- while (c != NoContext && c.owner != owner) {
- if (c.outer eq null) abort("accessWithin(" + owner + ") " + c);//debug
- if (c.outer.enclClass eq null) abort("accessWithin(" + owner + ") " + c);//debug
- c = c.outer.enclClass
- }
- c != NoContext
- }
-*/
- /** Is `clazz` a subclass of an enclosing class? */
- def isSubClassOfEnclosing(clazz: Symbol): Boolean =
- enclosingSuperClassContext(clazz) != NoContext
-
def isSubThisType(pre: Type, clazz: Symbol): Boolean = pre match {
case ThisType(pclazz) => pclazz isNonBottomSubClass clazz
case _ => false
@@ -587,8 +572,7 @@ trait Contexts { self: Analyzer =>
( superAccess
|| pre.isInstanceOf[ThisType]
|| phase.erasedTypes
- || isProtectedAccessOK(sym)
- || (sym.allOverriddenSymbols exists isProtectedAccessOK)
+ || (sym.overrideChain exists isProtectedAccessOK)
// that last condition makes protected access via self types work.
)
)
@@ -599,23 +583,39 @@ trait Contexts { self: Analyzer =>
}
def pushTypeBounds(sym: Symbol) {
+ sym.info match {
+ case tb: TypeBounds => if (!tb.isEmptyBounds) log(s"Saving $sym info=$tb")
+ case info => devWarning(s"Something other than a TypeBounds seen in pushTypeBounds: $info is a ${shortClassOfInstance(info)}")
+ }
savedTypeBounds ::= ((sym, sym.info))
}
def restoreTypeBounds(tp: Type): Type = {
- var current = tp
- for ((sym, info) <- savedTypeBounds) {
- debuglog("resetting " + sym + " to " + info);
- sym.info match {
- case TypeBounds(lo, hi) if (hi <:< lo && lo <:< hi) =>
- current = current.instantiateTypeParams(List(sym), List(lo))
-//@M TODO: when higher-kinded types are inferred, probably need a case PolyType(_, TypeBounds(...)) if ... =>
- case _ =>
- }
- sym.setInfo(info)
+ def restore(): Type = savedTypeBounds.foldLeft(tp) { case (current, (sym, savedInfo)) =>
+ def bounds_s(tb: TypeBounds) = if (tb.isEmptyBounds) "<empty bounds>" else s"TypeBounds(lo=${tb.lo}, hi=${tb.hi})"
+ //@M TODO: when higher-kinded types are inferred, probably need a case PolyType(_, TypeBounds(...)) if ... =>
+ val tb @ TypeBounds(lo, hi) = sym.info.bounds
+ val isUnique = lo <:< hi && hi <:< lo
+ val isPresent = current contains sym
+ def saved_s = bounds_s(savedInfo.bounds)
+ def current_s = bounds_s(sym.info.bounds)
+
+ if (isUnique && isPresent)
+ devWarningResult(s"Preserving inference: ${sym.nameString}=$hi in $current (based on $current_s) before restoring $sym to saved $saved_s")(
+ current.instantiateTypeParams(List(sym), List(hi))
+ )
+ else if (isPresent)
+ devWarningResult(s"Discarding inferred $current_s because it does not uniquely determine $sym in")(current)
+ else
+ logResult(s"Discarding inferred $current_s because $sym does not appear in")(current)
+ }
+ try restore()
+ finally {
+ for ((sym, savedInfo) <- savedTypeBounds)
+ sym setInfo debuglogResult(s"Discarding inferred $sym=${sym.info}, restoring saved info")(savedInfo)
+
+ savedTypeBounds = Nil
}
- savedTypeBounds = List()
- current
}
private var implicitsCache: List[List[ImplicitInfo]] = null
@@ -660,7 +660,7 @@ trait Contexts { self: Analyzer =>
case ImportSelector(from, _, to, _) :: sels1 =>
var impls = collect(sels1) filter (info => info.name != from)
if (to != nme.WILDCARD) {
- for (sym <- imp.importedSymbol(to).alternatives)
+ for (sym <- importedAccessibleSymbol(imp, to).alternatives)
if (isQualifyingImplicit(to, sym, pre, imported = true))
impls = new ImplicitInfo(to, pre, sym) :: impls
}
@@ -706,6 +706,280 @@ trait Contexts { self: Analyzer =>
implicitsCache
}
+ /** It's possible that seemingly conflicting identifiers are
+ * identifiably the same after type normalization. In such cases,
+ * allow compilation to proceed. A typical example is:
+ * package object foo { type InputStream = java.io.InputStream }
+ * import foo._, java.io._
+ */
+ private def resolveAmbiguousImport(name: Name, imp1: ImportInfo, imp2: ImportInfo): Option[ImportInfo] = {
+ val imp1Explicit = imp1 isExplicitImport name
+ val imp2Explicit = imp2 isExplicitImport name
+ val ambiguous = if (imp1.depth == imp2.depth) imp1Explicit == imp2Explicit else !imp1Explicit && imp2Explicit
+ val imp1Symbol = (imp1 importedSymbol name).initialize filter (s => isAccessible(s, imp1.qual.tpe, superAccess = false))
+ val imp2Symbol = (imp2 importedSymbol name).initialize filter (s => isAccessible(s, imp2.qual.tpe, superAccess = false))
+
+ // The types of the qualifiers from which the ambiguous imports come.
+ // If the ambiguous name is a value, these must be the same.
+ def t1 = imp1.qual.tpe
+ def t2 = imp2.qual.tpe
+ // The types of the ambiguous symbols, seen as members of their qualifiers.
+ // If the ambiguous name is a monomorphic type, we can relax this far.
+ def mt1 = t1 memberType imp1Symbol
+ def mt2 = t2 memberType imp2Symbol
+
+ def characterize = List(
+ s"types: $t1 =:= $t2 ${t1 =:= t2} members: ${mt1 =:= mt2}",
+ s"member type 1: $mt1",
+ s"member type 2: $mt2"
+ ).mkString("\n ")
+
+ if (!ambiguous || !imp2Symbol.exists) Some(imp1)
+ else if (!imp1Symbol.exists) Some(imp2)
+ else (
+ // The symbol names are checked rather than the symbols themselves because
+ // each time an overloaded member is looked up it receives a new symbol.
+ // So foo.member("x") != foo.member("x") if x is overloaded. This seems
+ // likely to be the cause of other bugs too...
+ if (t1 =:= t2 && imp1Symbol.name == imp2Symbol.name) {
+ log(s"Suppressing ambiguous import: $t1 =:= $t2 && $imp1Symbol == $imp2Symbol")
+ Some(imp1)
+ }
+ // Monomorphism restriction on types is in part because type aliases could have the
+ // same target type but attach different variance to the parameters. Maybe it can be
+ // relaxed, but doesn't seem worth it at present.
+ else if (mt1 =:= mt2 && name.isTypeName && imp1Symbol.isMonomorphicType && imp2Symbol.isMonomorphicType) {
+ log(s"Suppressing ambiguous import: $mt1 =:= $mt2 && $imp1Symbol and $imp2Symbol are equivalent")
+ Some(imp1)
+ }
+ else {
+ log(s"Import is genuinely ambiguous:\n " + characterize)
+ None
+ }
+ )
+ }
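
resolveAmbiguousImport suppresses the ambiguity error when both imports demonstrably supply the same thing, as in the package-object alias case its comment describes. A standalone sketch of that scenario (package names are mine):

// Both imports make the name InputStream available, but it denotes the
// same monomorphic type either way, so the ambiguity is suppressed.
package object netio {
  type InputStream = java.io.InputStream
}

package client {
  import netio._
  import java.io._

  class Reader(val in: InputStream)
}
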
+
+ /** The symbol with name `name` imported via the import in `imp`,
+ * if any such symbol is accessible from this context.
+ */
+ def importedAccessibleSymbol(imp: ImportInfo, name: Name): Symbol =
+ importedAccessibleSymbol(imp, name, requireExplicit = false)
+
+ private def importedAccessibleSymbol(imp: ImportInfo, name: Name, requireExplicit: Boolean): Symbol =
+ imp.importedSymbol(name, requireExplicit) filter (s => isAccessible(s, imp.qual.tpe, superAccess = false))
+
+ /** Is `sym` defined in package object of package `pkg`?
+ * Since sym may be defined in some parent of the package object,
+ * we cannot inspect its owner only; we have to go through the
+ * info of the package object. However to avoid cycles we'll check
+ * what other ways we can before pushing that way.
+ */
+ def isInPackageObject(sym: Symbol, pkg: Symbol): Boolean = {
+ def uninitialized(what: String) = {
+ log(s"Cannot look for $sym in package object of $pkg; $what is not initialized.")
+ false
+ }
+ def pkgClass = if (pkg.isTerm) pkg.moduleClass else pkg
+ def matchesInfo = (
+ // need to be careful here to not get a cyclic reference during bootstrap
+ if (pkg.isInitialized) {
+ val module = pkg.info member nme.PACKAGEkw
+ if (module.isInitialized)
+ module.info.member(sym.name).alternatives contains sym
+ else
+ uninitialized("" + module)
+ }
+ else uninitialized("" + pkg)
+ )
+ def inPackageObject(sym: Symbol) = (
+ // To be in the package object, one of these must be true:
+ // 1) sym.owner is a package object class, and sym.owner.owner is the package class for `pkg`
+ // 2) sym.owner is inherited by the correct package object class
+ // We try to establish 1) by inspecting the owners directly, and then we try
+ // to rule out 2), and only if both those fail do we resort to looking in the info.
+ !sym.isPackage && (sym.owner ne NoSymbol) && (
+ if (sym.owner.isPackageObjectClass)
+ sym.owner.owner == pkgClass
+ else
+ !sym.owner.isPackageClass && matchesInfo
+ )
+ )
+
+ // An overloaded symbol might not have the expected owner!
+ // The alternatives must be inspected directly.
+ pkgClass.isPackageClass && (
+ if (sym.isOverloaded)
+ sym.alternatives forall (isInPackageObject(_, pkg))
+ else
+ inPackageObject(sym)
+ )
+ }
+
+ /** Find the symbol of a simple name starting from this context.
+ * All names are filtered through the "qualifies" predicate,
+ * the search continuing as long as no qualifying name is found.
+ */
+ def lookupSymbol(name: Name, qualifies: Symbol => Boolean): NameLookup = {
+ var lookupError: NameLookup = null // set to non-null if a definite error is encountered
+ var inaccessible: NameLookup = null // records inaccessible symbol for error reporting in case none is found
+ var defSym: Symbol = NoSymbol // the directly found symbol
+ var pre: Type = NoPrefix // the prefix type of defSym, if a class member
+ var cx: Context = this // the context under consideration
+ var symbolDepth: Int = -1 // the depth of the directly found symbol
+
+ def finish(qual: Tree, sym: Symbol): NameLookup = (
+ if (lookupError ne null) lookupError
+ else sym match {
+ case NoSymbol if inaccessible ne null => inaccessible
+ case NoSymbol => LookupNotFound
+ case _ => LookupSucceeded(qual, sym)
+ }
+ )
+ def finishDefSym(sym: Symbol, pre0: Type): NameLookup =
+ if (requiresQualifier(sym))
+ finish(gen.mkAttributedQualifier(pre0), sym)
+ else
+ finish(EmptyTree, sym)
+
+ def isPackageOwnedInDifferentUnit(s: Symbol) = (
+ s.isDefinedInPackage && (
+ !currentRun.compiles(s)
+ || unit.exists && s.sourceFile != unit.source.file
+ )
+ )
+ def requiresQualifier(s: Symbol) = (
+ s.owner.isClass
+ && !s.owner.isPackageClass
+ && !s.isTypeParameterOrSkolem
+ )
+ def lookupInPrefix(name: Name) = pre member name filter qualifies
+ def accessibleInPrefix(s: Symbol) = isAccessible(s, pre, superAccess = false)
+
+ def searchPrefix = {
+ cx = cx.enclClass
+ val found0 = lookupInPrefix(name)
+ val found1 = found0 filter accessibleInPrefix
+ if (found0.exists && !found1.exists && inaccessible == null)
+ inaccessible = LookupInaccessible(found0, analyzer.lastAccessCheckDetails)
+
+ found1
+ }
+
+ def lookupInScope(scope: Scope) =
+ (scope lookupUnshadowedEntries name filter (e => qualifies(e.sym))).toList
+
+ def newOverloaded(owner: Symbol, pre: Type, entries: List[ScopeEntry]) =
+ logResult(s"!!! lookup overloaded")(owner.newOverloaded(pre, entries map (_.sym)))
+
+ // Constructor lookup should only look in the decls of the enclosing class
+ // not in the self-type, nor in the enclosing context, nor in imports (SI-4460, SI-6745)
+ if (name == nme.CONSTRUCTOR) return {
+ val enclClassSym = cx.enclClass.owner
+ val scope = cx.enclClass.prefix.baseType(enclClassSym).decls
+ val constructorSym = lookupInScope(scope) match {
+ case Nil => NoSymbol
+ case hd :: Nil => hd.sym
+ case entries => newOverloaded(enclClassSym, cx.enclClass.prefix, entries)
+ }
+ finishDefSym(constructorSym, cx.enclClass.prefix)
+ }
+
+ // cx.scope eq null arises during FixInvalidSyms in Duplicators
+ while (defSym == NoSymbol && (cx ne NoContext) && (cx.scope ne null)) {
+ pre = cx.enclClass.prefix
+ defSym = lookupInScope(cx.scope) match {
+ case Nil => searchPrefix
+ case entries @ (hd :: tl) =>
+ // we have a winner: record the symbol depth
+ symbolDepth = (cx.depth - cx.scope.nestingLevel) + hd.depth
+ if (tl.isEmpty) hd.sym
+ else newOverloaded(cx.owner, pre, entries)
+ }
+ if (!defSym.exists)
+ cx = cx.outer // push further outward
+ }
+ if (symbolDepth < 0)
+ symbolDepth = cx.depth
+
+ var impSym: Symbol = NoSymbol
+ var imports = Context.this.imports
+ def imp1 = imports.head
+ def imp2 = imports.tail.head
+ def sameDepth = imp1.depth == imp2.depth
+ def imp1Explicit = imp1 isExplicitImport name
+ def imp2Explicit = imp2 isExplicitImport name
+
+ def lookupImport(imp: ImportInfo, requireExplicit: Boolean) =
+ importedAccessibleSymbol(imp, name, requireExplicit) filter qualifies
+
+ while (!impSym.exists && imports.nonEmpty && imp1.depth > symbolDepth) {
+ impSym = lookupImport(imp1, requireExplicit = false)
+ if (!impSym.exists)
+ imports = imports.tail
+ }
+
+ if (defSym.exists && impSym.exists) {
+ // imported symbols take precedence over package-owned symbols in different compilation units.
+ if (isPackageOwnedInDifferentUnit(defSym))
+ defSym = NoSymbol
+ // Defined symbols take precedence over erroneous imports.
+ else if (impSym.isError || impSym.name == nme.CONSTRUCTOR)
+ impSym = NoSymbol
+ // Otherwise they are irreconcilably ambiguous
+ else
+ return ambiguousDefnAndImport(defSym.owner, imp1)
+ }
+
+ // At this point only one or the other of defSym and impSym might be set.
+ if (defSym.exists)
+ finishDefSym(defSym, pre)
+ else if (impSym.exists) {
+ // We continue walking down the imports as long as the tail is non-empty, which gives us:
+ // imports == imp1 :: imp2 :: _
+ // And at least one of the following is true:
+ // - imp1 and imp2 are at the same depth
+ // - imp1 is a wildcard import, so all explicit imports from outer scopes must be checked
+ def keepLooking = (
+ lookupError == null
+ && imports.tail.nonEmpty
+ && (sameDepth || !imp1Explicit)
+ )
+ // If we find a competitor imp2 which imports the same name, possible outcomes are:
+ //
+ // - same depth, imp1 wild, imp2 explicit: imp2 wins, drop imp1
+ // - same depth, imp1 wild, imp2 wild: ambiguity check
+ // - same depth, imp1 explicit, imp2 explicit: ambiguity check
+ // - differing depth, imp1 wild, imp2 explicit: ambiguity check
+ // - all others: imp1 wins, drop imp2
+ //
+ // The ambiguity check is: if we can verify that both imports refer to the same
+ // symbol (e.g. import foo.X followed by import foo._) then we discard imp2
+ // and proceed. If we cannot, issue an ambiguity error.
+ while (keepLooking) {
+ // If not at the same depth, limit the lookup to explicit imports.
+ // This is desirable from a performance standpoint (compare to
+ // filtering after the fact) but also necessary to keep the unused
+ // import check from being misled by symbol lookups which are not
+ // actually used.
+ val other = lookupImport(imp2, requireExplicit = !sameDepth)
+ def imp1wins = { imports = imp1 :: imports.tail.tail }
+ def imp2wins = { impSym = other ; imports = imports.tail }
+
+ if (!other.exists) // imp1 wins; drop imp2 and continue.
+ imp1wins
+ else if (sameDepth && !imp1Explicit && imp2Explicit) // imp2 wins; drop imp1 and continue.
+ imp2wins
+ else resolveAmbiguousImport(name, imp1, imp2) match {
+ case Some(imp) => if (imp eq imp1) imp1wins else imp2wins
+ case _ => lookupError = ambiguousImports(imp1, imp2)
+ }
+ }
+ // optimization: don't write out package prefixes
+ finish(resetPos(imp1.qual.duplicate), impSym)
+ }
+ else finish(EmptyTree, NoSymbol)
+ }
+
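
The loop above encodes the precedence rules for competing imports: an explicit import beats a wildcard at the same depth, a nested import beats an outer one, and genuinely ambiguous cases are reported via ambiguousImports. A small illustration of the classic explicit-versus-wildcard case (names are mine):

// java.util also defines List, but the explicit import wins over the
// wildcard at the same depth, so no ambiguity is reported.
import java.util._
import java.awt.List

object ImportPrecedenceDemo {
  def describe(l: List): String = l.toString   // List here is java.awt.List
}
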
/**
* Find a symbol in this context or one of its outers.
*
@@ -730,11 +1004,14 @@ trait Contexts { self: Analyzer =>
} //class Context
class ImportInfo(val tree: Import, val depth: Int) {
+ def pos = tree.pos
+ def posOf(sel: ImportSelector) = tree.pos withPoint sel.namePos
+
/** The prefix expression */
def qual: Tree = tree.symbol.info match {
case ImportType(expr) => expr
- case ErrorType => tree setType NoType // fix for #2870
- case _ => throw new FatalError("symbol " + tree.symbol + " has bad type: " + tree.symbol.info) //debug
+ case ErrorType => tree setType NoType // fix for #2870
+ case _ => throw new FatalError("symbol " + tree.symbol + " has bad type: " + tree.symbol.info) //debug
}
/** Is name imported explicitly, not via wildcard? */
@@ -743,25 +1020,53 @@ trait Contexts { self: Analyzer =>
/** The symbol with name `name` imported from import clause `tree`.
*/
- def importedSymbol(name: Name): Symbol = {
+ def importedSymbol(name: Name): Symbol = importedSymbol(name, requireExplicit = false)
+
+ private def recordUsage(sel: ImportSelector, result: Symbol) {
+ def posstr = pos.source.file.name + ":" + posOf(sel).safeLine
+ def resstr = if (tree.symbol.hasCompleteInfo) s"(qual=$qual, $result)" else s"(expr=${tree.expr}, ${result.fullLocationString})"
+ debuglog(s"In $this at $posstr, selector '${selectorString(sel)}' resolved to $resstr")
+ allUsedSelectors(this) += sel
+ }
+
+ /** If requireExplicit is true, wildcard imports are not considered. */
+ def importedSymbol(name: Name, requireExplicit: Boolean): Symbol = {
var result: Symbol = NoSymbol
var renamed = false
var selectors = tree.selectors
- while (selectors != Nil && result == NoSymbol) {
- if (selectors.head.rename == name.toTermName)
+ def current = selectors.head
+ while (selectors.nonEmpty && result == NoSymbol) {
+ if (current.rename == name.toTermName)
result = qual.tpe.nonLocalMember( // new to address #2733: consider only non-local members for imports
- if (name.isTypeName) selectors.head.name.toTypeName else selectors.head.name)
- else if (selectors.head.name == name.toTermName)
+ if (name.isTypeName) current.name.toTypeName else current.name)
+ else if (current.name == name.toTermName)
renamed = true
- else if (selectors.head.name == nme.WILDCARD && !renamed)
+ else if (current.name == nme.WILDCARD && !renamed && !requireExplicit)
result = qual.tpe.nonLocalMember(name)
- selectors = selectors.tail
+
+ if (result == NoSymbol)
+ selectors = selectors.tail
}
- result
+ if (settings.lint.value && selectors.nonEmpty && result != NoSymbol && pos != NoPosition)
+ recordUsage(current, result)
+
+ // Harden against the fallout from bugs like SI-6745
+ //
+ // [JZ] I considered issuing a devWarning and moving the
+ // check inside the above loop, as I believe that
+ // this always represents a mistake on the part of
+ // the caller.
+ if (definitions isImportable result) result
+ else NoSymbol
+ }
+ private def selectorString(s: ImportSelector): String = {
+ if (s.name == nme.WILDCARD && s.rename == null) "_"
+ else if (s.name == s.rename) "" + s.name
+ else s.name + " => " + s.rename
}
def allImportedSymbols: Iterable[Symbol] =
- qual.tpe.members flatMap (transformImport(tree.selectors, _))
+ importableMembers(qual.tpe) flatMap (transformImport(tree.selectors, _))
private def transformImport(selectors: List[ImportSelector], sym: Symbol): List[Symbol] = selectors match {
case List() => List()
@@ -772,7 +1077,12 @@ trait Contexts { self: Analyzer =>
case _ :: rest => transformImport(rest, sym)
}
- override def toString() = tree.toString()
+ override def hashCode = tree.##
+ override def equals(other: Any) = other match {
+ case that: ImportInfo => (tree == that.tree)
+ case _ => false
+ }
+ override def toString = tree.toString
}
case class ImportType(expr: Tree) extends Type {
diff --git a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
index 3e249e57bb..73572bcae9 100644
--- a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
@@ -6,8 +6,6 @@
package scala.tools.nsc
package typechecker
-import scala.language.implicitConversions
-
/** A generic means of breaking down types into their subcomponents.
* Types are decomposed top down, and recognizable substructure is
* dispatched via self-apparently named methods. Those methods can
@@ -37,8 +35,6 @@ trait DestructureTypes {
def wrapSequence(nodes: List[Node]): Node
def wrapAtom[U](value: U): Node
- private implicit def liftToTerm(name: String): TermName = newTermName(name)
-
private val openSymbols = scala.collection.mutable.Set[Symbol]()
private def nodeList[T](elems: List[T], mkNode: T => Node): Node =
@@ -68,15 +64,6 @@ trait DestructureTypes {
},
tree.productPrefix
)
- def wrapSymbol(label: String, sym: Symbol): Node = {
- if (sym eq NoSymbol) wrapEmpty
- else atom(label, sym)
- }
- def wrapInfo(sym: Symbol) = sym.info match {
- case TypeBounds(lo, hi) => typeBounds(lo, hi)
- case PolyType(tparams, restpe) => polyFunction(tparams, restpe)
- case _ => wrapEmpty
- }
def wrapSymbolInfo(sym: Symbol): Node = {
if ((sym eq NoSymbol) || openSymbols(sym)) wrapEmpty
else {
@@ -99,7 +86,6 @@ trait DestructureTypes {
def constant(label: String, const: Constant): Node = atom(label, const)
def scope(decls: Scope): Node = node("decls", scopeMemberList(decls.toList))
- def const[T](named: (String, T)): Node = constant(named._1, Constant(named._2))
def resultType(restpe: Type): Node = this("resultType", restpe)
def typeParams(tps: List[Symbol]): Node = node("typeParams", symbolList(tps))
@@ -188,7 +174,6 @@ trait DestructureTypes {
case AntiPolyType(pre, targs) => product(tp, prefix(pre), typeArgs(targs))
case ClassInfoType(parents, decls, clazz) => product(tp, parentList(parents), scope(decls), wrapAtom(clazz))
case ConstantType(const) => product(tp, constant("value", const))
- case DeBruijnIndex(level, index, args) => product(tp, const("level" -> level), const("index" -> index), typeArgs(args))
case OverloadedType(pre, alts) => product(tp, prefix(pre), node("alts", typeList(alts map pre.memberType)))
case RefinedType(parents, decls) => product(tp, parentList(parents), scope(decls))
case SingleType(pre, sym) => product(tp, prefix(pre), wrapAtom(sym))
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index f6142a81be..ad45fc0354 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -19,11 +19,6 @@ abstract class Duplicators extends Analyzer {
import global._
import definitions.{ AnyRefClass, AnyValClass }
- def retyped(context: Context, tree: Tree): Tree = {
- resetClassOwners
- (newBodyDuplicator(context)).typed(tree)
- }
-
/** Retype the given tree in the given context. Use this method when retyping
* a method in a different class. The typer will replace references to the this of
* the old class with the new class, and map symbols through the given 'env'. The
@@ -33,7 +28,7 @@ abstract class Duplicators extends Analyzer {
if (oldThis ne newThis) {
oldClassOwner = oldThis
newClassOwner = newThis
- } else resetClassOwners
+ } else resetClassOwners()
envSubstitution = new SubstSkolemsTypeMap(env.keysIterator.toList, env.valuesIterator.toList)
debuglog("retyped with env: " + env)
@@ -42,9 +37,6 @@ abstract class Duplicators extends Analyzer {
protected def newBodyDuplicator(context: Context) = new BodyDuplicator(context)
- def retypedMethod(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol): Tree =
- (newBodyDuplicator(context)).retypedMethod(tree.asInstanceOf[DefDef], oldThis, newThis)
-
/** Return the special typer for duplicate method bodies. */
override def newTyper(context: Context): Typer =
newBodyDuplicator(context)
@@ -186,31 +178,6 @@ abstract class Duplicators extends Analyzer {
stats.foreach(invalidate(_, owner))
}
- def retypedMethod(ddef: DefDef, oldThis: Symbol, newThis: Symbol): Tree = {
- oldClassOwner = oldThis
- newClassOwner = newThis
- invalidateAll(ddef.tparams)
- mforeach(ddef.vparamss) { vdef =>
- invalidate(vdef)
- vdef.tpe = null
- }
- ddef.symbol = NoSymbol
- enterSym(context, ddef)
- debuglog("remapping this of " + oldClassOwner + " to " + newClassOwner)
- typed(ddef)
- }
-
- private def inspectTpe(tpe: Type) = {
- tpe match {
- case MethodType(_, res) =>
- res + ", " + res.bounds.hi + ", " + (res.bounds.hi match {
- case TypeRef(_, _, args) if (args.length > 0) => args(0) + ", " + args(0).bounds.hi
- case _ => "non-tref: " + res.bounds.hi.getClass
- })
- case _ =>
- }
- }
-
/** Optionally cast this tree into some other type, if required.
* Unless overridden, just returns the tree.
*/
@@ -230,10 +197,10 @@ abstract class Duplicators extends Analyzer {
* their symbols are recreated ad-hoc and their types are fixed inline, instead of letting the
* namer/typer handle them, or Idents that refer to them.
*/
- override def typed(tree: Tree, mode: Int, pt: Type): Tree = {
+ override def typed(tree: Tree, mode: Mode, pt: Type): Tree = {
debuglog("typing " + tree + ": " + tree.tpe + ", " + tree.getClass)
val origtreesym = tree.symbol
- if (tree.hasSymbol && tree.symbol != NoSymbol
+ if (tree.hasSymbolField && tree.symbol != NoSymbol
&& !tree.symbol.isLabel // labels cannot be retyped by the type checker as LabelDef has no ValDef/return type trees
&& invalidSyms.isDefinedAt(tree.symbol)) {
debuglog("removed symbol " + tree.symbol)
@@ -243,40 +210,35 @@ abstract class Duplicators extends Analyzer {
tree match {
case ttree @ TypeTree() =>
// log("fixing tpe: " + tree.tpe + " with sym: " + tree.tpe.typeSymbol)
- ttree.tpe = fixType(ttree.tpe)
- ttree
+ ttree modifyType fixType
case Block(stats, res) =>
debuglog("invalidating block")
invalidateAll(stats)
invalidate(res)
- tree.tpe = null
- super.typed(tree, mode, pt)
+ super.typed(tree.clearType(), mode, pt)
case ClassDef(_, _, _, tmpl @ Template(parents, _, stats)) =>
// log("invalidating classdef " + tree)
tmpl.symbol = tree.symbol.newLocalDummy(tree.pos)
invalidateAll(stats, tree.symbol)
- tree.tpe = null
- super.typed(tree, mode, pt)
+ super.typed(tree.clearType(), mode, pt)
case ddef @ DefDef(_, _, _, _, tpt, rhs) =>
- ddef.tpt.tpe = fixType(ddef.tpt.tpe)
- ddef.tpe = null
- super.typed(ddef, mode, pt)
+ ddef.tpt modifyType fixType
+ super.typed(ddef.clearType(), mode, pt)
case vdef @ ValDef(mods, name, tpt, rhs) =>
// log("vdef fixing tpe: " + tree.tpe + " with sym: " + tree.tpe.typeSymbol + " and " + invalidSyms)
//if (mods.hasFlag(Flags.LAZY)) vdef.symbol.resetFlag(Flags.MUTABLE) // Martin to Iulian: lazy vars can now appear because they are no longer boxed; Please check that deleting this statement is OK.
- vdef.tpt.tpe = fixType(vdef.tpt.tpe)
- vdef.tpe = null
- super.typed(vdef, mode, pt)
+ vdef.tpt modifyType fixType
+ super.typed(vdef.clearType(), mode, pt)
case ldef @ LabelDef(name, params, rhs) =>
// log("label def: " + ldef)
// in case the rhs contains any definitions -- TODO: is this necessary?
invalidate(rhs)
- ldef.tpe = null
+ ldef.clearType()
// is this LabelDef generated by tailcalls?
val isTailLabel = (ldef.params.length >= 1) && (ldef.params.head.name == nme.THIS)
@@ -294,27 +256,23 @@ abstract class Duplicators extends Analyzer {
val params1 = params map newParam
val rhs1 = (new TreeSubstituter(params map (_.symbol), params1) transform rhs) // TODO: duplicate?
- rhs1.tpe = null
- super.typed(treeCopy.LabelDef(tree, name, params1, rhs1), mode, pt)
+ super.typed(treeCopy.LabelDef(tree, name, params1, rhs1.clearType()), mode, pt)
case Bind(name, _) =>
// log("bind: " + tree)
invalidate(tree)
- tree.tpe = null
- super.typed(tree, mode, pt)
+ super.typed(tree.clearType(), mode, pt)
case Ident(_) if tree.symbol.isLabel =>
debuglog("Ident to labeldef " + tree + " switched to ")
tree.symbol = updateSym(tree.symbol)
- tree.tpe = null
- super.typed(tree, mode, pt)
+ super.typed(tree.clearType(), mode, pt)
case Ident(_) if (origtreesym ne null) && origtreesym.isLazy =>
debuglog("Ident to a lazy val " + tree + ", " + tree.symbol + " updated to " + origtreesym)
tree.symbol = updateSym(origtreesym)
- tree.tpe = null
- super.typed(tree, mode, pt)
+ super.typed(tree.clearType(), mode, pt)
case Select(th @ This(_), sel) if (oldClassOwner ne null) && (th.symbol == oldClassOwner) =>
// We use the symbol name instead of the tree name because the symbol
@@ -336,9 +294,15 @@ abstract class Duplicators extends Analyzer {
case ((alt, tpe)) :: Nil =>
log(s"Arrested overloaded type in Duplicators, narrowing to ${alt.defStringSeenAs(tpe)}\n Overload was: $memberString")
Select(This(newClassOwner), alt)
- case _ =>
- log(s"Could not disambiguate $memberString in Duplicators. Attempting name-based selection, but this may not end well...")
- nameSelection
+ case xs =>
+ alts filter (alt => (alt.paramss corresponds tree.symbol.paramss)(_.size == _.size)) match {
+ case alt :: Nil =>
+ log(s"Resorted to parameter list arity to disambiguate to $alt\n Overload was: $memberString")
+ Select(This(newClassOwner), alt)
+ case _ =>
+ log(s"Could not disambiguate $memberTypes. Attempting name-based selection, but we may crash later.")
+ nameSelection
+ }
}
}
else nameSelection
@@ -397,7 +361,7 @@ abstract class Duplicators extends Analyzer {
case _ =>
debuglog("Duplicators default case: " + tree.summaryString)
debuglog(" ---> " + tree)
- if (tree.hasSymbol && tree.symbol != NoSymbol && (tree.symbol.owner == definitions.AnyClass)) {
+ if (tree.hasSymbolField && tree.symbol != NoSymbol && (tree.symbol.owner == definitions.AnyClass)) {
tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any (see AnyVal members, like ==)
}
val ntree = castType(tree, pt)
diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
index bbba7e0435..80dfef6c7b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
@@ -33,7 +33,7 @@ trait EtaExpansion { self: Analyzer =>
}
/** <p>
- * Expand partial function applications of type <code>type</code>.
+ * Expand partial function applications of type `type`.
* </p><pre>
* p.f(es_1)...(es_n)
* ==> {
@@ -56,11 +56,8 @@ trait EtaExpansion { self: Analyzer =>
}
val defs = new ListBuffer[Tree]
- /** Append to <code>defs</code> value definitions for all non-stable
- * subexpressions of the function application <code>tree</code>.
- *
- * @param tree ...
- * @return ...
+ /** Append to `defs` value definitions for all non-stable
+ * subexpressions of the function application `tree`.
*/
def liftoutPrefix(tree: Tree): Tree = {
def liftout(tree: Tree, byName: Boolean): Tree =
@@ -97,11 +94,11 @@ trait EtaExpansion { self: Analyzer =>
// with repeated params, there might be more or fewer args than params
liftout(arg, byName(i).getOrElse(false))
}
- treeCopy.Apply(tree, liftoutPrefix(fn), newArgs) setType null
+ treeCopy.Apply(tree, liftoutPrefix(fn), newArgs).clearType()
case TypeApply(fn, args) =>
- treeCopy.TypeApply(tree, liftoutPrefix(fn), args) setType null
+ treeCopy.TypeApply(tree, liftoutPrefix(fn), args).clearType()
case Select(qual, name) =>
- treeCopy.Select(tree, liftout(qual, false), name) setSymbol NoSymbol setType null
+ treeCopy.Select(tree, liftout(qual, byName = false), name).clearType() setSymbol NoSymbol
case Ident(name) =>
tree
}
@@ -118,7 +115,7 @@ trait EtaExpansion { self: Analyzer =>
val origTpe = sym.tpe
val isRepeated = definitions.isRepeatedParamType(origTpe)
// SI-4176 Don't leak A* in eta-expanded function types. See t4176b.scala
- val droppedStarTpe = if (settings.etaExpandKeepsStar.value) origTpe else dropRepeatedParamType(origTpe)
+ val droppedStarTpe = if (settings.etaExpandKeepsStar.value) origTpe else dropIllegalStarTypes(origTpe)
val valDef = ValDef(Modifiers(SYNTHETIC | PARAM), sym.name.toTermName, TypeTree(droppedStarTpe), EmptyTree)
(valDef, isRepeated)
}
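
The SI-4176 change swaps dropRepeatedParamType for dropIllegalStarTypes so that eta-expansion does not leak `A*` into a function type. A hedged sketch of the behavior being protected (names are mine; the method rename is from the hunk above):

object EtaDemo {
  def sum(xs: Int*): Int = xs.sum

  // the repeated parameter is seen as Seq[Int], not Int*
  val f: Seq[Int] => Int = sum _
}
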
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 01ae0a7a94..2331f82a58 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -30,11 +30,11 @@ trait Implicits {
import global._
import definitions._
import ImplicitsStats._
- import typeDebug.{ ptTree, ptBlock, ptLine }
+ import typeDebug.{ ptBlock, ptLine }
import global.typer.{ printTyping, deindentTyping, indentTyping, printInference }
def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context): SearchResult =
- inferImplicit(tree, pt, reportAmbiguous, isView, context, true, tree.pos)
+ inferImplicit(tree, pt, reportAmbiguous, isView, context, saveAmbiguousDivergent = true, tree.pos)
def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean): SearchResult =
inferImplicit(tree, pt, reportAmbiguous, isView, context, saveAmbiguousDivergent, tree.pos)
@@ -82,7 +82,7 @@ trait Implicits {
val result = new ImplicitSearch(tree, pt, isView, implicitSearchContext, pos).bestImplicit
if (saveAmbiguousDivergent && implicitSearchContext.hasErrors) {
context.updateBuffer(implicitSearchContext.errBuffer.filter(err => err.kind == ErrorKinds.Ambiguous || err.kind == ErrorKinds.Divergent))
- debugwarn("update buffer: " + implicitSearchContext.errBuffer)
+ debuglog("update buffer: " + implicitSearchContext.errBuffer)
}
printInference("[infer implicit] inferred " + result)
context.undetparams = context.undetparams filterNot result.subst.from.contains
@@ -112,7 +112,7 @@ trait Implicits {
val tvars = tpars map (TypeVar untouchable _)
val tpSubsted = tp.subst(tpars, tvars)
- val search = new ImplicitSearch(EmptyTree, functionType(List(tpSubsted), AnyClass.tpe), true, context.makeImplicit(false))
+ val search = new ImplicitSearch(EmptyTree, functionType(List(tpSubsted), AnyClass.tpe), true, context.makeImplicit(reportAmbiguousErrors = false))
search.allImplicitsPoly(tvars)
}
@@ -132,7 +132,7 @@ trait Implicits {
}
/* Map a polytype to one in which all type parameters and argument-dependent types are replaced by wildcards.
- * Consider `implicit def b(implicit x: A): x.T = error("")`. We need to approximate DebruijnIndex types
+ * Consider `implicit def b(implicit x: A): x.T = error("")`. We need to approximate debruijn index types
* when checking whether `b` is a valid implicit, as we haven't even searched a value for the implicit arg `x`,
* so we have to approximate (otherwise it is excluded a priori).
*/
@@ -149,7 +149,7 @@ trait Implicits {
class SearchResult(val tree: Tree, val subst: TreeTypeSubstituter) {
override def toString = "SearchResult(%s, %s)".format(tree,
if (subst.isEmpty) "" else subst)
-
+
def isFailure = false
def isAmbiguousFailure = false
final def isSuccess = !isFailure
@@ -158,7 +158,7 @@ trait Implicits {
lazy val SearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) {
override def isFailure = true
}
-
+
lazy val AmbiguousSearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) {
override def isFailure = true
override def isAmbiguousFailure = true
@@ -198,15 +198,7 @@ trait Implicits {
tp.isError
}
- /** Todo reconcile with definition of stability given in Types.scala */
- private def isStable(tp: Type): Boolean = tp match {
- case TypeRef(pre, sym, _) =>
- sym.isPackageClass ||
- sym.isModuleClass && isStable(pre) /*||
- sym.isAliasType && isStable(tp.normalize)*/
- case _ => tp.isStable
- }
- def isStablePrefix = isStable(pre)
+ def isStablePrefix = pre.isStable
override def equals(other: Any) = other match {
case that: ImplicitInfo =>
@@ -244,11 +236,7 @@ trait Implicits {
object HasMember {
private val hasMemberCache = perRunCaches.newMap[Name, Type]()
def apply(name: Name): Type = hasMemberCache.getOrElseUpdate(name, memberWildcardType(name, WildcardType))
- def unapply(pt: Type): Option[Name] = pt match {
- case RefinedType(List(WildcardType), Scope(sym)) if sym.tpe == WildcardType => Some(sym.name)
- case _ => None
}
- }
/** An extractor for types of the form ? { name: (? >: argtpe <: Any*)restp }
*/
@@ -351,7 +339,7 @@ trait Implicits {
* if one or both are intersection types with a pair of overlapping parent types.
*/
private def dominates(dtor: Type, dted: Type): Boolean = {
- def core(tp: Type): Type = tp.normalize match {
+ def core(tp: Type): Type = tp.dealiasWiden match {
case RefinedType(parents, defs) => intersectionType(parents map core, tp.typeSymbol.owner)
case AnnotatedType(annots, tp, selfsym) => core(tp)
case ExistentialType(tparams, result) => core(result).subst(tparams, tparams map (t => core(t.info.bounds.hi)))
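
core and dominates feed the divergence check: a nested implicit search whose target dominates, yet is more complex than, an earlier one is cut off. One way to provoke the resulting error, as a hedged sketch (type class and method names are mine):

object DivergenceDemo {
  trait Show[T] { def show(t: T): String }

  // Each nested search needs a Show of a strictly deeper List type, so the
  // expansion cannot terminate and is rejected by this check.
  implicit def showViaList[T](implicit deeper: Show[List[T]]): Show[T] =
    new Show[T] { def show(t: T) = deeper.show(List(t)) }

  // implicitly[Show[Int]]   // error: diverging implicit expansion
}
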
@@ -366,11 +354,11 @@ trait Implicits {
deriveTypeWithWildcards(syms.distinct)(tp)
}
def sum(xs: List[Int]) = (0 /: xs)(_ + _)
- def complexity(tp: Type): Int = tp.normalize match {
+ def complexity(tp: Type): Int = tp.dealiasWiden match {
case NoPrefix =>
0
case SingleType(pre, sym) =>
- if (sym.isPackage) 0 else complexity(tp.normalize.widen)
+ if (sym.isPackage) 0 else complexity(tp.dealiasWiden)
case TypeRef(pre, sym, args) =>
complexity(pre) + sum(args map complexity) + 1
case RefinedType(parents, _) =>
@@ -642,7 +630,7 @@ trait Implicits {
printTyping(ptLine("" + info.sym, "tvars" -> tvars, "tvars.constr" -> tvars.map(_.constr)))
val targs = solvedTypes(tvars, undetParams, undetParams map varianceInType(pt),
- false, lubDepth(List(itree2.tpe, pt)))
+ upper = false, lubDepth(List(itree2.tpe, pt)))
// #2421: check that we correctly instantiated type parameters outside of the implicit tree:
checkBounds(itree2, NoPrefix, NoSymbol, undetParams, targs, "inferred ")
@@ -672,11 +660,11 @@ trait Implicits {
// duplicating the code here, but this is probably a
// hotspot (and you can't just call typed, need to force
// re-typecheck)
- // TODO: the return tree is ignored. This seems to make
- // no difference, but it's bad practice regardless.
-
-
- val checked = itree2 match {
+ //
+ // This is just called for the side effect of error detection,
+ // see SI-6966 to see what goes wrong if we use the result of this
+ // as the SearchResult.
+ itree2 match {
case TypeApply(fun, args) => typedTypeApply(itree2, EXPRmode, fun, args)
case Apply(TypeApply(fun, args), _) => typedTypeApply(itree2, EXPRmode, fun, args) // t2421c
case t => t
@@ -1005,7 +993,7 @@ trait Implicits {
case Some(imap) => imap
case None =>
val result = new InfoMap
- getClassParts(sym.tpe)(result, new mutable.HashSet(), pending + sym)
+ getClassParts(sym.tpeHK)(result, new mutable.HashSet(), pending + sym)
infoMapCache(sym) = result
result
}
@@ -1117,7 +1105,7 @@ trait Implicits {
case ThisType(thisSym) =>
gen.mkAttributedThis(thisSym)
case _ =>
- // if ``pre'' is not a PDT, e.g. if someone wrote
+ // if `pre` is not a PDT, e.g. if someone wrote
// implicitly[scala.reflect.macros.Context#TypeTag[Int]]
// then we need to fail, because we don't know the prefix to use during type reification
// upd. we also need to fail silently, because this is a very common situation
@@ -1131,7 +1119,7 @@ trait Implicits {
}
)
// todo. migrate hardcoded materialization in Implicits to corresponding implicit macros
- var materializer = atPos(pos.focus)(gen.mkMethodCall(TagMaterializers(tagClass), List(tp), if (prefix != EmptyTree) List(prefix) else List()))
+ val materializer = atPos(pos.focus)(gen.mkMethodCall(TagMaterializers(tagClass), List(tp), if (prefix != EmptyTree) List(prefix) else List()))
if (settings.XlogImplicits.value) reporter.echo(pos, "materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer))
if (context.macrosEnabled) success(materializer)
// don't call `failure` here. if macros are disabled, we just fail silently
@@ -1166,7 +1154,7 @@ trait Implicits {
/** Re-wraps a type in a manifest before calling inferImplicit on the result */
def findManifest(tp: Type, manifestClass: Symbol = if (full) FullManifestClass else PartialManifestClass) =
- inferImplicit(tree, appliedType(manifestClass, tp), true, false, context).tree
+ inferImplicit(tree, appliedType(manifestClass, tp), reportAmbiguous = true, isView = false, context).tree
def findSubManifest(tp: Type) = findManifest(tp, if (full) FullManifestClass else OptManifestClass)
def mot(tp0: Type, from: List[Symbol], to: List[Type]): SearchResult = {
@@ -1301,7 +1289,7 @@ trait Implicits {
val failstart = if (Statistics.canEnable) Statistics.startTimer(inscopeFailNanos) else null
val succstart = if (Statistics.canEnable) Statistics.startTimer(inscopeSucceedNanos) else null
- var result = searchImplicit(context.implicitss, true)
+ var result = searchImplicit(context.implicitss, isLocal = true)
if (result.isFailure) {
if (Statistics.canEnable) Statistics.stopTimer(inscopeFailNanos, failstart)
@@ -1319,17 +1307,12 @@ trait Implicits {
// `materializeImplicit` does some preprocessing for `pt`
// is it only meant for manifests/tags or we need to do the same for `implicitsOfExpectedType`?
- if (result.isFailure) result = searchImplicit(implicitsOfExpectedType, false)
+ if (result.isFailure && !wasAmbigious) result = searchImplicit(implicitsOfExpectedType, isLocal = false)
if (result.isFailure) {
context.updateBuffer(previousErrs)
if (Statistics.canEnable) Statistics.stopTimer(oftypeFailNanos, failstart)
} else {
- if (wasAmbigious && settings.lint.value)
- reporter.warning(tree.pos,
- "Search of in-scope implicits was ambiguous, and the implicit scope was searched. In Scala 2.11.0, this code will not compile. See SI-6667. \n" +
- previousErrs.map(_.errMsg).mkString("\n"))
-
if (Statistics.canEnable) Statistics.stopTimer(oftypeSucceedNanos, succstart)
if (Statistics.canEnable) Statistics.incCounter(oftypeImplicitHits)
}
@@ -1343,7 +1326,7 @@ trait Implicits {
def allImplicits: List[SearchResult] = {
def search(iss: Infoss, isLocal: Boolean) = applicableInfos(iss, isLocal).values
- (search(context.implicitss, true) ++ search(implicitsOfExpectedType, false)).toList.filter(_.tree ne EmptyTree)
+ (search(context.implicitss, isLocal = true) ++ search(implicitsOfExpectedType, isLocal = false)).toList.filter(_.tree ne EmptyTree)
}
// find all implicits for some type that contains type variables
@@ -1404,7 +1387,6 @@ trait Implicits {
interpolate(msg, Map((typeParamNames zip typeArgs): _*)) // TODO: give access to the name and type of the implicit argument, etc?
def validate: Option[String] = {
- import scala.util.matching.Regex; import scala.collection.breakOut
// is there a shorter way to avoid the intermediate toList?
val refs = """\$\{([^}]+)\}""".r.findAllIn(msg).matchData.map(_ group 1).toSet
val decls = typeParamNames.toSet
@@ -1430,9 +1412,7 @@ object ImplicitsStats {
val subtypeImpl = Statistics.newSubCounter(" of which in implicit", subtypeCount)
val findMemberImpl = Statistics.newSubCounter(" of which in implicit", findMemberCount)
val subtypeAppInfos = Statistics.newSubCounter(" of which in app impl", subtypeCount)
- val subtypeImprovCount = Statistics.newSubCounter(" of which in improves", subtypeCount)
val implicitSearchCount = Statistics.newCounter ("#implicit searches", "typer")
- val triedImplicits = Statistics.newSubCounter(" #tried", implicitSearchCount)
val plausiblyCompatibleImplicits
= Statistics.newSubCounter(" #plausibly compatible", implicitSearchCount)
val matchingImplicits = Statistics.newSubCounter(" #matching", implicitSearchCount)
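A rough sketch (not part of the patch; names invented) of the SI-6667 situation behind the `!wasAmbigious` guard above: when two in-scope implicits are ambiguous, the search no longer quietly falls back to the implicit scope, e.g. a companion object.

object Si6667Sketch {
  class Box
  object Box { implicit val fromCompanion: Box = new Box }

  def demo(): Box = {
    implicit val a: Box = new Box
    implicit val b: Box = new Box
    // implicitly[Box]  // `a` and `b` are ambiguous in scope; with the change above the
    //                  // search stops here instead of being rescued by Box.fromCompanion
    a
  }
}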
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 7161043dcf..9f16f65a6a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -6,11 +6,10 @@
package scala.tools.nsc
package typechecker
-import scala.collection.{ mutable, immutable }
+import scala.collection.immutable
import scala.collection.mutable.ListBuffer
import scala.util.control.ControlThrowable
import symtab.Flags._
-import scala.annotation.tailrec
/** This trait ...
*
@@ -30,8 +29,8 @@ trait Infer extends Checkable {
private def assertNonCyclic(tvar: TypeVar) =
assert(tvar.constr.inst != tvar, tvar.origin)
- /** The formal parameter types corresponding to <code>formals</code>.
- * If <code>formals</code> has a repeated last parameter, a list of
+ /** The formal parameter types corresponding to `formals`.
+ * If `formals` has a repeated last parameter, a list of
* (nargs - params.length + 1) copies of its type is returned.
* By-name types are replaced with their underlying type.
*
@@ -49,6 +48,24 @@ trait Infer extends Checkable {
} else formals1
}
+ /** Sorts the alternatives according to the given comparison function.
+ * Returns a list containing the best alternative as well as any which
+ * the best fails to improve upon.
+ */
+ private def bestAlternatives(alternatives: List[Symbol])(isBetter: (Symbol, Symbol) => Boolean): List[Symbol] = {
+ def improves(sym1: Symbol, sym2: Symbol) = (
+ sym2 == NoSymbol
+ || sym2.isError
+ || sym2.hasAnnotation(BridgeClass)
+ || isBetter(sym1, sym2)
+ )
+
+ alternatives sortWith improves match {
+ case best :: rest if rest.nonEmpty => best :: rest.filterNot(alt => improves(best, alt))
+ case bests => bests
+ }
+ }
+
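A standalone sketch (not compiler code) of the shape `bestAlternatives` returns: the head of the sorted list plus every element the head fails to improve upon, so ties with the best survive and can later be reported as ambiguous.

object BestAlternativesSketch {
  def bestBy[A](xs: List[A])(isBetter: (A, A) => Boolean): List[A] =
    xs.sortWith(isBetter) match {
      case best :: rest => best :: rest.filterNot(isBetter(best, _))
      case Nil          => Nil
    }

  val ranked = bestBy(List(3, 1, 3, 2))(_ > _)   // List(3, 3): the tied 3 competes with the best
}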
/** Returns `(formals, formalsExpanded)` where `formalsExpanded` are the expected types
* for the `nbSubPats` sub-patterns of an extractor pattern, of which the corresponding
* unapply[Seq] call is assumed to have result type `resTp`.
@@ -133,21 +150,7 @@ trait Infer extends Checkable {
else (formals, formalsExpanded)
}
- def actualTypes(actuals: List[Type], nformals: Int): List[Type] =
- if (nformals == 1 && !hasLength(actuals, 1))
- List(if (actuals.isEmpty) UnitClass.tpe else tupleType(actuals))
- else actuals
-
- def actualArgs(pos: Position, actuals: List[Tree], nformals: Int): List[Tree] = {
- val inRange = nformals == 1 && !hasLength(actuals, 1) && actuals.lengthCompare(MaxTupleArity) <= 0
- if (inRange && !phase.erasedTypes) List(atPos(pos)(gen.mkTuple(actuals)))
- else actuals
- }
-
/** A fresh type variable with given type parameter as origin.
- *
- * @param tparam ...
- * @return ...
*/
def freshVar(tparam: Symbol): TypeVar = TypeVar(tparam)
@@ -170,14 +173,13 @@ trait Infer extends Checkable {
case tv @ TypeVar(origin, constr) if !tv.untouchable =>
if (constr.inst == NoType) {
throw new DeferredNoInstance(() =>
- "no unique instantiation of type variable " + origin + " could be found")
+ s"no unique instantiation of type variable $origin could be found")
} else if (excludedVars(tv)) {
throw new NoInstance("cyclic instantiation")
} else {
excludedVars += tv
- val res = apply(constr.inst)
- excludedVars -= tv
- res
+ try apply(constr.inst)
+ finally excludedVars -= tv
}
case _ =>
mapOver(tp)
@@ -185,9 +187,6 @@ trait Infer extends Checkable {
}
/** Is type fully defined, i.e. no embedded anytypes or wildcards in it?
- *
- * @param tp ...
- * @return ...
*/
private[typechecker] def isFullyDefined(tp: Type): Boolean = tp match {
case WildcardType | BoundedWildcardType(_) | NoType =>
@@ -220,7 +219,7 @@ trait Infer extends Checkable {
* @throws NoInstance
*/
def solvedTypes(tvars: List[TypeVar], tparams: List[Symbol],
- variances: List[Int], upper: Boolean, depth: Int): List[Type] = {
+ variances: List[Variance], upper: Boolean, depth: Int): List[Type] = {
if (tvars.nonEmpty)
printInference("[solve types] solving for " + tparams.map(_.name).mkString(", ") + " in " + tvars.mkString(", "))
@@ -259,6 +258,8 @@ trait Infer extends Checkable {
* This method seems to be performance critical.
*/
def normalize(tp: Type): Type = tp match {
+ case pt @ PolyType(tparams, restpe) =>
+ logResult(s"Normalizing $tp in infer")(normalize(restpe))
case mt @ MethodType(params, restpe) if mt.isImplicit =>
normalize(restpe)
case mt @ MethodType(_, restpe) if !mt.isDependentMethodType =>
@@ -291,7 +292,7 @@ trait Infer extends Checkable {
def errorValue = if (context.reportErrors) context.owner.newErrorValue(name) else stdErrorValue
def errorSym = if (tree.isType) errorClass else errorValue
- if (tree.hasSymbol)
+ if (tree.hasSymbolField)
tree setSymbol errorSym
tree setType ErrorType
@@ -317,8 +318,8 @@ trait Infer extends Checkable {
/* -- Tests & Checks---------------------------------------------------- */
- /** Check that <code>sym</code> is defined and accessible as a member of
- * tree <code>site</code> with type <code>pre</code> in current context.
+ /** Check that `sym` is defined and accessible as a member of
+ * tree `site` with type `pre` in current context.
*
* Note: pre is not refchecked -- moreover, refchecking the resulting tree may not refcheck pre,
* since pre may not occur in its type (callers should wrap the result in a TypeTreeWithDeferredRefCheck)
@@ -327,7 +328,6 @@ trait Infer extends Checkable {
if (sym.isError) {
tree setSymbol sym setType ErrorType
} else {
- val topClass = context.owner.enclosingTopLevelClass
if (context.unit.exists)
context.unit.depends += sym.enclosingTopLevelClass
@@ -471,14 +471,9 @@ trait Infer extends Checkable {
}
/** Return inferred type arguments of polymorphic expression, given
- * its type parameters and result type and a prototype <code>pt</code>.
+ * its type parameters and result type and a prototype `pt`.
* If no minimal type variables exist that make the
- * instantiated type a subtype of <code>pt</code>, return null.
- *
- * @param tparams ...
- * @param restpe ...
- * @param pt ...
- * @return ...
+ * instantiated type a subtype of `pt`, return null.
*/
private def exprTypeArgs(tparams: List[Symbol], restpe: Type, pt: Type, useWeaklyCompatible: Boolean = false): (List[Type], List[TypeVar]) = {
val tvars = tparams map freshVar
@@ -496,7 +491,7 @@ trait Infer extends Checkable {
}
//println("try to solve "+tvars+" "+tparams)
(solvedTypes(tvars, tparams, tparams map varianceInType(varianceType),
- false, lubDepth(List(restpe, pt))), tvars)
+ upper = false, lubDepth(List(restpe, pt))), tvars)
} catch {
case ex: NoInstance => (null, null)
}
@@ -505,24 +500,18 @@ trait Infer extends Checkable {
/** Return inferred proto-type arguments of function, given
* its type and value parameters and result type, and a
- * prototype <code>pt</code> for the function result.
+ * prototype `pt` for the function result.
* Type arguments need to be either determined precisely by
* the prototype, or they are maximized, if they occur only covariantly
* in the value parameter list.
* If instantiation of a type parameter fails,
* take WildcardType for the proto-type argument.
- *
- * @param tparams ...
- * @param formals ...
- * @param restype ...
- * @param pt ...
- * @return ...
*/
def protoTypeArgs(tparams: List[Symbol], formals: List[Type], restpe: Type,
pt: Type): List[Type] = {
/** Map type variable to its instance, or, if `variance` is covariant/contravariant,
* to its upper/lower bound */
- def instantiateToBound(tvar: TypeVar, variance: Int): Type = try {
+ def instantiateToBound(tvar: TypeVar, variance: Variance): Type = {
lazy val hiBounds = tvar.constr.hiBounds
lazy val loBounds = tvar.constr.loBounds
lazy val upper = glb(hiBounds)
@@ -535,23 +524,21 @@ trait Infer extends Checkable {
//Console.println("instantiate "+tvar+tvar.constr+" variance = "+variance);//DEBUG
if (tvar.constr.inst != NoType)
instantiate(tvar.constr.inst)
- else if ((variance & COVARIANT) != 0 && hiBounds.nonEmpty)
- setInst(upper)
- else if ((variance & CONTRAVARIANT) != 0 && loBounds.nonEmpty)
+ else if (loBounds.nonEmpty && variance.isContravariant)
setInst(lower)
- else if (hiBounds.nonEmpty && loBounds.nonEmpty && upper <:< lower)
+ else if (hiBounds.nonEmpty && (variance.isPositive || loBounds.nonEmpty && upper <:< lower))
setInst(upper)
else
WildcardType
- } catch {
- case ex: NoInstance => WildcardType
}
val tvars = tparams map freshVar
if (isConservativelyCompatible(restpe.instantiateTypeParams(tparams, tvars), pt))
map2(tparams, tvars)((tparam, tvar) =>
- instantiateToBound(tvar, varianceInTypes(formals)(tparam)))
+ try instantiateToBound(tvar, varianceInTypes(formals)(tparam))
+ catch { case ex: NoInstance => WildcardType }
+ )
else
- tvars map (tvar => WildcardType)
+ tvars map (_ => WildcardType)
}
/** [Martin] Can someone comment this please? I have no idea what it's for
@@ -598,7 +585,7 @@ trait Infer extends Checkable {
*
* Rewrite for repeated param types: Map T* entries to Seq[T].
* @return map from tparams to inferred arg, if inference was successful, tparams that map to None are considered left undetermined
- * type parameters that are inferred as `scala.Nothing` and that are not covariant in <code>restpe</code> are taken to be undetermined
+ * type parameters that are inferred as `scala.Nothing` and that are not covariant in `restpe` are taken to be undetermined
*/
def adjustTypeArgs(tparams: List[Symbol], tvars: List[TypeVar], targs: List[Type], restpe: Type = WildcardType): AdjustedTypeArgs.Result = {
val buf = AdjustedTypeArgs.Result.newBuilder[Symbol, Option[Type]]
@@ -606,33 +593,32 @@ trait Infer extends Checkable {
foreach3(tparams, tvars, targs) { (tparam, tvar, targ) =>
val retract = (
targ.typeSymbol == NothingClass // only retract Nothings
- && (restpe.isWildcard || (varianceInType(restpe)(tparam) & COVARIANT) == 0) // don't retract covariant occurrences
+ && (restpe.isWildcard || !varianceInType(restpe)(tparam).isPositive) // don't retract covariant occurrences
)
- // checks opt.virtPatmat directly so one need not run under -Xexperimental to use virtpatmat
buf += ((tparam,
if (retract) None
else Some(
if (targ.typeSymbol == RepeatedParamClass) targ.baseType(SeqClass)
else if (targ.typeSymbol == JavaRepeatedParamClass) targ.baseType(ArrayClass)
// this infers Foo.type instead of "object Foo" (see also widenIfNecessary)
- else if (targ.typeSymbol.isModuleClass || ((opt.experimental || opt.virtPatmat) && tvar.constr.avoidWiden)) targ
+ else if (targ.typeSymbol.isModuleClass || tvar.constr.avoidWiden) targ
else targ.widen
)
))
}
- buf.result
+ buf.result()
}
/** Return inferred type arguments, given type parameters, formal parameters,
* argument types, result type and expected result type.
- * If this is not possible, throw a <code>NoInstance</code> exception.
+ * If this is not possible, throw a `NoInstance` exception.
* Undetermined type arguments are represented by `definitions.NothingClass.tpe`.
* No check that inferred parameters conform to their bounds is made here.
*
* @param tparams the type parameters of the method
* @param formals the value parameter types of the method
- * @param restp the result type of the method
+ * @param restpe the result type of the method
* @param argtpes the argument types of the application
* @param pt the expected return type of the application
* @return @see adjustTypeArgs
@@ -681,11 +667,59 @@ trait Infer extends Checkable {
}
val targs = solvedTypes(
tvars, tparams, tparams map varianceInTypes(formals),
- false, lubDepth(formals) max lubDepth(argtpes)
+ upper = false, lubDepth(formals) max lubDepth(argtpes)
)
+ // Can warn about inferring Any/AnyVal as long as they don't appear
+ // explicitly anywhere amongst the formal, argument, result, or expected type.
+ def canWarnAboutAny = !(pt :: restpe :: formals ::: argtpes exists (t => (t contains AnyClass) || (t contains AnyValClass)))
+ def argumentPosition(idx: Int): Position = context.tree match {
+ case x: ValOrDefDef => x.rhs match {
+ case Apply(fn, args) if idx < args.size => args(idx).pos
+ case _ => context.tree.pos
+ }
+ case _ => context.tree.pos
+ }
+ if (settings.warnInferAny.value && context.reportErrors && canWarnAboutAny) {
+ foreachWithIndex(targs) ((targ, idx) =>
+ targ.typeSymbol match {
+ case sym @ (AnyClass | AnyValClass) =>
+ context.unit.warning(argumentPosition(idx), s"a type was inferred to be `${sym.name}`; this may indicate a programming error.")
+ case _ =>
+ }
+ )
+ }
adjustTypeArgs(tparams, tvars, targs, restpe)
}
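A hedged illustration of what the `settings.warnInferAny` check added above is after (the command-line name was `-Ywarn-infer-any` around this time, if memory serves): `Any` or `AnyVal` silently inferred as a type argument when neither type is written anywhere in the call.

object InferAnySketch {
  def same[A](x: A, y: A): Boolean = x == y

  val fine = same("1", "2")   // A = String, no warning
  // same("1", 1)             // A is inferred as Any; the new check warns here because
  //                          // Any/AnyVal appear nowhere among the written types
}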
+ /** One must step carefully when assessing applicability due to
+ * complications from varargs, tuple-conversion, named arguments.
+ * This method is used to filter out inapplicable methods,
+ * its behavior slightly configurable based on what stage of
+ * overloading resolution we're at.
+ *
+ * This method has boolean parameters, which is usually suboptimal
+ * but I didn't work out a better way. They don't have defaults,
+ * and the method's scope is limited.
+ */
+ private[typechecker] def isApplicableBasedOnArity(tpe: Type, argsCount: Int, varargsStar: Boolean, tuplingAllowed: Boolean): Boolean = followApply(tpe) match {
+ case OverloadedType(pre, alts) =>
+ alts exists (alt => isApplicableBasedOnArity(pre memberType alt, argsCount, varargsStar, tuplingAllowed))
+ case _ =>
+ val paramsCount = tpe.params.length
+ val simpleMatch = paramsCount == argsCount
+ val varargsTarget = isVarArgsList(tpe.params)
+ def varargsMatch = varargsTarget && (paramsCount - 1) <= argsCount
+ def tuplingMatch = tuplingAllowed && eligibleForTupleConversion(paramsCount, argsCount, varargsTarget)
+
+ // A varargs star call, e.g. (x, y:_*) can only match a varargs method
+ // with the same number of parameters. See SI-5859 for an example of what
+ // would fail were this not enforced before we arrived at isApplicable.
+ if (varargsStar)
+ varargsTarget && simpleMatch
+ else
+ simpleMatch || varargsMatch || tuplingMatch
+ }
+
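A small sketch of the varargs-star rule the comment above cites (SI-5859): a call ending in `: _*` can only line up with a varargs parameter, and that is checked by arity before full applicability testing.

object VarargsStarSketch {
  def h(xs: Int*): Int = xs.sum

  val args  = List(1, 2, 3)
  val total = h(args: _*)   // matches the varargs parameter directly; a non-varargs
                            // overload would be ruled out by the arity check alone
}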
private[typechecker] def followApply(tp: Type): Type = tp match {
case NullaryMethodType(restp) =>
val restp1 = followApply(restp)
@@ -702,14 +736,6 @@ trait Infer extends Checkable {
else OverloadedType(tp, appmeth.alternatives)
}
- def hasExactlyNumParams(tp: Type, n: Int): Boolean = tp match {
- case OverloadedType(pre, alts) =>
- alts exists (alt => hasExactlyNumParams(pre.memberType(alt), n))
- case _ =>
- val len = tp.params.length
- len == n || isVarArgsList(tp.params) && len <= n + 1
- }
-
/**
* Verifies whether the named application is valid. The logic is very
* similar to the one in NamesDefaults.removeNames.
@@ -755,48 +781,75 @@ trait Infer extends Checkable {
(argtpes1, argPos, namesOK)
}
- /** don't do a () to (()) conversion for methods whose second parameter
- * is a varargs. This is a fairly kludgey way to address #3224.
- * We'll probably find a better way to do this by identifying
- * tupled and n-ary methods, but thiws is something for a future major revision.
+ /** True if the given parameter list can accept a tupled argument list,
+ * and the argument list can be tupled (based on its length.)
*/
- def isUnitForVarArgs(args: List[AnyRef], params: List[Symbol]): Boolean =
- args.isEmpty && hasLength(params, 2) && isVarArgsList(params)
+ def eligibleForTupleConversion(paramsCount: Int, argsCount: Int, varargsTarget: Boolean): Boolean = {
+ def canSendTuple = argsCount match {
+ case 0 => !varargsTarget // avoid () to (()) conversion - SI-3224
+ case 1 => false // can't tuple a single argument
+ case n => n <= MaxTupleArity // <= 22 arguments
+ }
+ def canReceiveTuple = paramsCount match {
+ case 1 => true
+ case 2 => varargsTarget
+ case _ => false
+ }
+ canSendTuple && canReceiveTuple
+ }
+ def eligibleForTupleConversion(formals: List[Type], argsCount: Int): Boolean = formals match {
+ case p :: Nil => eligibleForTupleConversion(1, argsCount, varargsTarget = isScalaRepeatedParamType(p))
+ case _ :: p :: Nil if isScalaRepeatedParamType(p) => eligibleForTupleConversion(2, argsCount, varargsTarget = true)
+ case _ => false
+ }
- /** Is there an instantiation of free type variables <code>undetparams</code>
- * such that function type <code>ftpe</code> is applicable to
- * <code>argtpes</code> and its result conform to <code>pt</code>?
+ /** The type of an argument list after being coerced to a tuple.
+ * @pre: the argument list is eligible for tuple conversion.
+ */
+ private def typeAfterTupleConversion(argtpes: List[Type]): Type = (
+ if (argtpes.isEmpty) UnitClass.tpe // aka "Tuple0"
+ else tupleType(argtpes map {
+ case NamedType(name, tp) => UnitClass.tpe // not a named arg - only assignments here
+ case RepeatedType(tp) => tp // but probably shouldn't be tupling a call containing :_*
+ case tp => tp
+ })
+ )
+
+ /** If the argument list needs to be tupled for the parameter list,
+ * a list containing the type of the tuple. Otherwise, the original
+ * argument list.
+ */
+ def tupleIfNecessary(formals: List[Type], argtpes: List[Type]): List[Type] = {
+ if (eligibleForTupleConversion(formals, argtpes.size))
+ typeAfterTupleConversion(argtpes) :: Nil
+ else
+ argtpes
+ }
+
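A standalone restatement (not the compiler's own entry points) of the tupling rule encoded by `eligibleForTupleConversion` above, with one worked case.

object TuplingSketch {
  def canSendTuple(argsCount: Int, varargsTarget: Boolean): Boolean = argsCount match {
    case 0 => !varargsTarget   // () to (()) only for non-varargs targets (SI-3224)
    case 1 => false            // a single argument is never tupled
    case n => n <= 22          // MaxTupleArity
  }
  def canReceiveTuple(paramsCount: Int, varargsTarget: Boolean): Boolean = paramsCount match {
    case 1 => true
    case 2 => varargsTarget
    case _ => false
  }

  // def f(p: (Int, Int)) called as f(1, 2): two args sent, one non-varargs param receives
  val eligible = canSendTuple(2, varargsTarget = false) && canReceiveTuple(1, varargsTarget = false)
}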
+ /** Is there an instantiation of free type variables `undetparams`
+ * such that function type `ftpe` is applicable to
+ * `argtpes` and its result conform to `pt`?
*
- * @param undetparams ...
* @param ftpe the type of the function (often a MethodType)
- * @param argtpes the argument types; a NamedType(name, tp) for named
+ * @param argtpes0 the argument types; a NamedType(name, tp) for named
* arguments. For each NamedType, if `name` does not exist in `ftpe`, that
* type is set to `Unit`, i.e. the corresponding argument is treated as
* an assignment expression (@see checkNames).
- * @param pt ...
- * @return ...
*/
private def isApplicable(undetparams: List[Symbol], ftpe: Type,
argtpes0: List[Type], pt: Type): Boolean =
ftpe match {
case OverloadedType(pre, alts) =>
- alts exists (alt => isApplicable(undetparams, pre.memberType(alt), argtpes0, pt))
+ alts exists (alt => isApplicable(undetparams, pre memberType alt, argtpes0, pt))
case ExistentialType(tparams, qtpe) =>
isApplicable(undetparams, qtpe, argtpes0, pt)
case mt @ MethodType(params, _) =>
- val formals = formalTypes(mt.paramTypes, argtpes0.length, removeByName = false)
-
- def tryTupleApply: Boolean = {
- // if 1 formal, 1 argtpe (a tuple), otherwise unmodified argtpes0
- val tupleArgTpes = actualTypes(argtpes0 map {
- // no assignment is treated as named argument here
- case NamedType(name, tp) => UnitClass.tpe
- case tp => tp
- }, formals.length)
-
- !sameLength(argtpes0, tupleArgTpes) &&
- !isUnitForVarArgs(argtpes0, params) &&
- isApplicable(undetparams, ftpe, tupleArgTpes, pt)
+ val argslen = argtpes0.length
+ val formals = formalTypes(mt.paramTypes, argslen, removeByName = false)
+
+ def tryTupleApply = {
+ val tupled = tupleIfNecessary(mt.paramTypes, argtpes0)
+ (tupled ne argtpes0) && isApplicable(undetparams, ftpe, tupled, pt)
}
def typesCompatible(argtpes: List[Type]) = {
val restpe = ftpe.resultType(argtpes)
@@ -818,17 +871,16 @@ trait Infer extends Checkable {
val lencmp = compareLengths(argtpes0, formals)
if (lencmp > 0) tryTupleApply
else if (lencmp == 0) {
- if (!argtpes0.exists(_.isInstanceOf[NamedType])) {
// fast track if no named arguments are used
+ if (!containsNamedType(argtpes0))
typesCompatible(argtpes0)
- }
else {
// named arguments are used
val (argtpes1, argPos, namesOK) = checkNames(argtpes0, params)
// when using named application, the vararg param has to be specified exactly once
- ( namesOK && (isIdentity(argPos) || sameLength(formals, params)) &&
- // nb. arguments and names are OK, check if types are compatible
- typesCompatible(reorderArgs(argtpes1, argPos))
+ ( namesOK
+ && (allArgsArePositional(argPos) || sameLength(formals, params))
+ && typesCompatible(reorderArgs(argtpes1, argPos)) // nb. arguments and names are OK, check if types are compatible
)
}
}
@@ -862,7 +914,7 @@ trait Infer extends Checkable {
*/
private[typechecker] def isApplicableSafe(undetparams: List[Symbol], ftpe: Type,
argtpes0: List[Type], pt: Type): Boolean = {
- val silentContext = context.makeSilent(false)
+ val silentContext = context.makeSilent(reportAmbiguousErrors = false)
val typer0 = newTyper(silentContext)
val res1 = typer0.infer.isApplicable(undetparams, ftpe, argtpes0, pt)
if (pt != WildcardType && silentContext.hasErrors) {
@@ -872,17 +924,13 @@ trait Infer extends Checkable {
} else res1
}
- /** Is type <code>ftpe1</code> strictly more specific than type <code>ftpe2</code>
+ /** Is type `ftpe1` strictly more specific than type `ftpe2`
* when both are alternatives in an overloaded function?
* @see SLS (sec:overloading-resolution)
- *
- * @param ftpe1 ...
- * @param ftpe2 ...
- * @return ...
*/
def isAsSpecific(ftpe1: Type, ftpe2: Type): Boolean = ftpe1 match {
case OverloadedType(pre, alts) =>
- alts exists (alt => isAsSpecific(pre.memberType(alt), ftpe2))
+ alts exists (alt => isAsSpecific(pre memberType alt, ftpe2))
case et: ExistentialType =>
isAsSpecific(ftpe1.skolemizeExistential, ftpe2)
//et.withTypeVars(isAsSpecific(_, ftpe2))
@@ -909,7 +957,7 @@ trait Infer extends Checkable {
case _ =>
ftpe2 match {
case OverloadedType(pre, alts) =>
- alts forall (alt => isAsSpecific(ftpe1, pre.memberType(alt)))
+ alts forall (alt => isAsSpecific(ftpe1, pre memberType alt))
case et: ExistentialType =>
et.withTypeVars(isAsSpecific(ftpe1, _))
case mt: MethodType =>
@@ -1053,7 +1101,7 @@ trait Infer extends Checkable {
}
def checkKindBounds(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): List[String] = {
- checkKindBounds0(tparams, targs, pre, owner, true) map {
+ checkKindBounds0(tparams, targs, pre, owner, explainErrors = true) map {
case (targ, tparam, kindErrors) =>
kindErrors.errorMessage(targ, tparam)
}
@@ -1123,30 +1171,25 @@ trait Infer extends Checkable {
/** Substitute free type variables `undetparams` of polymorphic argument
* expression `tree` to `targs`, Error if `targs` is null.
- *
- * @param tree ...
- * @param undetparams ...
- * @param targs ...
- * @param pt ...
*/
- private def substExpr(tree: Tree, undetparams: List[Symbol],
- targs: List[Type], pt: Type) {
+ private def substExpr(tree: Tree, undetparams: List[Symbol], targs: List[Type], pt: Type) {
if (targs eq null) {
if (!tree.tpe.isErroneous && !pt.isErroneous)
PolymorphicExpressionInstantiationError(tree, undetparams, pt)
- } else {
+ }
+ else {
new TreeTypeSubstituter(undetparams, targs).traverse(tree)
notifyUndetparamsInferred(undetparams, targs)
}
}
- /** Substitute free type variables <code>undetparams</code> of application
- * <code>fn(args)</code>, given prototype <code>pt</code>.
+ /** Substitute free type variables `undetparams` of application
+ * `fn(args)`, given prototype `pt`.
*
* @param fn fn: the function that needs to be instantiated.
* @param undetparams the parameters that need to be determined
* @param args the actual arguments supplied in the call.
- * @param pt the expected type of the function application
+ * @param pt0 the expected type of the function application
* @return The type parameters that remain uninstantiated,
* and that thus have not been substituted.
*/
@@ -1156,7 +1199,7 @@ trait Infer extends Checkable {
try {
val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
val formals = formalTypes(mt.paramTypes, args.length)
- val argtpes = actualTypes(args map (x => elimAnonymousClass(x.tpe.deconst)), formals.length)
+ val argtpes = tupleIfNecessary(formals, args map (x => elimAnonymousClass(x.tpe.deconst)))
val restpe = fn.tpe.resultType(argtpes)
val AdjustedTypeArgs.AllArgsAndUndets(okparams, okargs, allargs, leftUndet) =
@@ -1192,17 +1235,15 @@ trait Infer extends Checkable {
}
}
- def widen(tp: Type): Type = abstractTypesToBounds(tp)
-
- /** Substitute free type variables <code>undetparams</code> of type constructor
- * <code>tree</code> in pattern, given prototype <code>pt</code>.
+ /** Substitute free type variables `undetparams` of type constructor
+ * `tree` in pattern, given prototype `pt`.
*
* @param tree the constructor that needs to be instantiated
* @param undetparams the undetermined type parameters
- * @param pt the expected result type of the instance
+ * @param pt0 the expected result type of the instance
*/
def inferConstructorInstance(tree: Tree, undetparams: List[Symbol], pt0: Type) {
- val pt = widen(pt0)
+ val pt = abstractTypesToBounds(pt0)
val ptparams = freeTypeParamsOfTerms(pt)
val ctorTp = tree.tpe
val resTp = ctorTp.finalResultType
@@ -1222,7 +1263,7 @@ trait Infer extends Checkable {
val variances =
if (ctorTp.paramTypes.isEmpty) undetparams map varianceInType(ctorTp)
else undetparams map varianceInTypes(ctorTp.paramTypes)
- val targs = solvedTypes(tvars, undetparams, variances, true, lubDepth(List(resTp, pt)))
+ val targs = solvedTypes(tvars, undetparams, variances, upper = true, lubDepth(List(resTp, pt)))
// checkBounds(tree, NoPrefix, NoSymbol, undetparams, targs, "inferred ")
// no checkBounds here. If we enable it, test bug602 fails.
// TODO: reinstate checkBounds, return params that fail to meet their bounds to undetparams
@@ -1262,109 +1303,62 @@ trait Infer extends Checkable {
}
} else None
- (inferFor(pt) orElse inferForApproxPt) map { targs =>
+ val inferred = inferFor(pt) orElse inferForApproxPt
+
+ inferred match {
+ case Some(targs) =>
new TreeTypeSubstituter(undetparams, targs).traverse(tree)
notifyUndetparamsInferred(undetparams, targs)
- } getOrElse {
- debugwarn("failed inferConstructorInstance for "+ tree +" : "+ tree.tpe +" under "+ undetparams +" pt = "+ pt +(if(isFullyDefined(pt)) " (fully defined)" else " (not fully defined)"))
+ case _ =>
+ def full = if (isFullyDefined(pt)) "(fully defined)" else "(not fully defined)"
+ devWarning(s"failed inferConstructorInstance for $tree: ${tree.tpe} undet=$undetparams, pt=$pt $full")
// if (settings.explaintypes.value) explainTypes(resTp.instantiateTypeParams(undetparams, tvars), pt)
ConstrInstantiationError(tree, resTp, pt)
}
}
-
- def instBounds(tvar: TypeVar): (Type, Type) = {
- val tparam = tvar.origin.typeSymbol
- val instType = toOrigin(tvar.constr.inst)
+ def instBounds(tvar: TypeVar): TypeBounds = {
+ val tparam = tvar.origin.typeSymbol
+ val instType = toOrigin(tvar.constr.inst)
+ val TypeBounds(lo, hi) = tparam.info.bounds
val (loBounds, hiBounds) =
- if (instType != NoType && isFullyDefined(instType)) (List(instType), List(instType))
+ if (isFullyDefined(instType)) (List(instType), List(instType))
else (tvar.constr.loBounds, tvar.constr.hiBounds)
- val lo = lub(tparam.info.bounds.lo :: loBounds map toOrigin)
- val hi = glb(tparam.info.bounds.hi :: hiBounds map toOrigin)
- (lo, hi)
+
+ TypeBounds(
+ lub(lo :: loBounds map toOrigin),
+ glb(hi :: hiBounds map toOrigin)
+ )
}
def isInstantiatable(tvars: List[TypeVar]) = {
val tvars1 = tvars map (_.cloneInternal)
// Note: right now it's not clear that solving is complete, or how it can be made complete!
// So we should come back to this and investigate.
- solve(tvars1, tvars1 map (_.origin.typeSymbol), tvars1 map (x => COVARIANT), false)
+ solve(tvars1, tvars1 map (_.origin.typeSymbol), tvars1 map (_ => Variance.Covariant), upper = false)
}
- // this is quite nasty: it destructively changes the info of the syms of e.g., method type params (see #3692, where the type param T's bounds were set to >: T <: T, so that parts looped)
+ // this is quite nasty: it destructively changes the info of the syms of e.g., method type params
+ // (see #3692, where the type param T's bounds were set to >: T <: T, so that parts looped)
// the changes are rolled back by restoreTypeBounds, but might be unintentionally observed in the meantime
def instantiateTypeVar(tvar: TypeVar) {
- val tparam = tvar.origin.typeSymbol
- if (false &&
- tvar.constr.inst != NoType &&
- isFullyDefined(tvar.constr.inst) &&
- (tparam.info.bounds containsType tvar.constr.inst)) {
- context.nextEnclosing(_.tree.isInstanceOf[CaseDef]).pushTypeBounds(tparam)
- tparam setInfo tvar.constr.inst
- tparam resetFlag DEFERRED
- debuglog("new alias of " + tparam + " = " + tparam.info)
- } else {
- val (lo, hi) = instBounds(tvar)
- if (lo <:< hi) {
- if (!((lo <:< tparam.info.bounds.lo) && (tparam.info.bounds.hi <:< hi)) // bounds were improved
- && tparam != lo.typeSymbolDirect && tparam != hi.typeSymbolDirect) { // don't create illegal cycles
- context.nextEnclosing(_.tree.isInstanceOf[CaseDef]).pushTypeBounds(tparam)
- tparam setInfo TypeBounds(lo, hi)
- debuglog("new bounds of " + tparam + " = " + tparam.info)
- } else {
- debuglog("redundant: "+tparam+" "+tparam.info+"/"+lo+" "+hi)
- }
- } else {
- debuglog("inconsistent: "+tparam+" "+lo+" "+hi)
- }
- }
- }
-
- /** Does `tp` contain any types that cannot be checked at run-time (i.e., after erasure, will isInstanceOf[erased(tp)] imply conceptualIsInstanceOf[tp]?)
- * we should find a way to ask erasure: hey, is `tp` going to make it through you with all of its isInstanceOf resolving powers intact?
- * TODO: at the very least, reduce duplication wrt checkCheckable
- */
- def containsUnchecked(tp: Type): Boolean = {
- def check(tp: Type, bound: List[Symbol]): Boolean = {
- def isSurroundingTypeParam(sym: Symbol) = {
- val e = context.scope.lookupEntry(sym.name)
- ( (e ne null)
- && (e.sym == sym )
- && !e.sym.isTypeParameterOrSkolem
- && (e.owner == context.scope)
- )
- }
- def isLocalBinding(sym: Symbol) = (
- sym.isAbstractType && (
- (bound contains sym)
- || (sym.name == tpnme.WILDCARD)
- || isSurroundingTypeParam(sym)
- )
- )
- tp.normalize match {
- case SingleType(pre, _) =>
- check(pre, bound)
- case TypeRef(_, ArrayClass, arg :: _) =>
- check(arg, bound)
- case tp @ TypeRef(pre, sym, args) =>
- ( (sym.isAbstractType && !isLocalBinding(sym))
- || (args exists (x => !isLocalBinding(x.typeSymbol)))
- || check(pre, bound)
- )
- // case RefinedType(_, decls) if decls.nonEmpty =>
- // patternWarning(tp, "refinement ")
- case RefinedType(parents, _) =>
- parents exists (p => check(p, bound))
- case ExistentialType(quantified, tp1) =>
- check(tp1, bound ::: quantified)
- case _ =>
- false
+ val tparam = tvar.origin.typeSymbol
+ val TypeBounds(lo0, hi0) = tparam.info.bounds
+ val tb @ TypeBounds(lo1, hi1) = instBounds(tvar)
+
+ if (lo1 <:< hi1) {
+ if (lo1 <:< lo0 && hi0 <:< hi1) // bounds unimproved
+ log(s"redundant bounds: discarding TypeBounds($lo1, $hi1) for $tparam, no improvement on TypeBounds($lo0, $hi0)")
+ else if (tparam == lo1.typeSymbolDirect || tparam == hi1.typeSymbolDirect)
+ log(s"cyclical bounds: discarding TypeBounds($lo1, $hi1) for $tparam because $tparam appears as bounds")
+ else {
+ context.enclosingCaseDef pushTypeBounds tparam
+ tparam setInfo logResult(s"updated bounds: $tparam from ${tparam.info} to")(tb)
}
}
- check(tp, Nil)
+ else log(s"inconsistent bounds: discarding TypeBounds($lo1, $hi1)")
}
-
/** Type intersection of simple type tp1 with general type tp2.
* The result eliminates some redundancies.
*/
@@ -1383,7 +1377,7 @@ trait Infer extends Checkable {
}
def inferTypedPattern(tree0: Tree, pattp: Type, pt0: Type, canRemedy: Boolean): Type = {
- val pt = widen(pt0)
+ val pt = abstractTypesToBounds(pt0)
val ptparams = freeTypeParamsOfTerms(pt)
val tpparams = freeTypeParamsOfTerms(pattp)
@@ -1503,62 +1497,39 @@ trait Infer extends Checkable {
}
*/
- /** Assign <code>tree</code> the symbol and type of the alternative which
- * matches prototype <code>pt</code>, if it exists.
+ /** Assign `tree` the symbol and type of the alternative which
+ * matches prototype `pt`, if it exists.
* If several alternatives match `pt`, take parameterless one.
* If no alternative matches `pt`, take the parameterless one anyway.
*/
def inferExprAlternative(tree: Tree, pt: Type) = tree.tpe match {
case OverloadedType(pre, alts) => tryTwice { isSecondTry =>
val alts0 = alts filter (alt => isWeaklyCompatible(pre.memberType(alt), pt))
- val noAlternatives = alts0.isEmpty
- val alts1 = if (noAlternatives) alts else alts0
+ val alts1 = if (alts0.isEmpty) alts else alts0
- //println("trying "+alts1+(alts1 map (_.tpe))+(alts1 map (_.locationString))+" for "+pt)
- def improves(sym1: Symbol, sym2: Symbol): Boolean =
- sym2 == NoSymbol || sym2.hasAnnotation(BridgeClass) ||
- { val tp1 = pre.memberType(sym1)
+ val bests = bestAlternatives(alts1) { (sym1, sym2) =>
+ val tp1 = pre.memberType(sym1)
val tp2 = pre.memberType(sym2)
- (tp2 == ErrorType ||
- !global.typer.infer.isWeaklyCompatible(tp2, pt) && global.typer.infer.isWeaklyCompatible(tp1, pt) ||
- isStrictlyMoreSpecific(tp1, tp2, sym1, sym2)) }
-
- val best = ((NoSymbol: Symbol) /: alts1) ((best, alt) =>
- if (improves(alt, best)) alt else best)
- val competing = alts1 dropWhile (alt => best == alt || improves(best, alt))
-
- if (best == NoSymbol) {
- if (settings.debug.value) {
- tree match {
- case Select(qual, _) =>
- Console.println("qual: " + qual + ":" + qual.tpe +
- " with decls " + qual.tpe.decls +
- " with members " + qual.tpe.members +
- " with members " + qual.tpe.member(newTermName("$minus")))
- case _ =>
+ ( tp2 == ErrorType
+ || (!isWeaklyCompatible(tp2, pt) && isWeaklyCompatible(tp1, pt))
+ || isStrictlyMoreSpecific(tp1, tp2, sym1, sym2)
+ )
}
- }
- // todo: missing test case
- NoBestExprAlternativeError(tree, pt, isSecondTry)
- } else if (!competing.isEmpty) {
- if (noAlternatives) NoBestExprAlternativeError(tree, pt, isSecondTry)
- else if (!pt.isErroneous) AmbiguousExprAlternativeError(tree, pre, best, competing.head, pt, isSecondTry)
- else {
+ // todo: missing test case for bests.isEmpty
+ bests match {
+ case best :: Nil => tree setSymbol best setType (pre memberType best)
+ case best :: competing :: _ if alts0.nonEmpty =>
// SI-6912 Don't give up and leave an OverloadedType on the tree.
// Originally I wrote this as `if (secondTry) ... `, but `tryTwice` won't attempt the second try
// unless an error is issued. We're not issuing an error, in the assumption that it would be
// spurious in light of the erroneous expected type
- setError(tree)
+ if (pt.isErroneous) setError(tree)
+ else AmbiguousExprAlternativeError(tree, pre, best, competing, pt, isSecondTry)
+ case _ => if (bests.isEmpty || alts0.isEmpty) NoBestExprAlternativeError(tree, pt, isSecondTry)
}
- } else {
-// val applicable = alts1 filter (alt =>
-// global.typer.infer.isWeaklyCompatible(pre.memberType(alt), pt))
-// checkNotShadowed(tree.pos, pre, best, applicable)
- tree.setSymbol(best).setType(pre.memberType(best))
}
}
- }
@inline private def inSilentMode(context: Context)(expr: => Boolean): Boolean = {
val oldState = context.state
@@ -1574,105 +1545,94 @@ trait Infer extends Checkable {
private def paramMatchesName(param: Symbol, name: Name) =
param.name == name || param.deprecatedParamName.exists(_ == name)
- // Check the first parameter list the same way.
- private def methodMatchesName(method: Symbol, name: Name) = method.paramss match {
- case ps :: _ => ps exists (p => paramMatchesName(p, name))
- case _ => false
+ private def containsNamedType(argtpes: List[Type]): Boolean = argtpes match {
+ case Nil => false
+ case NamedType(_, _) :: _ => true
+ case _ :: rest => containsNamedType(rest)
}
-
- private def resolveOverloadedMethod(argtpes: List[Type], eligible: List[Symbol]) = {
+ private def namesOfNamedArguments(argtpes: List[Type]) =
+ argtpes collect { case NamedType(name, _) => name }
+
+ /** Given a list of argument types and eligible method overloads, whittle the
+ * list down to the methods which should be considered for specificity
+ * testing, taking into account here:
+ * - named arguments at the call site (keep only methods with name-matching parameters)
+ * - if multiple methods are eligible, drop any methods which take default arguments
+ * - drop any where arity cannot match under any conditions (allowing for
+ * overloaded applies, varargs, and tupling conversions)
+ * This method is conservative; it can tolerate some varieties of false positive,
+ * but no false negatives.
+ *
+ * @param eligible the overloaded method symbols
+ * @param argtpes the argument types at the call site
+ * @param varargsStar true if the call site has a `: _*` attached to the last argument
+ */
+ private def overloadsToConsiderBySpecificity(eligible: List[Symbol], argtpes: List[Type], varargsStar: Boolean): List[Symbol] = {
// If there are any foo=bar style arguments, and any of the overloaded
// methods has a parameter named `foo`, then only those methods are considered.
- val namesOfArgs = argtpes collect { case NamedType(name, _) => name }
- val namesMatch = (
- if (namesOfArgs.isEmpty) Nil
- else eligible filter { m =>
- namesOfArgs forall { name =>
- methodMatchesName(m, name)
+ val namesMatch = namesOfNamedArguments(argtpes) match {
+ case Nil => Nil
+ case names => eligible filter (m => names forall (name => m.info.params exists (p => paramMatchesName(p, name))))
}
- }
+ if (namesMatch.nonEmpty)
+ namesMatch
+ else if (eligible.isEmpty || eligible.tail.isEmpty)
+ eligible
+ else
+ eligible filter (alt =>
+ !alt.hasDefault && isApplicableBasedOnArity(alt.tpe, argtpes.length, varargsStar, tuplingAllowed = true)
)
-
- if (namesMatch.nonEmpty) namesMatch
- else if (eligible.isEmpty || eligible.tail.isEmpty) eligible
- else eligible filter { alt =>
- // for functional values, the `apply` method might be overloaded
- val mtypes = followApply(alt.tpe) match {
- case OverloadedType(_, alts) => alts map (_.tpe)
- case t => t :: Nil
}
- // Drop those that use a default; keep those that use vararg/tupling conversion.
- mtypes exists (t =>
- !t.typeSymbol.hasDefaultFlag && (
- compareLengths(t.params, argtpes) < 0 // tupling (*)
- || hasExactlyNumParams(t, argtpes.length) // same nb or vararg
- )
- )
- // (*) more arguments than parameters, but still applicable: tupling conversion works.
- // todo: should not return "false" when paramTypes = (Unit) no argument is given
- // (tupling would work)
- }
- }
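A hedged example (invented names) of the named-argument pre-filter described above: with `foo = ...` at the call site, only overloads that actually declare a parameter named `foo` remain candidates for the specificity test.

object NamedArgSketch {
  def g(foo: Int): String    = "has foo"
  def g(bar: String): String = "has bar"

  val picked = g(foo = 1)   // only the first overload has a parameter named `foo`
}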
- /** Assign <code>tree</code> the type of an alternative which is applicable
- * to <code>argtpes</code>, and whose result type is compatible with `pt`.
+ /** Assign `tree` the type of an alternative which is applicable
+ * to `argtpes`, and whose result type is compatible with `pt`.
* If several applicable alternatives exist, drop the alternatives which use
* default arguments, then select the most specialized one.
* If no applicable alternative exists, and pt != WildcardType, try again
* with pt = WildcardType.
* Otherwise, if there is no best alternative, error.
*
- * @param argtpes contains the argument types. If an argument is named, as
+ * @param argtpes0 contains the argument types. If an argument is named, as
* "a = 3", the corresponding type is `NamedType("a", Int)'. If the name
* of some NamedType does not exist in an alternative's parameter names,
* the type is replaced by `Unit`, i.e. the argument is treated as an
* assignment expression.
+ *
+ * @pre tree.tpe is an OverloadedType.
*/
- def inferMethodAlternative(tree: Tree, undetparams: List[Symbol],
- argtpes: List[Type], pt0: Type, varArgsOnly: Boolean = false, lastInferAttempt: Boolean = true): Unit = tree.tpe match {
- case OverloadedType(pre, alts) =>
- val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
- tryTwice { isSecondTry =>
- debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt")
-
- def varargsApplicableCheck(alt: Symbol) = !varArgsOnly || (
- isVarArgsList(alt.tpe.params)
- && (argtpes.size >= alt.tpe.params.size) // must be checked now due to SI-5859
- )
- val applicable = resolveOverloadedMethod(argtpes,
- alts filter (alt =>
- varargsApplicableCheck(alt)
- && inSilentMode(context)(isApplicable(undetparams, followApply(pre memberType alt), argtpes, pt))
- )
- )
-
- def improves(sym1: Symbol, sym2: Symbol) = {
- // util.trace("improve "+sym1+sym1.locationString+" on "+sym2+sym2.locationString)
- sym2 == NoSymbol || sym2.isError || sym2.hasAnnotation(BridgeClass) ||
- isStrictlyMoreSpecific(followApply(pre.memberType(sym1)),
- followApply(pre.memberType(sym2)), sym1, sym2)
+ def inferMethodAlternative(tree: Tree, undetparams: List[Symbol], argtpes0: List[Type], pt0: Type): Unit = {
+ val OverloadedType(pre, alts) = tree.tpe
+ var varargsStar = false
+ val argtpes = argtpes0 mapConserve {
+ case RepeatedType(tp) => varargsStar = true ; tp
+ case tp => tp
+ }
+ def followType(sym: Symbol) = followApply(pre memberType sym)
+ def bestForExpectedType(pt: Type, isLastTry: Boolean): Unit = {
+ val applicable0 = alts filter (alt => inSilentMode(context)(isApplicable(undetparams, followType(alt), argtpes, pt)))
+ val applicable = overloadsToConsiderBySpecificity(applicable0, argtpes, varargsStar)
+ val ranked = bestAlternatives(applicable)((sym1, sym2) =>
+ isStrictlyMoreSpecific(followType(sym1), followType(sym2), sym1, sym2)
+ )
+ ranked match {
+ case best :: competing :: _ => AmbiguousMethodAlternativeError(tree, pre, best, competing, argtpes, pt, isLastTry) // ambiguous
+ case best :: Nil => tree setSymbol best setType (pre memberType best) // success
+ case Nil if pt eq WildcardType => NoBestMethodAlternativeError(tree, argtpes, pt, isLastTry) // failed
+ case Nil => bestForExpectedType(WildcardType, isLastTry) // failed, but retry with WildcardType
}
-
- val best = ((NoSymbol: Symbol) /: applicable) ((best, alt) =>
- if (improves(alt, best)) alt else best)
- val competing = applicable.dropWhile(alt => best == alt || improves(best, alt))
- if (best == NoSymbol) {
- if (pt == WildcardType) NoBestMethodAlternativeError(tree, argtpes, pt, isSecondTry && lastInferAttempt)
- else inferMethodAlternative(tree, undetparams, argtpes, WildcardType, lastInferAttempt = isSecondTry)
- } else if (!competing.isEmpty) {
- AmbiguousMethodAlternativeError(tree, pre, best, competing.head, argtpes, pt, isSecondTry && lastInferAttempt)
- } else {
-// checkNotShadowed(tree.pos, pre, best, applicable)
- tree.setSymbol(best).setType(pre.memberType(best))
}
+ // This potentially makes up to four attempts: tryTwice may execute
+ // with and without views enabled, and bestForExpectedType will try again
+ // with pt = WildcardType if it fails with pt != WildcardType.
+ tryTwice { isLastTry =>
+ val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
+ debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt")
+ bestForExpectedType(pt, isLastTry)
}
- case _ =>
}
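A minimal sketch of the specificity ranking that `bestForExpectedType` delegates to `isStrictlyMoreSpecific`: both overloads are applicable to the call, and the strictly more specific one wins without an ambiguity error.

object SpecificitySketch {
  def f(x: Int): String    = "Int"
  def f(x: AnyVal): String = "AnyVal"

  val picked = f(1)   // "Int": strictly more specific than the AnyVal overload
}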
/** Try inference twice, once without views and once with views,
* unless views are already disabled.
- *
- * @param infer ...
*/
def tryTwice(infer: Boolean => Unit): Unit = {
if (context.implicitsEnabled) {
@@ -1707,12 +1667,9 @@ trait Infer extends Checkable {
else infer(true)
}
- /** Assign <code>tree</code> the type of all polymorphic alternatives
- * with <code>nparams</code> as the number of type parameters, if it exists.
+ /** Assign `tree` the type of all polymorphic alternatives
+ * with `nparams` as the number of type parameters, if it exists.
* If no such polymorphic alternative exist, error.
- *
- * @param tree ...
- * @param nparams ...
*/
def inferPolyAlternatives(tree: Tree, argtypes: List[Type]): Unit = {
val OverloadedType(pre, alts) = tree.tpe
@@ -1737,7 +1694,7 @@ trait Infer extends Checkable {
}
else if (sym.isOverloaded) {
val xs = sym.alternatives
- val tparams = new AsSeenFromMap(pre, xs.head.owner) mapOver xs.head.typeParams
+ val tparams = newAsSeenFromMap(pre, xs.head.owner) mapOver xs.head.typeParams
val bounds = tparams map (_.tpeHK) // see e.g., #1236
val tpe = PolyType(tparams, OverloadedType(AntiPolyType(pre, bounds), xs))
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 0ba30ffa73..91ebd798e1 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -3,15 +3,11 @@ package typechecker
import symtab.Flags._
import scala.tools.nsc.util._
-import scala.tools.nsc.util.ClassPath._
import scala.reflect.runtime.ReflectionUtils
import scala.collection.mutable.ListBuffer
-import scala.compat.Platform.EOL
+import scala.reflect.ClassTag
import scala.reflect.internal.util.Statistics
import scala.reflect.macros.util._
-import java.lang.{Class => jClass}
-import java.lang.reflect.{Array => jArray, Method => jMethod}
-import scala.reflect.internal.util.Collections._
import scala.util.control.ControlThrowable
import scala.reflect.macros.runtime.AbortMacroException
@@ -47,8 +43,15 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
import definitions._
import treeInfo.{isRepeatedParamType => _, _}
import MacrosStats._
+
def globalSettings = global.settings
+ protected def findMacroClassLoader(): ClassLoader = {
+ val classpath = global.classPath.asURLs
+ macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath))
+ ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
+ }
+
/** `MacroImplBinding` and its companion module are responsible for
* serialization/deserialization of macro def -> impl bindings.
*
@@ -74,19 +77,19 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
private case class MacroImplBinding(
// Java class name of the class that contains the macro implementation
// is used to load the corresponding object with Java reflection
- val className: String,
+ className: String,
// method name of the macro implementation
// `className` and `methName` are all we need to reflectively invoke a macro implementation
// because macro implementations cannot be overloaded
- val methName: String,
+ methName: String,
// flattens the macro impl's parameter lists having symbols replaced with metadata
// currently metadata is an index of the type parameter corresponding to that type tag (if applicable)
// f.ex. for: def impl[T: WeakTypeTag, U: WeakTypeTag, V](c: Context)(x: c.Expr[T]): (U, V) = ???
// `signature` will be equal to List(-1, -1, 0, 1)
- val signature: List[Int],
+ signature: List[Int],
// type arguments part of a macro impl ref (the right-hand side of a macro definition)
// these trees don't refer to a macro impl, so we can pickle them as is
- val targs: List[Tree])
+ targs: List[Tree])
/** Macro def -> macro impl bindings are serialized into a `macroImpl` annotation
* with synthetic content that carries the payload described in `MacroImplBinding`.
@@ -128,7 +131,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
// todo. refactor when fixing SI-5498
def className: String = {
def loop(sym: Symbol): String = sym match {
- case sym if sym.owner.isPackageClass =>
+ case sym if sym.isTopLevel =>
val suffix = if (sym.isModuleClass) "$" else ""
sym.fullName + suffix
case sym =>
@@ -293,51 +296,51 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
private def macroImplSig(macroDef: Symbol, tparams: List[TypeDef], vparamss: List[List[ValDef]], retTpe: Type): (List[List[Symbol]], Type) = {
// had to move method's body to an object because of the recursive dependencies between sigma and param
object SigGenerator {
- def sigma(tpe: Type): Type = {
- class SigmaTypeMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) =>
- val pre1 = pre match {
- case ThisType(sym) if sym == macroDef.owner =>
- SingleType(SingleType(SingleType(NoPrefix, ctxParam), MacroContextPrefix), ExprValue)
- case SingleType(NoPrefix, sym) =>
- mfind(vparamss)(_.symbol == sym) match {
- case Some(macroDefParam) => SingleType(SingleType(NoPrefix, param(macroDefParam)), ExprValue)
- case _ => pre
- }
- case _ =>
- pre
- }
- TypeRef(pre1, sym, args map mapOver)
- case _ =>
- mapOver(tp)
- }
+ def WeakTagClass = getMember(MacroContextClass, tpnme.WeakTypeTag)
+ def ExprClass = getMember(MacroContextClass, tpnme.Expr)
+ val cache = scala.collection.mutable.Map[Symbol, Symbol]()
+ val ctxParam = makeParam(nme.macroContext, macroDef.pos, MacroContextClass.tpe, SYNTHETIC)
+ val paramss = List(ctxParam) :: mmap(vparamss)(param)
+ val implReturnType = typeRef(singleType(NoPrefix, ctxParam), ExprClass, List(sigma(retTpe)))
+
+ object SigmaTypeMap extends TypeMap {
+ def mapPrefix(pre: Type) = pre match {
+ case ThisType(sym) if sym == macroDef.owner =>
+ singleType(singleType(singleType(NoPrefix, ctxParam), MacroContextPrefix), ExprValue)
+ case SingleType(NoPrefix, sym) =>
+ mfind(vparamss)(_.symbol == sym).fold(pre)(p => singleType(singleType(NoPrefix, param(p)), ExprValue))
+ case _ =>
+ mapOver(pre)
+ }
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(pre, sym, args) =>
+ val pre1 = mapPrefix(pre)
+ val args1 = mapOverArgs(args, sym.typeParams)
+ if ((pre eq pre1) && (args eq args1)) tp
+ else typeRef(pre1, sym, args1)
+ case _ =>
+ mapOver(tp)
}
-
- new SigmaTypeMap() apply tpe
}
+ def sigma(tpe: Type): Type = SigmaTypeMap(tpe)
+
+ def makeParam(name: Name, pos: Position, tpe: Type, flags: Long) =
+ macroDef.newValueParameter(name.toTermName, pos, flags) setInfo tpe
+ def implType(isType: Boolean, origTpe: Type): Type = {
+ def tsym = if (isType) WeakTagClass else ExprClass
+ def targ = origTpe.typeArgs.headOption getOrElse NoType
- def makeParam(name: Name, pos: Position, tpe: Type, flags: Long = 0L) =
- macroDef.newValueParameter(name, pos, flags) setInfo tpe
- val ctxParam = makeParam(nme.macroContext, macroDef.pos, MacroContextClass.tpe, SYNTHETIC)
- def implType(isType: Boolean, origTpe: Type): Type =
if (isRepeatedParamType(origTpe))
- appliedType(
- RepeatedParamClass.typeConstructor,
- List(implType(isType, sigma(origTpe.typeArgs.head))))
- else {
- val tsym = getMember(MacroContextClass, if (isType) tpnme.WeakTypeTag else tpnme.Expr)
+ scalaRepeatedType(implType(isType, sigma(targ)))
+ else
typeRef(singleType(NoPrefix, ctxParam), tsym, List(sigma(origTpe)))
- }
- val paramCache = scala.collection.mutable.Map[Symbol, Symbol]()
- def param(tree: Tree): Symbol =
- paramCache.getOrElseUpdate(tree.symbol, {
+ }
+ def param(tree: Tree): Symbol = (
+ cache.getOrElseUpdate(tree.symbol, {
val sym = tree.symbol
makeParam(sym.name, sym.pos, implType(sym.isType, sym.tpe), sym.flags)
})
-
- val paramss = List(ctxParam) :: mmap(vparamss)(param)
- val implRetTpe = typeRef(singleType(NoPrefix, ctxParam), getMember(MacroContextClass, tpnme.Expr), List(sigma(retTpe)))
+ )
}
import SigGenerator._
@@ -345,7 +348,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
macroTraceVerbose("tparams are: ")(tparams)
macroTraceVerbose("vparamss are: ")(vparamss)
macroTraceVerbose("retTpe is: ")(retTpe)
- macroTraceVerbose("macroImplSig is: ")((paramss, implRetTpe))
+ macroTraceVerbose("macroImplSig is: ")((paramss, implReturnType))
}
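An illustrative sketch, not part of this patch: the user-level shape that macroImplSig mirrors, assuming the Scala 2.10 macro API (scala.reflect.macros.Context). The object M and both method names are made up; for a macro def with parameter lists (x: Int)(y: Int) the implementation is expected to take a leading Context parameter plus Expr-wrapped copies of those lists, which is the signature computed above.

import scala.language.experimental.macros
import scala.reflect.macros.Context

object M {
  // macro definition: two explicit parameter lists, Int result
  def apply(x: Int)(y: Int): Int = macro impl

  // implementation signature mirrors the def: a Context first, then each parameter
  // list with every type wrapped in c.Expr, and an Expr-wrapped result
  def impl(c: Context)(x: c.Expr[Int])(y: c.Expr[Int]): c.Expr[Int] = {
    import c.universe._
    // builds `x.+(y)` with plain tree constructors (no quasiquotes needed in 2.10)
    c.Expr[Int](Apply(Select(x.tree, newTermName("$plus")), List(y.tree)))
  }
}
// Once M is compiled separately, a call site `M(2)(3)` expands to `2 + 3`.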
/** Verifies that the body of a macro def typechecks to a reference to a static public non-overloaded method,
@@ -362,7 +365,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
// Phase I: sanity checks
val macroDef = macroDdef.symbol
macroLogVerbose("typechecking macro def %s at %s".format(macroDef, macroDdef.pos))
- assert(macroDef.isTermMacro, macroDdef)
+ assert(macroDef.isMacro, macroDdef)
if (fastTrack contains macroDef) MacroDefIsFastTrack()
if (!typer.checkFeature(macroDdef.pos, MacrosFeature, immediate = true)) MacroFeatureNotEnabled()
@@ -377,7 +380,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
// doesn't manifest itself as an error in the resulting tree
val prevNumErrors = reporter.ERROR.count
var rhs1 = typer.typed1(rhs, EXPRmode, WildcardType)
- def rhsNeedsMacroExpansion = rhs1.symbol != null && rhs1.symbol.isTermMacro && !rhs1.symbol.isErroneous
+ def rhsNeedsMacroExpansion = rhs1.symbol != null && rhs1.symbol.isMacro && !rhs1.symbol.isErroneous
while (rhsNeedsMacroExpansion) {
rhs1 = macroExpand1(typer, rhs1) match {
case Success(expanded) =>
@@ -390,8 +393,12 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
}
case Fallback(fallback) =>
typer.typed1(fallback, EXPRmode, WildcardType)
- case Other(result) =>
- result
+ case Delayed(delayed) =>
+ delayed
+ case Skipped(skipped) =>
+ skipped
+ case Failure(failure) =>
+ failure
}
}
val typecheckedWithErrors = (rhs1 exists (_.isErroneous)) || reporter.ERROR.count != prevNumErrors
@@ -474,24 +481,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* Loads classes from -cp (aka the library classpath).
* Is also capable of detecting REPL and reusing its classloader.
*/
- lazy val macroClassloader: ClassLoader = {
- if (global.forMSIL)
- throw new UnsupportedOperationException("Scala reflection not available on this platform")
-
- val classpath = global.classPath.asURLs
- macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath))
- val loader = ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
-
- // a heuristic to detect the REPL
- if (global.settings.exposeEmptyPackage.value) {
- macroLogVerbose("macro classloader: initializing from a REPL classloader".format(global.classPath.asURLs))
- import scala.tools.nsc.interpreter._
- val virtualDirectory = global.settings.outputDirs.getSingleOutput.get
- new AbstractFileClassLoader(virtualDirectory, loader) {}
- } else {
- loader
- }
- }
+ lazy val macroClassloader: ClassLoader = findMacroClassLoader()
/** Produces a function that can be used to invoke macro implementation for a given macro definition:
* 1) Looks up macro implementation symbol in this universe.
@@ -503,7 +493,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* `null` otherwise.
*/
type MacroRuntime = MacroArgs => Any
- private val macroRuntimesCache = perRunCaches.newWeakMap[Symbol, MacroRuntime]
+ private val macroRuntimesCache = perRunCaches.newWeakMap[Symbol, MacroRuntime]()
private def macroRuntime(macroDef: Symbol): MacroRuntime = {
macroTraceVerbose("looking for macro implementation: ")(macroDef)
if (fastTrack contains macroDef) {
@@ -541,19 +531,22 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
}
}
- private def macroContext(typer: Typer, prefixTree: Tree, expandeeTree: Tree): MacroContext =
+ private def macroContext(typer: Typer, prefixTree: Tree, expandeeTree: Tree): MacroContext = {
new {
val universe: self.global.type = self.global
val callsiteTyper: universe.analyzer.Typer = typer.asInstanceOf[global.analyzer.Typer]
- val expandee = expandeeTree
+ val expandee = universe.analyzer.macroExpanderAttachment(expandeeTree).original orElse expandeeTree
+ val macroRole = universe.analyzer.macroExpanderAttachment(expandeeTree).role
} with UnaffiliatedMacroContext {
val prefix = Expr[Nothing](prefixTree)(TypeTag.Nothing)
override def toString = "MacroContext(%s@%s +%d)".format(expandee.symbol.name, expandee.pos, enclosingMacros.length - 1 /* exclude myself */)
}
+ }
/** Calculate the arguments to pass to a macro implementation when expanding the provided tree.
*/
case class MacroArgs(c: MacroContext, others: List[Any])
+
private def macroArgs(typer: Typer, expandee: Tree): MacroArgs = {
val macroDef = expandee.symbol
val prefixTree = expandee.collect{ case Select(qual, name) => qual }.headOption.getOrElse(EmptyTree)
@@ -582,9 +575,11 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
val preparedArgss: List[List[Any]] =
if (fastTrack contains macroDef) {
- if (fastTrack(macroDef) validate context) argss
+ // Take a dry run of the fast track implementation
+ if (fastTrack(macroDef) validate expandee) argss
else typer.TyperErrorGen.MacroPartialApplicationError(expandee)
- } else {
+ }
+ else {
// if paramss have typetag context bounds, add an arglist to argss if necessary and instantiate the corresponding evidences
// consider the following example:
//
@@ -650,21 +645,37 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
private def popMacroContext() = _openMacros = _openMacros.tail
def enclosingMacroPosition = openMacros map (_.macroApplication.pos) find (_ ne NoPosition) getOrElse NoPosition
- private sealed abstract class MacroExpansionResult
- private case class Success(expanded: Tree) extends MacroExpansionResult
- private case class Fallback(fallback: Tree) extends MacroExpansionResult { currentRun.seenMacroExpansionsFallingBack = true }
- private case class Other(result: Tree) extends MacroExpansionResult
- private def Delay(expanded: Tree) = Other(expanded)
- private def Skip(expanded: Tree) = Other(expanded)
- private def Cancel(expandee: Tree) = Other(expandee)
- private def Failure(expandee: Tree) = Other(expandee)
+ /** Describes the role that the macro expandee is performing.
+ */
+ type MacroRole = String
+ final def APPLY_ROLE: MacroRole = "APPLY_ROLE"
+ private val roleNames = Map(APPLY_ROLE -> "apply")
/** Performs macro expansion:
- * 1) Checks whether the expansion needs to be delayed (see `mustDelayMacroExpansion`)
- * 2) Loads macro implementation using `macroMirror`
- * 3) Synthesizes invocation arguments for the macro implementation
- * 4) Checks that the result is a tree bound to this universe
- * 5) Typechecks the result against the return type of the macro definition
+ *
+ * ========= Expandable trees =========
+ *
+ * A term of one of the following shapes:
+ *
+ * Ident(<term macro>)
+ * Select(<any qualifier>, <term macro>)
+ * TypeApply(<any of the above>, <targs>)
+ * Apply(...Apply(<any of the above>, <args1>)...<argsN>)
+ *
+ * ========= Macro expansion =========
+ *
+ * First of all `macroExpandXXX`:
+ * 1) If necessary desugars the `expandee` to fit into `macroExpand1`
+ *
+ * Then `macroExpand1`:
+ * 2) Checks whether the expansion needs to be delayed (see `mustDelayMacroExpansion`)
+ * 3) Loads macro implementation using `macroMirror`
+ * 4) Synthesizes invocation arguments for the macro implementation
+ * 5) Checks that the result is a tree or an expr bound to this universe
+ *
+ * Finally `macroExpandXXX`:
+ * 6) Validates the expansion against the white list of supported tree shapes
+ * 7) Typechecks the result as required by the circumstances of the macro application
*
* If -Ymacro-debug-lite is enabled, you will get basic notifications about macro expansion
* along with macro expansions logged in the form that can be copy/pasted verbatim into REPL.
@@ -675,60 +686,126 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
*
* @return
* the expansion result if the expansion has been successful,
- * the fallback method invocation if the expansion has been unsuccessful, but there is a fallback,
+ * the fallback tree if the expansion has been unsuccessful, but there is a fallback,
* the expandee unchanged if the expansion has been delayed,
* the expandee fully expanded if the expansion has been delayed before and has been expanded now,
* the expandee with an error marker set if the expansion has been cancelled due to malformed arguments or implementation
* the expandee with an error marker set if there has been an error
*/
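An illustrative sketch, not part of this patch: the four expandable shapes listed above can be peeled down to the underlying macro reference with an ordinary pattern match over reflection trees. The object name, the helper `core`, and the sample tree are hypothetical, written against the 2.10-era runtime reflection API.

import scala.reflect.runtime.universe._

object ExpandableShapes {
  // strips Apply and TypeApply layers until the Ident or Select naming the macro remains
  def core(tree: Tree): Tree = tree match {
    case Apply(fun, _)     => core(fun)
    case TypeApply(fun, _) => core(fun)
    case t                 => t  // Ident(...) or Select(...)
  }

  def main(args: Array[String]): Unit = {
    // same shape as `M(2)(3)`: Apply(Apply(Ident(M), List(2)), List(3))
    val sample = Apply(Apply(Ident(newTermName("M")), List(Literal(Constant(2)))),
                       List(Literal(Constant(3))))
    println(showRaw(core(sample)))  // the bare Ident for M
  }
}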
- def macroExpand(typer: Typer, expandee: Tree, mode: Int = EXPRmode, pt: Type = WildcardType): Tree = {
- if (settings.Ymacronoexpand.value) return expandee // SI-6812
- val start = if (Statistics.canEnable) Statistics.startTimer(macroExpandNanos) else null
- if (Statistics.canEnable) Statistics.incCounter(macroExpandCount)
- try {
- macroExpand1(typer, expandee) match {
- case Success(expanded) =>
- try {
- def typecheck(phase: String, tree: Tree, pt: Type): Tree = {
- if (tree.isErroneous) return tree
- macroLogVerbose(s"typechecking against $phase $pt: $expanded")
- val numErrors = reporter.ERROR.count
- def hasNewErrors = reporter.ERROR.count > numErrors
- val result = typer.context.withImplicitsEnabled(typer.typed(tree, EXPRmode, pt))
- macroTraceVerbose(s"""${if (hasNewErrors) "failed to typecheck" else "successfully typechecked"} against $phase $pt:\n$result\n""")(result)
+ private abstract class MacroExpander[Result: ClassTag](val role: MacroRole, val typer: Typer, val expandee: Tree) {
+ def allowExpandee(expandee: Tree): Boolean = true
+ def allowExpanded(expanded: Tree): Boolean = true
+ def allowedExpansions: String = "anything"
+ def allowResult(result: Result): Boolean = true
+
+ def onSuccess(expanded: Tree): Result
+ def onFallback(expanded: Tree): Result
+ def onSuppressed(expandee: Tree): Result = expandee match { case expandee: Result => expandee }
+ def onDelayed(expanded: Tree): Result = expanded match { case expanded: Result => expanded }
+ def onSkipped(expanded: Tree): Result = expanded match { case expanded: Result => expanded }
+ def onFailure(expanded: Tree): Result = { typer.infer.setError(expandee); expandee match { case expandee: Result => expandee } }
+
+ def apply(desugared: Tree): Result = {
+ if (isMacroExpansionSuppressed(desugared)) onSuppressed(expandee)
+ else expand(desugared)
+ }
+
+ protected def expand(desugared: Tree): Result = {
+ def showDetailed(tree: Tree) = showRaw(tree, printIds = true, printTypes = true)
+ def summary() = s"expander = $this, expandee = ${showDetailed(expandee)}, desugared = ${if (expandee == desugared) () else showDetailed(desugared)}"
+ if (macroDebugVerbose) println(s"macroExpand: ${summary()}")
+ assert(allowExpandee(expandee), summary())
+
+ val start = if (Statistics.canEnable) Statistics.startTimer(macroExpandNanos) else null
+ if (Statistics.canEnable) Statistics.incCounter(macroExpandCount)
+ try {
+ linkExpandeeAndDesugared(expandee, desugared, role)
+ macroExpand1(typer, desugared) match {
+ case Success(expanded) =>
+ if (allowExpanded(expanded)) {
+ // also see http://groups.google.com/group/scala-internals/browse_thread/thread/492560d941b315cc
+ val expanded1 = try onSuccess(duplicateAndKeepPositions(expanded)) finally popMacroContext()
+ if (!hasMacroExpansionAttachment(expanded1)) linkExpandeeAndExpanded(expandee, expanded1)
+ if (allowResult(expanded1)) expanded1 else onFailure(expanded)
+ } else {
+ typer.TyperErrorGen.MacroInvalidExpansionError(expandee, roleNames(role), allowedExpansions)
+ onFailure(expanded)
}
+ case Fallback(fallback) => onFallback(fallback)
+ case Delayed(delayed) => onDelayed(delayed)
+ case Skipped(skipped) => onSkipped(skipped)
+ case Failure(failure) => onFailure(failure)
+ }
+ } finally {
+ if (Statistics.canEnable) Statistics.stopTimer(macroExpandNanos, start)
+ }
+ }
+ }
- var expectedTpe = expandee.tpe
- if (isNullaryInvocation(expandee)) expectedTpe = expectedTpe.finalResultType
- var typechecked = typecheck("macro def return type", expanded, expectedTpe)
- typechecked = typecheck("expected type", typechecked, pt)
- typechecked
- } finally {
- popMacroContext()
- }
- case Fallback(fallback) =>
- typer.context.withImplicitsEnabled(typer.typed(fallback, EXPRmode, pt))
- case Other(result) =>
- result
+ /** Expands a tree that carries a term, which happens to be a term macro.
+ * @see MacroExpander
+ */
+ private abstract class TermMacroExpander(role: MacroRole, typer: Typer, expandee: Tree, mode: Mode, pt: Type)
+ extends MacroExpander[Tree](role, typer, expandee) {
+ override def allowedExpansions: String = "term trees"
+ override def allowExpandee(expandee: Tree) = expandee.isTerm
+ override def onSuccess(expanded: Tree) = typer.typed(expanded, mode, pt)
+ override def onFallback(fallback: Tree) = typer.typed(fallback, mode, pt)
+ }
+
+ /** Expands a term macro used in apply role as `M(2)(3)` in `val x = M(2)(3)`.
+ * @see MacroExpander
+ */
+ def macroExpandApply(typer: Typer, expandee: Tree, mode: Mode, pt: Type) = {
+ object expander extends TermMacroExpander(APPLY_ROLE, typer, expandee, mode, pt) {
+ override def onSuccess(expanded: Tree) = {
+ // prematurely annotate the tree with a macro expansion attachment
+ // so that adapt called indirectly by typer.typed knows that it needs to apply the existential fixup
+ linkExpandeeAndExpanded(expandee, expanded)
+ var expectedTpe = expandee.tpe
+ if (isNullaryInvocation(expandee)) expectedTpe = expectedTpe.finalResultType
+ // `macroExpandApply` is called from `adapt`, where implicit conversions are disabled
+ // therefore we need to re-enable the conversions temporarily
+ if (macroDebugVerbose) println(s"typecheck #1 (against expectedTpe = $expectedTpe): $expanded")
+ val expanded1 = typer.context.withImplicitsEnabled(typer.typed(expanded, mode, expectedTpe))
+ if (expanded1.isErrorTyped) {
+ if (macroDebugVerbose) println(s"typecheck #1 has failed: ${typer.context.errBuffer}")
+ expanded1
+ } else {
+ if (macroDebugVerbose) println(s"typecheck #2 (against pt = $pt): $expanded1")
+ val expanded2 = typer.context.withImplicitsEnabled(super.onSuccess(expanded1))
+ if (macroDebugVerbose && expanded2.isErrorTyped) println(s"typecheck #2 has failed: ${typer.context.errBuffer}")
+ expanded2
+ }
}
- } finally {
- if (Statistics.canEnable) Statistics.stopTimer(macroExpandNanos, start)
}
+ expander(expandee)
}
+ /** Captures statuses of macro expansions performed by `macroExpand1`.
+ */
+ private sealed abstract class MacroStatus(val result: Tree)
+ private case class Success(expanded: Tree) extends MacroStatus(expanded)
+ private case class Fallback(fallback: Tree) extends MacroStatus(fallback) { currentRun.seenMacroExpansionsFallingBack = true }
+ private case class Delayed(delayed: Tree) extends MacroStatus(delayed)
+ private case class Skipped(skipped: Tree) extends MacroStatus(skipped)
+ private case class Failure(failure: Tree) extends MacroStatus(failure)
+ private def Delay(expanded: Tree) = Delayed(expanded)
+ private def Skip(expanded: Tree) = Skipped(expanded)
+ private def Cancel(expandee: Tree) = Failure(expandee)
+
/** Does the same as `macroExpand`, but without typechecking the expansion
* Meant for internal use within the macro infrastructure, don't use it elsewhere.
*/
- private def macroExpand1(typer: Typer, expandee: Tree): MacroExpansionResult =
+ private def macroExpand1(typer: Typer, expandee: Tree): MacroStatus = {
// verbose printing might cause recursive macro expansions, so I'm shutting it down here
withInfoLevel(nodePrinters.InfoLevel.Quiet) {
if (expandee.symbol.isErroneous || (expandee exists (_.isErroneous))) {
val reason = if (expandee.symbol.isErroneous) "not found or incompatible macro implementation" else "erroneous arguments"
macroTraceVerbose("cancelled macro expansion because of %s: ".format(reason))(expandee)
- return Cancel(typer.infer.setError(expandee))
+ Cancel(typer.infer.setError(expandee))
}
-
- try {
+ else try {
val runtime = macroRuntime(expandee.symbol)
if (runtime != null) macroExpandWithRuntime(typer, expandee, runtime)
else macroExpandWithoutRuntime(typer, expandee)
@@ -736,18 +813,23 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
case typer.TyperErrorGen.MacroExpansionException => Failure(expandee)
}
}
+ }
/** Expands a macro when a runtime (i.e. the macro implementation) can be successfully loaded
* Meant for internal use within the macro infrastructure, don't use it elsewhere.
*/
- private def macroExpandWithRuntime(typer: Typer, expandee: Tree, runtime: MacroRuntime): MacroExpansionResult = {
+ private def macroExpandWithRuntime(typer: Typer, expandee: Tree, runtime: MacroRuntime): MacroStatus = {
val wasDelayed = isDelayed(expandee)
val undetparams = calculateUndetparams(expandee)
val nowDelayed = !typer.context.macrosEnabled || undetparams.nonEmpty
(wasDelayed, nowDelayed) match {
- case (true, true) => Delay(expandee)
- case (true, false) => Skip(macroExpandAll(typer, expandee))
+ case (true, true) =>
+ Delay(expandee)
+ case (true, false) =>
+ val expanded = macroExpandAll(typer, expandee)
+ if (expanded exists (_.isErroneous)) Failure(expandee)
+ else Skip(expanded)
case (false, true) =>
macroLogLite("macro expansion is delayed: %s".format(expandee))
delayed += expandee -> undetparams
@@ -762,15 +844,16 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
def hasNewErrors = reporter.ERROR.count > numErrors
val expanded = { pushMacroContext(args.c); runtime(args) }
if (hasNewErrors) MacroGeneratedTypeError(expandee)
+ def validateResultingTree(expanded: Tree) = {
+ macroLogVerbose("original:")
+ macroLogLite("" + expanded + "\n" + showRaw(expanded))
+ val freeSyms = expanded.freeTerms ++ expanded.freeTypes
+ freeSyms foreach (sym => MacroFreeSymbolError(expandee, sym))
+ Success(atPos(enclosingMacroPosition.focus)(expanded))
+ }
expanded match {
- case expanded: Expr[_] =>
- macroLogVerbose("original:")
- macroLogLite("" + expanded.tree + "\n" + showRaw(expanded.tree))
- val freeSyms = expanded.tree.freeTerms ++ expanded.tree.freeTypes
- freeSyms foreach (sym => MacroFreeSymbolError(expandee, sym))
- Success(atPos(enclosingMacroPosition.focus)(expanded.tree updateAttachment MacroExpansionAttachment(expandee)))
- case _ =>
- MacroExpansionIsNotExprError(expandee, expanded)
+ case expanded: Expr[_] if expandee.symbol.isTermMacro => validateResultingTree(expanded.tree)
+ case _ => MacroExpansionHasInvalidTypeError(expandee, expanded)
}
} catch {
case ex: Throwable =>
@@ -791,7 +874,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
/** Expands a macro when a runtime (i.e. the macro implementation) cannot be loaded
* Meant for internal use within the macro infrastructure, don't use it elsewhere.
*/
- private def macroExpandWithoutRuntime(typer: Typer, expandee: Tree): MacroExpansionResult = {
+ private def macroExpandWithoutRuntime(typer: Typer, expandee: Tree): MacroStatus = {
import typer.TyperErrorGen._
val fallbackSym = expandee.symbol.nextOverriddenSymbol orElse MacroImplementationNotFoundError(expandee)
macroTraceLite("falling back to: ")(fallbackSym)
@@ -819,7 +902,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* 2) undetparams (sym.isTypeParameter && !sym.isSkolem)
*/
var hasPendingMacroExpansions = false
- private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Int]]
+ private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Int]]()
private def isDelayed(expandee: Tree) = delayed contains expandee
private def calculateUndetparams(expandee: Tree): scala.collection.mutable.Set[Int] =
delayed.get(expandee).getOrElse {
@@ -832,7 +915,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
macroLogVerbose("calculateUndetparams: %s".format(calculated))
calculated map (_.id)
}
- private val undetparams = perRunCaches.newSet[Int]
+ private val undetparams = perRunCaches.newSet[Int]()
def notifyUndetparamsAdded(newUndets: List[Symbol]): Unit = {
undetparams ++= newUndets map (_.id)
if (macroDebugVerbose) newUndets foreach (sym => println("undetParam added: %s".format(sym)))
@@ -861,13 +944,13 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
new Transformer {
override def transform(tree: Tree) = super.transform(tree match {
// todo. expansion should work from the inside out
- case tree if (delayed contains tree) && calculateUndetparams(tree).isEmpty =>
+ case tree if (delayed contains tree) && calculateUndetparams(tree).isEmpty && !tree.isErroneous =>
val context = tree.attachments.get[MacroRuntimeAttachment].get.typerContext
delayed -= tree
context.implicitsEnabled = typer.context.implicitsEnabled
context.enrichmentEnabled = typer.context.enrichmentEnabled
context.macrosEnabled = typer.context.macrosEnabled
- macroExpand(newTyper(context), tree, EXPRmode, WildcardType)
+ macroExpandApply(newTyper(context), tree, EXPRmode, WildcardType)
case _ =>
tree
})
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index 99557d1527..8c686107b4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -6,7 +6,6 @@ package scala.tools.nsc
package typechecker
import symtab.Flags._
-import scala.collection.{ mutable, immutable }
import scala.reflect.internal.util.StringOps.{ ojoin }
import scala.reflect.ClassTag
import scala.reflect.runtime.{ universe => ru }
@@ -30,66 +29,30 @@ trait MethodSynthesis {
if (sym.isLazy) ValDef(sym, body)
else DefDef(sym, body)
- def applyTypeInternal(tags: List[TT[_]]): Type = {
- val symbols = tags map compilerSymbolFromTag
- val container :: args = symbols
- val tparams = container.typeConstructor.typeParams
-
- // Conservative at present - if manifests were more usable this could do a lot more.
- // [Eugene to Paul] all right, they are now. what do you have in mind?
- require(symbols forall (_ ne NoSymbol), "Must find all tags: " + symbols)
- require(container.owner.isPackageClass, "Container must be a top-level class in a package: " + container)
- require(tparams.size == args.size, "Arguments must match type constructor arity: " + tparams + ", " + args)
-
- appliedType(container, args map (_.tpe): _*)
- }
-
- def companionType[T](implicit ct: CT[T]) =
- rootMirror.getRequiredModule(ct.runtimeClass.getName).tpe
-
- // Use these like `applyType[List, Int]` or `applyType[Map, Int, String]`
- def applyType[CC](implicit t1: TT[CC]): Type =
- applyTypeInternal(List(t1))
-
- def applyType[CC[X1], X1](implicit t1: TT[CC[_]], t2: TT[X1]): Type =
- applyTypeInternal(List(t1, t2))
-
- def applyType[CC[X1, X2], X1, X2](implicit t1: TT[CC[_,_]], t2: TT[X1], t3: TT[X2]): Type =
- applyTypeInternal(List(t1, t2, t3))
-
- def applyType[CC[X1, X2, X3], X1, X2, X3](implicit t1: TT[CC[_,_,_]], t2: TT[X1], t3: TT[X2], t4: TT[X3]): Type =
- applyTypeInternal(List(t1, t2, t3, t4))
-
- def newMethodType[F](owner: Symbol)(implicit t: TT[F]): Type = {
- val fnSymbol = compilerSymbolFromTag(t)
- val formals = compilerTypeFromTag(t).typeArguments
- assert(fnSymbol isSubClass FunctionClass(formals.size - 1), (owner, t))
- val params = owner newSyntheticValueParams formals
- MethodType(params, formals.last)
- }
-
- /** The annotations amongst those found on the original symbol which
- * should be propagated to this kind of accessor.
- */
- def deriveAnnotations(initial: List[AnnotationInfo], category: Symbol, keepClean: Boolean): List[AnnotationInfo] = {
- initial filter { ann =>
- // There are no meta-annotation arguments attached to `ann`
- if (ann.metaAnnotations.isEmpty) {
- // A meta-annotation matching `annotKind` exists on `ann`'s definition.
- (ann.defaultTargets contains category) ||
- // `ann`'s definition has no meta-annotations, and `keepClean` is true.
- (ann.defaultTargets.isEmpty && keepClean)
- }
- // There are meta-annotation arguments, and one of them matches `annotKind`
- else ann.metaAnnotations exists (_ matches category)
+ /** The annotations amongst those found on the original symbol which
+ * should be propagated to this kind of accessor.
+ */
+ def deriveAnnotations(initial: List[AnnotationInfo], category: Symbol, keepClean: Boolean): List[AnnotationInfo] = {
+ initial filter { ann =>
+ // There are no meta-annotation arguments attached to `ann`
+ if (ann.metaAnnotations.isEmpty) {
+ // A meta-annotation matching `annotKind` exists on `ann`'s definition.
+ (ann.defaultTargets contains category) ||
+ // `ann`'s definition has no meta-annotations, and `keepClean` is true.
+ (ann.defaultTargets.isEmpty && keepClean)
}
+ // There are meta-annotation arguments, and one of them matches `annotKind`
+ else ann.metaAnnotations exists (_ matches category)
}
- }
+ }
+ }
import synthesisUtil._
class ClassMethodSynthesis(val clazz: Symbol, localTyper: Typer) {
def mkThis = This(clazz) setPos clazz.pos.focus
- def mkThisSelect(sym: Symbol) = atPos(clazz.pos.focus)(Select(mkThis, sym))
+ def mkThisSelect(sym: Symbol) = atPos(clazz.pos.focus)(
+ if (clazz.isClass) Select(This(clazz), sym) else Ident(sym)
+ )
private def isOverride(name: TermName) =
clazzMember(name).alternatives exists (sym => !sym.isDeferred && (sym.owner != clazz))
@@ -99,7 +62,7 @@ trait MethodSynthesis {
overrideFlag | SYNTHETIC
}
def newMethodFlags(method: Symbol) = {
- val overrideFlag = if (isOverride(method.name)) OVERRIDE else 0L
+ val overrideFlag = if (isOverride(method.name.toTermName)) OVERRIDE else 0L
(method.flags | overrideFlag | SYNTHETIC) & ~DEFERRED
}
@@ -107,11 +70,13 @@ trait MethodSynthesis {
localTyper typed ValOrDefDef(method, f(method))
private def createInternal(name: Name, f: Symbol => Tree, info: Type): Tree = {
- val m = clazz.newMethod(name.toTermName, clazz.pos.focus, newMethodFlags(name))
+ val name1 = name.toTermName
+ val m = clazz.newMethod(name1, clazz.pos.focus, newMethodFlags(name1))
finishMethod(m setInfoAndEnter info, f)
}
private def createInternal(name: Name, f: Symbol => Tree, infoFn: Symbol => Type): Tree = {
- val m = clazz.newMethod(name.toTermName, clazz.pos.focus, newMethodFlags(name))
+ val name1 = name.toTermName
+ val m = clazz.newMethod(name1, clazz.pos.focus, newMethodFlags(name1))
finishMethod(m setInfoAndEnter infoFn(m), f)
}
private def cloneInternal(original: Symbol, f: Symbol => Tree, name: Name): Tree = {
@@ -119,22 +84,9 @@ trait MethodSynthesis {
finishMethod(clazz.info.decls enter m, f)
}
- private def cloneInternal(original: Symbol, f: Symbol => Tree): Tree =
- cloneInternal(original, f, original.name)
-
def clazzMember(name: Name) = clazz.info nonPrivateMember name
def typeInClazz(sym: Symbol) = clazz.thisType memberType sym
- /** Function argument takes the newly created method symbol of
- * the same type as `name` in clazz, and returns the tree to be
- * added to the template.
- */
- def overrideMethod(name: Name)(f: Symbol => Tree): Tree =
- overrideMethod(clazzMember(name))(f)
-
- def overrideMethod(original: Symbol)(f: Symbol => Tree): Tree =
- cloneInternal(original, sym => f(sym setFlag OVERRIDE))
-
def deriveMethod(original: Symbol, nameFn: Name => Name)(f: Symbol => Tree): Tree =
cloneInternal(original, f, nameFn(original.name))
@@ -174,7 +126,7 @@ trait MethodSynthesis {
/** There are two key methods in here.
*
- * 1) Enter methods such as enterGetterSetterare called
+ * 1) Enter methods such as enterGetterSetter are called
* from Namer with a tree which may generate further trees such as accessors or
* implicit wrappers. Some setup is performed. In general this creates symbols
* and enters them into the scope of the owner.
@@ -219,14 +171,46 @@ trait MethodSynthesis {
enterBeans(tree)
}
+ /** This is called for those ValDefs which addDerivedTrees ignores, but
+ * which might have a warnable annotation situation.
+ */
+ private def warnForDroppedAnnotations(tree: Tree) {
+ val annotations = tree.symbol.initialize.annotations
+ val targetClass = defaultAnnotationTarget(tree)
+ val retained = deriveAnnotations(annotations, targetClass, keepClean = true)
+
+ annotations filterNot (retained contains _) foreach (ann => issueAnnotationWarning(tree, ann, targetClass))
+ }
+ private def issueAnnotationWarning(tree: Tree, ann: AnnotationInfo, defaultTarget: Symbol) {
+ global.reporter.warning(ann.pos,
+ s"no valid targets for annotation on ${tree.symbol} - it is discarded unused. " +
+ s"You may specify targets with meta-annotations, e.g. @($ann @${defaultTarget.name})")
+ }
+
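An illustrative sketch, not part of this patch: the meta-annotation syntax that the warning above recommends, written from the user's side. With an explicit target the annotation is carried by the synthesized getter instead of being reported as dropped. The class name is made up.

import scala.annotation.meta.getter

class Example {
  // @getter directs the annotation onto the generated accessor `answer`,
  // the pattern suggested by the warning text above
  @(deprecated @getter) val answer: Int = 42
}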
def addDerivedTrees(typer: Typer, stat: Tree): List[Tree] = stat match {
case vd @ ValDef(mods, name, tpt, rhs) if !noFinishGetterSetter(vd) =>
// If we don't save the annotations, they seem to wander off.
val annotations = stat.symbol.initialize.annotations
- ( allValDefDerived(vd)
+ val trees = (
+ allValDefDerived(vd)
map (acc => atPos(vd.pos.focus)(acc derive annotations))
filterNot (_ eq EmptyTree)
)
+ // Verify each annotation landed safely somewhere, else warn.
+ // Filtering when isParamAccessor is a necessary simplification
+ // because there's a bunch of unwritten annotation code involving
+ // the propagation of annotations - constructor parameter annotations
+ // may need to make their way to parameters of the constructor as
+ // well as fields of the class, etc.
+ if (!mods.isParamAccessor) annotations foreach (ann =>
+ if (!trees.exists(_.symbol hasAnnotation ann.symbol))
+ issueAnnotationWarning(vd, ann, GetterTargetClass)
+ )
+
+ trees
+ case vd: ValDef =>
+ warnForDroppedAnnotations(vd)
+ vd :: Nil
case cd @ ClassDef(mods, _, _, _) if mods.isImplicit =>
val annotations = stat.symbol.initialize.annotations
// TODO: need to shuffle annotations between wrapper and class.
@@ -253,8 +237,7 @@ trait MethodSynthesis {
)
def beanAccessors(vd: ValDef): List[DerivedFromValDef] = {
val setter = if (vd.mods.isMutable) List(BeanSetter(vd)) else Nil
- if (forMSIL) Nil
- else if (vd.symbol hasAnnotation BeanPropertyAttr)
+ if (vd.symbol hasAnnotation BeanPropertyAttr)
BeanGetter(vd) :: setter
else if (vd.symbol hasAnnotation BooleanBeanPropertyAttr)
BooleanBeanGetter(vd) :: setter
@@ -312,7 +295,6 @@ trait MethodSynthesis {
// Final methods to make the rest easier to reason about.
final def mods = tree.mods
final def basisSym = tree.symbol
- final def derivedFlags: Long = basisSym.flags & flagsMask | flagsExtra
}
trait DerivedFromClassDef extends DerivedFromMemberDef {
@@ -458,7 +440,7 @@ trait MethodSynthesis {
case class LazyValGetter(tree: ValDef) extends BaseGetter(tree) {
class ChangeOwnerAndModuleClassTraverser(oldowner: Symbol, newowner: Symbol)
extends ChangeOwnerTraverser(oldowner, newowner) {
-
+
override def traverse(tree: Tree) {
tree match {
case _: DefTree => change(tree.symbol.moduleClass)
@@ -558,7 +540,7 @@ trait MethodSynthesis {
// No Symbols available.
private def beanAccessorsFromNames(tree: ValDef) = {
- val ValDef(mods, name, tpt, _) = tree
+ val ValDef(mods, _, _, _) = tree
val hasBP = mods hasAnnotationNamed tpnme.BeanPropertyAnnot
val hasBoolBP = mods hasAnnotationNamed tpnme.BooleanBeanPropertyAnnot
@@ -575,9 +557,6 @@ trait MethodSynthesis {
}
protected def enterBeans(tree: ValDef) {
- if (forMSIL)
- return
-
val ValDef(mods, name, _, _) = tree
val beans = beanAccessorsFromNames(tree)
if (beans.nonEmpty) {
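An illustrative sketch, not part of this patch: the bean accessors whose synthesis is wired up above are the ones requested with scala.beans.BeanProperty; a mutable annotated parameter or field gets a JavaBeans-style getter and setter. The class and object names are made up.

import scala.beans.BeanProperty

class Person(@BeanProperty var name: String)

object BeanDemo extends App {
  val p = new Person("Ann")
  p.setName("Bob")     // synthesized bean setter
  println(p.getName)   // synthesized bean getter, prints "Bob"
}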
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 7a3ab00578..007c7c6a83 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -8,9 +8,7 @@ package typechecker
import scala.collection.mutable
import scala.annotation.tailrec
-import scala.ref.WeakReference
import symtab.Flags._
-import scala.tools.nsc.io.AbstractFile
/** This trait declares methods to create symbols and to enter them into scopes.
*
@@ -49,10 +47,11 @@ trait Namers extends MethodSynthesis {
private class NormalNamer(context: Context) extends Namer(context)
def newNamer(context: Context): Namer = new NormalNamer(context)
- def newNamerFor(context: Context, tree: Tree): Namer =
- newNamer(context.makeNewScope(tree, tree.symbol))
+ def newNamerFor(context: Context, tree: Tree): Namer = newNamer(context.makeNewScope(tree, tree.symbol))
abstract class Namer(val context: Context) extends MethodSynth with NamerContextErrors { thisNamer =>
+ // overridden by the presentation compiler
+ def saveDefaultGetter(meth: Symbol, default: Symbol) { }
import NamerErrorGen._
val typer = newTyper(context)
@@ -150,7 +149,7 @@ trait Namers extends MethodSynthesis {
sym reset NoType setFlag newFlags setPos pos
sym.moduleClass andAlso (updatePosFlags(_, pos, moduleClassFlags(flags)))
- if (sym.owner.isPackageClass) {
+ if (sym.isTopLevel) {
companionSymbolOf(sym, context) andAlso { companion =>
val assignNoType = companion.rawInfo match {
case _: SymLoader => true
@@ -173,10 +172,13 @@ trait Namers extends MethodSynthesis {
else innerNamer
}
+ // FIXME - this logic needs to be thoroughly explained
+ // and justified. I know it's wrong with respect to package
+ // objects, but I think it's also wrong in other ways.
protected def conflict(newS: Symbol, oldS: Symbol) = (
( !oldS.isSourceMethod
|| nme.isSetterName(newS.name)
- || newS.owner.isPackageClass
+ || newS.isTopLevel
) &&
!( // @M: allow repeated use of `_` for higher-order type params
(newS.owner.isTypeParameter || newS.owner.isAbstractType)
@@ -187,7 +189,7 @@ trait Namers extends MethodSynthesis {
)
private def allowsOverload(sym: Symbol) = (
- sym.isSourceMethod && sym.owner.isClass && !sym.owner.isPackageClass
+ sym.isSourceMethod && sym.owner.isClass && !sym.isTopLevel
)
private def inCurrentScope(m: Symbol): Boolean = {
@@ -200,6 +202,19 @@ trait Namers extends MethodSynthesis {
/** Enter symbol into given scope and return symbol itself */
def enterInScope(sym: Symbol, scope: Scope): Symbol = {
+ // FIXME - this is broken in a number of ways.
+ //
+ // 1) If "sym" allows overloading, that is not itself sufficient to skip
+ // the check, because "prev.sym" also must allow overloading.
+ //
+ // 2) There is nothing which reconciles a package's scope with
+ // the package object's scope. This is the source of many bugs
+ // with e.g. defining a case class in a package object. When
+ // compiling against classes, the class symbol is created in the
+ // package and in the package object, and the conflict is undetected.
+ // There is also a non-deterministic outcome for situations like
+ // an object with the same name as a method in the package object.
+
// allow for overloaded methods
if (!allowsOverload(sym)) {
val prev = scope.lookupEntry(sym.name)
@@ -300,11 +315,11 @@ trait Namers extends MethodSynthesis {
case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => owner.newConstructor(pos, flags)
case DefDef(_, _, _, _, _, _) => owner.newMethod(name.toTermName, pos, flags)
case ClassDef(_, _, _, _) => owner.newClassSymbol(name.toTypeName, pos, flags)
- case ModuleDef(_, _, _) => owner.newModule(name, pos, flags)
+ case ModuleDef(_, _, _) => owner.newModule(name.toTermName, pos, flags)
case PackageDef(pid, _) => createPackageSymbol(pos, pid)
case ValDef(_, _, _, _) =>
- if (isParameter) owner.newValueParameter(name, pos, flags)
- else owner.newValue(name, pos, flags)
+ if (isParameter) owner.newValueParameter(name.toTermName, pos, flags)
+ else owner.newValue(name.toTermName, pos, flags)
}
}
private def createFieldSymbol(tree: ValDef): TermSymbol =
@@ -335,11 +350,10 @@ trait Namers extends MethodSynthesis {
}
private def enterClassSymbol(tree: ClassDef, clazz: ClassSymbol): Symbol = {
- val file = contextFile
if (clazz.sourceFile != null && clazz.sourceFile != contextFile)
- debugwarn("!!! Source mismatch in " + clazz + ": " + clazz.sourceFile + " vs. " + contextFile)
+ devWarning(s"Source file mismatch in $clazz: ${clazz.sourceFile} vs. $contextFile")
- clazz.sourceFile = contextFile
+ clazz.associatedFile = contextFile
if (clazz.sourceFile != null) {
assert(currentRun.canRedefine(clazz) || clazz.sourceFile == currentRun.symSource(clazz), clazz.sourceFile)
currentRun.symSource(clazz) = clazz.sourceFile
@@ -353,7 +367,7 @@ trait Namers extends MethodSynthesis {
val existing = context.scope.lookup(tree.name)
val isRedefinition = (
existing.isType
- && existing.owner.isPackageClass
+ && existing.isTopLevel
&& context.scope == existing.owner.info.decls
&& currentRun.canRedefine(existing)
)
@@ -366,8 +380,8 @@ trait Namers extends MethodSynthesis {
else assignAndEnterSymbol(tree) setFlag inConstructorFlag
}
clazz match {
- case csym: ClassSymbol if csym.owner.isPackageClass => enterClassSymbol(tree, csym)
- case _ => clazz
+ case csym: ClassSymbol if csym.isTopLevel => enterClassSymbol(tree, csym)
+ case _ => clazz
}
}
@@ -379,8 +393,8 @@ trait Namers extends MethodSynthesis {
if (sym eq NoSymbol) return
val ctx = if (context.owner.isPackageObjectClass) context.outer else context
- val module = if (sym.isModule) sym else ctx.scope lookup tree.name.toTermName
- val clazz = if (sym.isClass) sym else ctx.scope lookup tree.name.toTypeName
+ val module = if (sym.isModule) sym else ctx.scope lookupModule tree.name
+ val clazz = if (sym.isClass) sym else ctx.scope lookupClass tree.name
val fails = (
module.isModule
&& clazz.isClass
@@ -426,8 +440,8 @@ trait Namers extends MethodSynthesis {
m.moduleClass setFlag moduleClassFlags(moduleFlags)
setPrivateWithin(tree, m.moduleClass)
}
- if (m.owner.isPackageClass && !m.isPackage) {
- m.moduleClass.sourceFile = contextFile
+ if (m.isTopLevel && !m.isPackage) {
+ m.moduleClass.associatedFile = contextFile
currentRun.symSource(m) = m.moduleClass.sourceFile
registerTopLevelSym(m)
}
@@ -489,7 +503,7 @@ trait Namers extends MethodSynthesis {
typer.permanentlyHiddenWarning(pos, to0, e.sym)
else if (context ne context.enclClass) {
val defSym = context.prefix.member(to) filter (
- sym => sym.exists && context.isAccessible(sym, context.prefix, false))
+ sym => sym.exists && context.isAccessible(sym, context.prefix, superAccess = false))
defSym andAlso (typer.permanentlyHiddenWarning(pos, to0, _))
}
@@ -581,17 +595,6 @@ trait Namers extends MethodSynthesis {
}
}
- def enterIfNotThere(sym: Symbol) {
- val scope = context.scope
- @tailrec def search(e: ScopeEntry) {
- if ((e eq null) || (e.owner ne scope))
- scope enter sym
- else if (e.sym ne sym) // otherwise, aborts since we found sym
- search(e.tail)
- }
- search(scope lookupEntry sym.name)
- }
-
def enterValDef(tree: ValDef) {
if (noEnterGetterSetter(tree))
assignAndEnterFinishedSymbol(tree)
@@ -614,7 +617,7 @@ trait Namers extends MethodSynthesis {
// via "x$lzy" as can be seen in test #3927.
val sym = (
if (owner.isClass) createFieldSymbol(tree)
- else owner.newValue(tree.name append nme.LAZY_LOCAL, tree.pos, tree.mods.flags & ~IMPLICIT)
+ else owner.newValue(tree.name append nme.LAZY_LOCAL, tree.pos, (tree.mods.flags | ARTIFACT) & ~IMPLICIT)
)
enterValSymbol(tree, sym setFlag MUTABLE setLazyAccessor lazyAccessor)
}
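An illustrative sketch, not part of this patch: the local lazy val case handled by the symbol creation above. Inside a method the compiler introduces a mutable x$lzy-style holder plus an accessor, so the initializer runs at most once. The names below are made up.

object LazyLocalDemo extends App {
  def demo(): Int = {
    lazy val x = { println("init"); 21 }  // desugars to a mutable x$lzy holder plus a lazy accessor
    x + x                                 // "init" is printed only once; result is 42
  }
  println(demo())
}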
@@ -635,7 +638,7 @@ trait Namers extends MethodSynthesis {
case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) =>
assignAndEnterFinishedSymbol(tree)
case DefDef(mods, name, tparams, _, _, _) =>
- val bridgeFlag = if (mods hasAnnotationNamed tpnme.bridgeAnnot) BRIDGE else 0
+ val bridgeFlag = if (mods hasAnnotationNamed tpnme.bridgeAnnot) BRIDGE | ARTIFACT else 0
val sym = assignAndEnterSymbol(tree) setFlag bridgeFlag
if (name == nme.copy && sym.isSynthetic)
@@ -645,7 +648,7 @@ trait Namers extends MethodSynthesis {
}
def enterClassDef(tree: ClassDef) {
- val ClassDef(mods, name, tparams, impl) = tree
+ val ClassDef(mods, _, _, impl) = tree
val primaryConstructorArity = treeInfo.firstConstructorArgs(impl.body).size
tree.symbol = enterClassSymbol(tree)
tree.symbol setInfo completerOf(tree)
@@ -684,22 +687,9 @@ trait Namers extends MethodSynthesis {
validateCompanionDefs(tree)
}
- // this logic is needed in case typer was interrupted half
- // way through and then comes back to do the tree again. In
- // that case the definitions that were already attributed as
- // well as any default parameters of such methods need to be
- // re-entered in the current scope.
- protected def enterExistingSym(sym: Symbol): Context = {
- if (forInteractive && sym != null && sym.owner.isTerm) {
- enterIfNotThere(sym)
- if (sym.isLazy)
- sym.lazyAccessor andAlso enterIfNotThere
-
- for (defAtt <- sym.attachments.get[DefaultsOfLocalMethodAttachment])
- defAtt.defaultGetters foreach enterIfNotThere
- }
- this.context
- }
+ // Hooks which are overridden in the presentation compiler
+ def enterExistingSym(sym: Symbol): Context = this.context
+ def enterIfNotThere(sym: Symbol) { }
def enterSyntheticSym(tree: Tree): Symbol = {
enterSym(tree)
@@ -709,41 +699,55 @@ trait Namers extends MethodSynthesis {
// --- Lazy Type Assignment --------------------------------------------------
- def initializeLowerBounds(tp: Type): Type = {
+ def findCyclicalLowerBound(tp: Type): Symbol = {
tp match {
case TypeBounds(lo, _) =>
// check that lower bound is not an F-bound
- for (TypeRef(_, sym, _) <- lo)
- sym.initialize
+ // but carefully: class Foo[T <: Bar[_ >: T]] should be allowed
+ for (tp1 @ TypeRef(_, sym, _) <- lo) {
+ if (settings.breakCycles.value) {
+ if (!sym.maybeInitialize) {
+ log(s"Cycle inspecting $lo for possible f-bounds: ${sym.fullLocationString}")
+ return sym
+ }
+ }
+ else sym.initialize
+ }
case _ =>
}
- tp
+ NoSymbol
}
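An illustrative sketch, not part of this patch: the shape the comment above says must remain compilable; the lower bound of the existential mentions the type parameter itself, so the cycle breaker may probe the bound's symbol but must not loop.

trait Bar[A]
class Foo[T <: Bar[_ >: T]]  // accepted: an F-bounded lower bound inside the upper bound
// by contrast, neg/t1224's `trait A { type T >: C[T] <: C[C[T]] }` is a genuine cycle and is rejected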
def monoTypeCompleter(tree: Tree) = mkTypeCompleter(tree) { sym =>
+ // this early test is there to avoid infinite baseTypes when
+ // adding setters and getters --> bug798
+ // It is a def in an attempt to provide some insulation against
+ // uninitialized symbols misleading us. It is not a certainty
+ // this accomplishes anything, but performance is a non-consideration
+ // on these flag checks so it can't hurt.
+ def needsCycleCheck = sym.isNonClassType && !sym.isParameter && !sym.isExistential
logAndValidate(sym) {
- val tp = initializeLowerBounds(typeSig(tree))
+ val tp = typeSig(tree)
+
+ findCyclicalLowerBound(tp) andAlso { sym =>
+ if (needsCycleCheck) {
+ // neg/t1224: trait C[T] ; trait A { type T >: C[T] <: C[C[T]] }
+ // To avoid an infinite loop on the above, we cannot break all cycles
+ log(s"Reinitializing info of $sym to catch any genuine cycles")
+ sym reset sym.info
+ sym.initialize
+ }
+ }
sym setInfo {
if (sym.isJavaDefined) RestrictJavaArraysMap(tp)
else tp
}
- // this early test is there to avoid infinite baseTypes when
- // adding setters and getters --> bug798
- val needsCycleCheck = (sym.isAliasType || sym.isAbstractType) && !sym.isParameter
- if (needsCycleCheck && !typer.checkNonCyclic(tree.pos, tp))
- sym setInfo ErrorType
+ if (needsCycleCheck) {
+ log(s"Needs cycle check: ${sym.debugLocationString}")
+ if (!typer.checkNonCyclic(tree.pos, tp))
+ sym setInfo ErrorType
+ }
}
- // tree match {
- // case ClassDef(_, _, _, impl) =>
- // val parentsOK = (
- // treeInfo.isInterface(sym, impl.body)
- // || (sym eq ArrayClass)
- // || (sym isSubClass AnyValClass)
- // )
- // if (!parentsOK)
- // ensureParent(sym, AnyRefClass)
- // case _ => ()
- // }
}
def moduleClassTypeCompleter(tree: ModuleDef) = {
@@ -802,7 +806,7 @@ trait Namers extends MethodSynthesis {
false
}
- val tpe1 = dropRepeatedParamType(tpe.deconst)
+ val tpe1 = dropIllegalStarTypes(tpe.deconst)
val tpe2 = tpe1.widen
// This infers Foo.type instead of "object Foo"
@@ -845,7 +849,7 @@ trait Namers extends MethodSynthesis {
val sym = (
if (hasType || hasName) {
- owner.typeOfThis = if (hasType) selfTypeCompleter(tpt) else owner.tpe
+ owner.typeOfThis = if (hasType) selfTypeCompleter(tpt) else owner.tpe_*
val selfSym = owner.thisSym setPos self.pos
if (hasName) selfSym setName name else selfSym
}
@@ -860,16 +864,11 @@ trait Namers extends MethodSynthesis {
private def templateSig(templ: Template): Type = {
val clazz = context.owner
def checkParent(tpt: Tree): Type = {
- val tp = tpt.tpe
- val inheritsSelf = tp.typeSymbol == owner
- if (inheritsSelf)
- InheritsItselfError(tpt)
-
- if (inheritsSelf || tp.isError) AnyRefClass.tpe
- else tp
+ if (tpt.tpe.isError) AnyRefClass.tpe
+ else tpt.tpe
}
- val parents = typer.parentTypes(templ) map checkParent
+ val parents = typer.typedParentTypes(templ) map checkParent
enterSelf(templ.self)
@@ -895,11 +894,10 @@ trait Namers extends MethodSynthesis {
val modClass = companionSymbolOf(clazz, context).moduleClass
modClass.attachments.get[ClassForCaseCompanionAttachment] foreach { cma =>
val cdef = cma.caseClass
- def hasCopy(decls: Scope) = (decls lookup nme.copy) != NoSymbol
+ def hasCopy = (decls containsName nme.copy) || parents.exists(_ member nme.copy exists)
+
// SI-5956 needs (cdef.symbol == clazz): there can be multiple class symbols with the same name
- if (cdef.symbol == clazz && !hasCopy(decls) &&
- !parents.exists(p => hasCopy(p.typeSymbol.info.decls)) &&
- !parents.flatMap(_.baseClasses).distinct.exists(bc => hasCopy(bc.info.decls)))
+ if (cdef.symbol == clazz && !hasCopy)
addCopyMethod(cdef, templateNamer)
}
}
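An illustrative sketch, not part of this patch: the situation hasCopy guards against; when a case class already declares a copy method itself, the check above suppresses the synthetic one. The class name is made up.

case class Box(value: Int) {
  // a user-written copy: the compiler detects it and does not synthesize its own
  def copy(value: Int = this.value): Box = Box(value + 1)
}
// Box(1).copy() == Box(2), using the hand-written method rather than a generated one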
@@ -945,9 +943,9 @@ trait Namers extends MethodSynthesis {
// Assign the moduleClass info (templateSig returns a ClassInfoType)
val clazz = moduleSym.moduleClass
clazz setInfo pluginsTp
- // clazz.tpe returns a `ModuleTypeRef(clazz)`, a typeRef that links to the module class `clazz`
+ // clazz.tpe_* returns a `ModuleTypeRef(clazz)`, a typeRef that links to the module class `clazz`
// (clazz.info would be the ClassInfoType, which is not what should be assigned to the module symbol)
- clazz.tpe
+ clazz.tpe_*
}
/**
@@ -1105,7 +1103,7 @@ trait Namers extends MethodSynthesis {
}
if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
- tpt defineType context.enclClass.owner.tpe
+ tpt defineType context.enclClass.owner.tpe_*
tpt setPos meth.pos.focus
}
@@ -1141,7 +1139,7 @@ trait Namers extends MethodSynthesis {
// because @macroImpl annotation only gets assigned during typechecking
// otherwise macro defs wouldn't be able to robustly coexist with their clients
// because a client could be typechecked before a macro def that it uses
- if (meth.isTermMacro) {
+ if (meth.isMacro) {
typer.computeMacroDefType(ddef, resTpFromOverride)
}
@@ -1276,17 +1274,10 @@ trait Namers extends MethodSynthesis {
if (!isConstr)
methOwner.resetFlag(INTERFACE) // there's a concrete member now
val default = parentNamer.enterSyntheticSym(defaultTree)
- if (forInteractive && default.owner.isTerm) {
- // save the default getters as attachments in the method symbol. if compiling the
- // same local block several times (which can happen in interactive mode) we might
- // otherwise not find the default symbol, because the second time it the method
- // symbol will be re-entered in the scope but the default parameter will not.
- val att = meth.attachments.get[DefaultsOfLocalMethodAttachment] match {
- case Some(att) => att.defaultGetters += default
- case None => meth.updateAttachment(new DefaultsOfLocalMethodAttachment(default))
- }
- }
- } else if (baseHasDefault) {
+ if (default.owner.isTerm)
+ saveDefaultGetter(meth, default)
+ }
+ else if (baseHasDefault) {
// the parameter does not have a default itself, but the
// corresponding parameter in the base class does.
sym.setFlag(DEFAULTPARAM)
@@ -1364,8 +1355,7 @@ trait Namers extends MethodSynthesis {
transformed(imp) = newImport
// copy symbol and type attributes back into old expression
// so that the structure builder will find it.
- expr.symbol = expr1.symbol
- expr.tpe = expr1.tpe
+ expr setSymbol expr1.symbol setType expr1.tpe
ImportType(expr1)
}
}
@@ -1418,7 +1408,7 @@ trait Namers extends MethodSynthesis {
AnnotationInfo lazily {
val context1 = typer.context.make(ann)
context1.setReportErrors()
- beforeTyper(newTyper(context1) typedAnnotation ann)
+ enteringTyper(newTyper(context1) typedAnnotation ann)
}
}
if (ainfos.nonEmpty) {
@@ -1470,12 +1460,6 @@ trait Namers extends MethodSynthesis {
tpe
}
- def ensureParent(clazz: Symbol, parent: Symbol) = {
- val info0 = clazz.info
- val info1 = includeParent(info0, parent)
- if (info0 ne info1) clazz setInfo info1
- }
-
class LogTransitions[S](onEnter: S => String, onExit: S => String) {
val enabled = settings.debug.value
@inline final def apply[T](entity: S)(body: => T): T = {
@@ -1536,7 +1520,7 @@ trait Namers extends MethodSynthesis {
fail(ImplicitConstr)
if (!(sym.isTerm || (sym.isClass && !sym.isTrait)))
fail(ImplicitNotTermOrClass)
- if (sym.owner.isPackageClass)
+ if (sym.isTopLevel)
fail(ImplicitAtToplevel)
}
if (sym.isClass) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 1c60f0a79d..ce8e0ed37b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -8,7 +8,6 @@ package typechecker
import symtab.Flags._
import scala.collection.mutable
-import scala.ref.WeakReference
import scala.reflect.ClassTag
/**
@@ -42,13 +41,11 @@ trait NamesDefaults { self: Analyzer =>
blockTyper: Typer
) { }
- val noApplyInfo = NamedApplyInfo(None, Nil, Nil, null)
-
- def nameOf(arg: Tree) = arg match {
- case AssignOrNamedArg(Ident(name), rhs) => Some(name)
- case _ => None
+ private def nameOfNamedArg(arg: Tree) = Some(arg) collect { case AssignOrNamedArg(Ident(name), _) => name }
+ def isNamedArg(arg: Tree) = arg match {
+ case AssignOrNamedArg(Ident(_), _) => true
+ case _ => false
}
- def isNamed(arg: Tree) = nameOf(arg).isDefined
/** @param pos maps indices from old to new */
def reorderArgs[T: ClassTag](args: List[T], pos: Int => Int): List[T] = {
@@ -58,13 +55,13 @@ trait NamesDefaults { self: Analyzer =>
}
/** @param pos maps indices from new to old (!) */
- def reorderArgsInv[T: ClassTag](args: List[T], pos: Int => Int): List[T] = {
+ private def reorderArgsInv[T: ClassTag](args: List[T], pos: Int => Int): List[T] = {
val argsArray = args.toArray
(argsArray.indices map (i => argsArray(pos(i)))).toList
}
/** returns `true` if every element is equal to its index */
- def isIdentity(a: Array[Int]) = (0 until a.length).forall(i => a(i) == i)
+ def allArgsArePositional(a: Array[Int]) = (0 until a.length).forall(i => a(i) == i)
/**
* Transform a function application into a Block, and assigns typer.context
@@ -107,7 +104,7 @@ trait NamesDefaults { self: Analyzer =>
* @return the transformed application (a Block) together with the NamedApplyInfo.
* if isNamedApplyBlock(tree), returns the existing context.namedApplyBlockInfo
*/
- def transformNamedApplication(typer: Typer, mode: Int, pt: Type)
+ def transformNamedApplication(typer: Typer, mode: Mode, pt: Type)
(tree: Tree, argPos: Int => Int): Tree = {
import typer._
import typer.infer._
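An illustrative sketch, not part of this patch: the user-visible contract behind the Block built by this transformation; named arguments are evaluated in call-site order, captured in temporaries, and then passed in definition-site order. The object and method names are made up.

object NamedArgsDemo extends App {
  def loud(tag: String, v: Int): Int = { println(tag); v }
  def f(a: Int, b: Int): Int = a - b
  // prints "b" then "a" (call-site order), yet f receives a = 3 and b = 1
  println(f(b = loud("b", 1), a = loud("a", 3)))  // prints 2
}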
@@ -164,14 +161,14 @@ trait NamesDefaults { self: Analyzer =>
// never used for constructor calls, they always have a stable qualifier
def blockWithQualifier(qual: Tree, selected: Name) = {
- val sym = blockTyper.context.owner.newValue(unit.freshTermName("qual$"), qual.pos) setInfo qual.tpe
+ val sym = blockTyper.context.owner.newValue(unit.freshTermName("qual$"), qual.pos, newFlags = ARTIFACT) setInfo qual.tpe
blockTyper.context.scope enter sym
val vd = atPos(sym.pos)(ValDef(sym, qual) setType NoType)
// it stays in Vegas: SI-5720, SI-5727
qual changeOwner (blockTyper.context.owner -> sym)
val newQual = atPos(qual.pos.focus)(blockTyper.typedQualifier(Ident(sym.name)))
- var baseFunTransformed = atPos(baseFun.pos.makeTransparent) {
+ val baseFunTransformed = atPos(baseFun.pos.makeTransparent) {
// setSymbol below is important because the 'selected' function might be overloaded. by
// assigning the correct method symbol, typedSelect will just assign the type. the reason
// to still call 'typed' is to correctly infer singleton types, SI-5259.
@@ -286,10 +283,10 @@ trait NamesDefaults { self: Analyzer =>
}
else
// Note stabilizing can lead to a non-conformant argument when existentials are involved, e.g. neg/t3507-old.scala, hence the filter.
- // We have to deconst or types inferred from literal arguments will be Constant(_), e.g. pos/z1730.scala.
- gen.stableTypeFor(arg).filter(_ <:< paramTpe).getOrElse(arg.tpe).deconst
- )
- val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo (
+ gen.stableTypeFor(arg).filter(_ <:< paramTpe).getOrElse(arg.tpe)
+ // We have to deconst or types inferred from literal arguments will be Constant(_), e.g. pos/z1730.scala.
+ ).deconst
+ val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos, newFlags = ARTIFACT) setInfo (
if (byName) functionType(Nil, argTpe) else argTpe
)
Some((context.scope.enter(s), byName, repeated))
@@ -328,7 +325,7 @@ trait NamesDefaults { self: Analyzer =>
assert(isNamedApplyBlock(transformedFun), transformedFun)
val NamedApplyInfo(qual, targs, vargss, blockTyper) =
context.namedApplyBlockInfo.get._2
- val existingBlock @ Block(stats, funOnly) = transformedFun
+ val Block(stats, funOnly) = transformedFun
// type the application without names; put the arguments in definition-site order
val typedApp = doTypedApply(tree, funOnly, reorderArgs(namelessArgs, argPos), mode, pt)
@@ -373,7 +370,7 @@ trait NamesDefaults { self: Analyzer =>
}
}
- def missingParams[T](args: List[T], params: List[Symbol], argName: T => Option[Name] = nameOf _): (List[Symbol], Boolean) = {
+ def missingParams[T](args: List[T], params: List[Symbol], argName: T => Option[Name] = nameOfNamedArg _): (List[Symbol], Boolean) = {
val namedArgs = args.dropWhile(arg => {
val n = argName(arg)
n.isEmpty || params.forall(p => p.name != n.get)
@@ -408,7 +405,7 @@ trait NamesDefaults { self: Analyzer =>
// TODO #3649 can create spurious errors when companion object is gone (because it becomes unlinked from scope)
if (defGetter == NoSymbol) None // prevent crash in erroneous trees, #3649
else {
- var default1 = qual match {
+ var default1: Tree = qual match {
case Some(q) => gen.mkAttributedSelect(q.duplicate, defGetter)
case None => gen.mkAttributedRef(defGetter)
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index b9fdd7280e..b7221a78ec 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -71,7 +71,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if (sym.hasAccessBoundary) "" + sym.privateWithin.name else ""
)
- def overridesTypeInPrefix(tp1: Type, tp2: Type, prefix: Type): Boolean = (tp1.normalize, tp2.normalize) match {
+ def overridesTypeInPrefix(tp1: Type, tp2: Type, prefix: Type): Boolean = (tp1.dealiasWiden, tp2.dealiasWiden) match {
case (MethodType(List(), rtp1), NullaryMethodType(rtp2)) =>
rtp1 <:< rtp2
case (NullaryMethodType(rtp1), MethodType(List(), rtp2)) =>
@@ -95,7 +95,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
class RefCheckTransformer(unit: CompilationUnit) extends Transformer {
- var localTyper: analyzer.Typer = typer;
+ var localTyper: analyzer.Typer = typer
var currentApplication: Tree = EmptyTree
var inPattern: Boolean = false
var checkedCombinations = Set[List[Type]]()
@@ -133,7 +133,16 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
}
}
- if (settings.lint.value) {
+
+ // Check for doomed attempt to overload applyDynamic
+ if (clazz isSubClass DynamicClass) {
+ for ((_, m1 :: m2 :: _) <- (clazz.info member nme.applyDynamic).alternatives groupBy (_.typeParams.length)) {
+ unit.error(m1.pos, "implementation restriction: applyDynamic cannot be overloaded except by methods with different numbers of type parameters, e.g. applyDynamic[T1](method: String)(arg: T1) and applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2)")
+ }
+ }
+
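An illustrative sketch, not part of this patch: the overloading pattern the new error message still permits; the two applyDynamic methods differ in their number of type parameters, so the restriction above does not fire. The class and call names are made up.

import scala.language.dynamics

class Router extends Dynamic {
  def applyDynamic[T1](method: String)(arg: T1): String = s"$method($arg)"
  def applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2): String = s"$method($arg1, $arg2)"
}

object RouterDemo extends App {
  val r = new Router
  println(r.ping(1))       // rewritten to r.applyDynamic("ping")(1)
  println(r.pair(1, "x"))  // rewritten to r.applyDynamic("pair")(1, "x")
}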
+ // This has become noisy with implicit classes.
+ if (settings.lint.value && settings.developer.value) {
clazz.info.decls filter (x => x.isImplicit && x.typeParams.nonEmpty) foreach { sym =>
val alts = clazz.info.decl(sym.name).alternatives
if (alts.size > 1)
@@ -240,7 +249,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case class MixinOverrideError(member: Symbol, msg: String)
- var mixinOverrideErrors = new ListBuffer[MixinOverrideError]()
+ val mixinOverrideErrors = new ListBuffer[MixinOverrideError]()
def printMixinOverrideErrors() {
mixinOverrideErrors.toList match {
@@ -377,11 +386,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if (!isOverrideAccessOK) {
overrideAccessError()
} else if (other.isClass) {
- overrideError("cannot be used here - class definitions cannot be overridden");
+ overrideError("cannot be used here - class definitions cannot be overridden")
} else if (!other.isDeferred && member.isClass) {
- overrideError("cannot be used here - classes can only override abstract types");
+ overrideError("cannot be used here - classes can only override abstract types")
} else if (other.isEffectivelyFinal) { // (1.2)
- overrideError("cannot override final member");
+ overrideError("cannot override final member")
} else if (!other.isDeferred && !member.isAnyOverride && !member.isSynthetic) { // (*)
// (*) Synthetic exclusion for (at least) default getters, fixes SI-5178. We cannot assign the OVERRIDE flag to
// the default getter: one default getter might sometimes override, sometimes not. Example in comment on ticket.
@@ -417,9 +426,9 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
member.isValue && !member.isLazy) {
overrideError("must be declared lazy to override a concrete lazy value")
} else if (other.isDeferred && member.isTermMacro) { // (1.9)
- overrideError("cannot override an abstract method")
+ overrideError("cannot be used here - term macros cannot override abstract methods")
} else if (other.isTermMacro && !member.isTermMacro) { // (1.10)
- overrideError("cannot override a macro")
+ overrideError("cannot be used here - only term macros can override term macros")
} else {
checkOverrideTypes()
checkOverrideDeprecated()
@@ -440,7 +449,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// @M: substSym
if( !(sameLength(member.typeParams, other.typeParams) && (memberTp.substSym(member.typeParams, other.typeParams) =:= otherTp)) ) // (1.6)
- overrideTypeError();
+ overrideTypeError()
}
else if (other.isAbstractType) {
//if (!member.typeParams.isEmpty) // (1.7) @MAT
@@ -465,12 +474,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// check a type alias's RHS corresponds to its declaration
// this overlaps somewhat with validateVariance
if(member.isAliasType) {
- // println("checkKindBounds" + ((List(member), List(memberTp.normalize), self, member.owner)))
- val kindErrors = typer.infer.checkKindBounds(List(member), List(memberTp.normalize), self, member.owner)
+ // println("checkKindBounds" + ((List(member), List(memberTp.dealiasWiden), self, member.owner)))
+ val kindErrors = typer.infer.checkKindBounds(List(member), List(memberTp.dealiasWiden), self, member.owner)
if(!kindErrors.isEmpty)
unit.error(member.pos,
- "The kind of the right-hand side "+memberTp.normalize+" of "+member.keyString+" "+
+ "The kind of the right-hand side "+memberTp.dealiasWiden+" of "+member.keyString+" "+
member.varianceString + member.nameString+ " does not conform to its expected kind."+
kindErrors.toList.mkString("\n", ", ", ""))
} else if (member.isAbstractType) {
@@ -489,11 +498,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if (member.isStable && !otherTp.isVolatile) {
if (memberTp.isVolatile)
overrideError("has a volatile type; cannot override a member with non-volatile type")
- else memberTp.normalize.resultType match {
+ else memberTp.dealiasWiden.resultType match {
case rt: RefinedType if !(rt =:= otherTp) && !(checkedCombinations contains rt.parents) =>
// might mask some inconsistencies -- check overrides
checkedCombinations += rt.parents
- val tsym = rt.typeSymbol;
+ val tsym = rt.typeSymbol
if (tsym.pos == NoPosition) tsym setPos member.pos
checkAllOverrides(tsym, typesOnly = true)
case _ =>
@@ -514,9 +523,9 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val opc = new overridingPairs.Cursor(clazz)
while (opc.hasNext) {
//Console.println(opc.overriding/* + ":" + opc.overriding.tpe*/ + " in "+opc.overriding.fullName + " overrides " + opc.overridden/* + ":" + opc.overridden.tpe*/ + " in "+opc.overridden.fullName + "/"+ opc.overridden.hasFlag(DEFERRED));//debug
- if (!opc.overridden.isClass) checkOverride(opc.overriding, opc.overridden);
+ if (!opc.overridden.isClass) checkOverride(opc.overriding, opc.overridden)
- opc.next
+ opc.next()
}
printMixinOverrideErrors()
@@ -548,13 +557,13 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
def uncurryAndErase(tp: Type) = erasure.erasure(sym)(uncurry.transformInfo(sym, tp))
val tp1 = uncurryAndErase(clazz.thisType.memberType(sym))
val tp2 = uncurryAndErase(clazz.thisType.memberType(other))
- afterErasure(tp1 matches tp2)
+ exitingErasure(tp1 matches tp2)
})
def ignoreDeferred(member: Symbol) = (
(member.isAbstractType && !member.isFBounded) || (
member.isJavaDefined &&
- // the test requires afterErasure so shouldn't be
+ // the test requires exitingErasure so shouldn't be
// done if the compiler has no erasure phase available
(currentRun.erasurePhase == NoPhase || javaErasedOverridingSym(member) != NoSymbol)
)
@@ -717,8 +726,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
else if (clazz.isTrait && !(clazz isSubClass AnyValClass)) {
// For non-AnyVal classes, prevent abstract methods in interfaces that override
// final members in Object; see #4431
- for (decl <- clazz.info.decls.iterator) {
- val overridden = decl.overriddenSymbol(ObjectClass)
+ for (decl <- clazz.info.decls) {
+ // Have to use matchingSymbol, not a method involving overridden symbols,
+ // because the scala type system understands that an abstract method here does not
+ // override a concrete method in Object. The jvm, however, does not.
+ val overridden = decl.matchingSymbol(ObjectClass, ObjectClass.tpe)
if (overridden.isFinal)
unit.error(decl.pos, "trait cannot redefine final method from class AnyRef")
}
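A hedged sketch of what the #4431 check above rejects (hypothetical user code): a trait declaring an abstract method that the JVM would treat as overriding a final method of Object, even though the Scala type system does not.

trait Awaitable {
  // uncommenting this declaration is reported as
  // "trait cannot redefine final method from class AnyRef"
  // def wait(): Unit

  def await(): Unit   // a differently named abstract method is fine
}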
@@ -773,7 +785,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// for (bc <- clazz.info.baseClasses.tail) Console.println("" + bc + " has " + bc.info.decl(member.name) + ":" + bc.info.decl(member.name).tpe);//DEBUG
val nonMatching: List[Symbol] = clazz.info.member(member.name).alternatives.filterNot(_.owner == clazz).filterNot(_.isFinal)
- def issueError(suffix: String) = unit.error(member.pos, member.toString() + " overrides nothing" + suffix);
+ def issueError(suffix: String) = unit.error(member.pos, member.toString() + " overrides nothing" + suffix)
nonMatching match {
case Nil =>
issueError("")
@@ -828,7 +840,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case tp1 :: tp2 :: _ =>
unit.error(clazz.pos, "illegal inheritance;\n " + clazz +
" inherits different type instances of " + baseClass +
- ":\n" + tp1 + " and " + tp2);
+ ":\n" + tp1 + " and " + tp2)
explainTypes(tp1, tp2)
explainTypes(tp2, tp1)
}
@@ -837,161 +849,14 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// Variance Checking --------------------------------------------------------
- private val ContraVariance = -1
- private val NoVariance = 0
- private val CoVariance = 1
- private val AnyVariance = 2
-
- private val escapedPrivateLocals = new mutable.HashSet[Symbol]
-
- val varianceValidator = new Traverser {
-
- /** Validate variance of info of symbol `base` */
- private def validateVariance(base: Symbol) {
- // A flag for when we're in a refinement, meaning method parameter types
- // need to be checked.
- var inRefinement = false
-
- def varianceString(variance: Int): String =
- if (variance == 1) "covariant"
- else if (variance == -1) "contravariant"
- else "invariant";
-
- /** The variance of a symbol occurrence of `tvar`
- * seen at the level of the definition of `base`.
- * The search proceeds from `base` to the owner of `tvar`.
- * Initially the state is covariant, but it might change along the search.
- */
- def relativeVariance(tvar: Symbol): Int = {
- val clazz = tvar.owner
- var sym = base
- var state = CoVariance
- while (sym != clazz && state != AnyVariance) {
- //Console.println("flip: " + sym + " " + sym.isParameter());//DEBUG
- // Flip occurrences of type parameters and parameters, unless
- // - it's a constructor, or case class factory or extractor
- // - it's a type parameter of tvar's owner.
- if (sym.isParameter && !sym.owner.isConstructor && !sym.owner.isCaseApplyOrUnapply &&
- !(tvar.isTypeParameterOrSkolem && sym.isTypeParameterOrSkolem &&
- tvar.owner == sym.owner)) state = -state;
- else if (!sym.owner.isClass ||
- sym.isTerm && ((sym.isPrivateLocal || sym.isProtectedLocal || sym.isSuperAccessor /* super accessors are implicitly local #4345*/) && !(escapedPrivateLocals contains sym))) {
- // return AnyVariance if `sym` is local to a term
- // or is private[this] or protected[this]
- state = AnyVariance
- } else if (sym.isAliasType) {
- // return AnyVariance if `sym` is an alias type
- // that does not override anything. This is OK, because we always
- // expand aliases for variance checking.
- // However, if `sym` does override a type in a base class
- // we have to assume NoVariance, as there might then be
- // references to the type parameter that are not variance checked.
- state = if (sym.isOverridingSymbol) NoVariance else AnyVariance
- }
- sym = sym.owner
- }
- state
- }
-
- /** Validate that the type `tp` is variance-correct, assuming
- * the type occurs itself at variance position given by `variance`
- */
- def validateVariance(tp: Type, variance: Int): Unit = tp match {
- case ErrorType =>
- case WildcardType =>
- case BoundedWildcardType(bounds) =>
- validateVariance(bounds, variance)
- case NoType =>
- case NoPrefix =>
- case ThisType(_) =>
- case ConstantType(_) =>
- // case DeBruijnIndex(_, _) =>
- case SingleType(pre, sym) =>
- validateVariance(pre, variance)
- case TypeRef(pre, sym, args) =>
-// println("validate "+sym+" at "+relativeVariance(sym))
- if (sym.isAliasType/* && relativeVariance(sym) == AnyVariance*/)
- validateVariance(tp.normalize, variance)
- else if (sym.variance != NoVariance) {
- val v = relativeVariance(sym)
- if (v != AnyVariance && sym.variance != v * variance) {
- //Console.println("relativeVariance(" + base + "," + sym + ") = " + v);//DEBUG
- def tpString(tp: Type) = tp match {
- case ClassInfoType(parents, _, clazz) => "supertype "+intersectionType(parents, clazz.owner)
- case _ => "type "+tp
- }
- unit.error(base.pos,
- varianceString(sym.variance) + " " + sym +
- " occurs in " + varianceString(v * variance) +
- " position in " + tpString(base.info) + " of " + base);
- }
- }
- validateVariance(pre, variance)
- // @M for higher-kinded typeref, args.isEmpty
- // However, these args respect variances by construction anyway
- // -- the interesting case is in type application, see checkKindBounds in Infer
- if (args.nonEmpty)
- validateVarianceArgs(args, variance, sym.typeParams)
- case ClassInfoType(parents, decls, symbol) =>
- validateVariances(parents, variance)
- case RefinedType(parents, decls) =>
- validateVariances(parents, variance)
- val saved = inRefinement
- inRefinement = true
- for (sym <- decls)
- validateVariance(sym.info, if (sym.isAliasType) NoVariance else variance)
- inRefinement = saved
- case TypeBounds(lo, hi) =>
- validateVariance(lo, -variance)
- validateVariance(hi, variance)
- case mt @ MethodType(formals, result) =>
- if (inRefinement)
- validateVariances(mt.paramTypes, -variance)
- validateVariance(result, variance)
- case NullaryMethodType(result) =>
- validateVariance(result, variance)
- case PolyType(tparams, result) =>
- // type parameters will be validated separately, because they are defined explicitly.
- validateVariance(result, variance)
- case ExistentialType(tparams, result) =>
- validateVariances(tparams map (_.info), variance)
- validateVariance(result, variance)
- case AnnotatedType(annots, tp, selfsym) =>
- if (!annots.exists(_ matches uncheckedVarianceClass))
- validateVariance(tp, variance)
- }
-
- def validateVariances(tps: List[Type], variance: Int) {
- tps foreach (tp => validateVariance(tp, variance))
- }
-
- def validateVarianceArgs(tps: List[Type], variance: Int, tparams: List[Symbol]) {
- foreach2(tps, tparams)((tp, tparam) => validateVariance(tp, variance * tparam.variance))
- }
-
- validateVariance(base.info, CoVariance)
+ object varianceValidator extends VarianceValidator {
+ private def tpString(tp: Type) = tp match {
+ case ClassInfoType(parents, _, clazz) => "supertype "+intersectionType(parents, clazz.owner)
+ case _ => "type "+tp
}
-
- override def traverse(tree: Tree) {
- tree match {
- case ClassDef(_, _, _, _) | TypeDef(_, _, _, _) =>
- validateVariance(tree.symbol)
- super.traverse(tree)
- // ModuleDefs need not be considered because they have been eliminated already
- case ValDef(_, _, _, _) =>
- if (!tree.symbol.hasLocalFlag)
- validateVariance(tree.symbol)
- case DefDef(_, _, tparams, vparamss, _, _) =>
- // No variance check for object-private/protected methods/values.
- if (!tree.symbol.hasLocalFlag) {
- validateVariance(tree.symbol)
- traverseTrees(tparams)
- traverseTreess(vparamss)
- }
- case Template(_, _, _) =>
- super.traverse(tree)
- case _ =>
- }
+ override def issueVarianceError(base: Symbol, sym: Symbol, required: Variance) {
+ currentRun.currentUnit.error(base.pos,
+ s"${sym.variance} $sym occurs in $required position in ${tpString(base.info)} of $base")
}
}
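A hedged illustration of the kind of declaration issueVarianceError above reports on (hypothetical user code); the message interpolates the symbol's declared variance and the variance required at the position where it occurs.

class Box[+A](initial: A) {
  def get: A = initial
  // uncommenting this member is reported along the lines of
  // "covariant type A occurs in contravariant position in type A of value a"
  // def put(a: A): Unit = ()
}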
@@ -1040,7 +905,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val e = currentLevel.scope.lookupEntry(sym.name)
if ((e ne null) && sym == e.sym) {
var l = currentLevel
- while (l.scope != e.owner) l = l.outer;
+ while (l.scope != e.owner) l = l.outer
val symindex = symIndex(sym)
if (l.maxindex < symindex) {
l.refpos = pos
@@ -1111,15 +976,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
def isUnit(s: Symbol) = unboxedValueClass(s) == UnitClass
def isNumeric(s: Symbol) = isNumericValueClass(unboxedValueClass(s)) || isAnyNumber(s)
def isScalaNumber(s: Symbol) = s isSubClass ScalaNumberClass
- // test is behind a platform guard
- def isJavaNumber(s: Symbol) = !forMSIL && (s isSubClass JavaNumberClass)
+ def isJavaNumber(s: Symbol) = s isSubClass JavaNumberClass
// includes java.lang.Number if appropriate [SI-5779]
def isAnyNumber(s: Symbol) = isScalaNumber(s) || isJavaNumber(s)
def isMaybeAnyValue(s: Symbol) = isPrimitiveValueClass(unboxedValueClass(s)) || isMaybeValue(s)
// used to short-circuit unrelatedTypes check if both sides are special
def isSpecial(s: Symbol) = isMaybeAnyValue(s) || isAnyNumber(s)
- // unused
- def possibleNumericCount = onSyms(_ filter (x => isNumeric(x) || isMaybeValue(x)) size)
val nullCount = onSyms(_ filter (_ == NullClass) size)
def nonSensibleWarning(what: String, alwaysEqual: Boolean) = {
@@ -1157,7 +1019,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
nonSensiblyNeq()
}
else if (isNumeric(receiver)) {
- if (!isNumeric(actual) && !forMSIL)
+ if (!isNumeric(actual))
if (isUnit(actual) || isBoolean(actual) || !isMaybeValue(actual)) // 5 == "abc"
nonSensiblyNeq()
}
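Hedged examples of comparisons the numeric-receiver branch above treats as non-sensible (hypothetical user code; these compile but draw "will always yield false"-style warnings):

object CompareDemo {
  val a = 5 == "abc"   // numeric receiver, actual is neither numeric nor a value class
  val b = 5 == true    // numeric receiver compared with a Boolean
}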
@@ -1230,8 +1092,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
/* Convert a reference to a case factory of type `tpe` to a new of the class it produces. */
def toConstructor(pos: Position, tpe: Type): Tree = {
- var rtpe = tpe.finalResultType
- assert(rtpe.typeSymbol hasFlag CASE, tpe);
+ val rtpe = tpe.finalResultType
+ assert(rtpe.typeSymbol hasFlag CASE, tpe)
localTyper.typedOperator {
atPos(pos) {
Select(New(TypeTree(rtpe)), rtpe.typeSymbol.primaryConstructor)
@@ -1249,57 +1111,61 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
finally popLevel()
}
- /** Eliminate ModuleDefs.
- * - A top level object is replaced with their module class.
- * - An inner object is transformed into a module var, created on first access.
+ /** Eliminate ModuleDefs. In all cases the ModuleDef (carrying a module symbol) is
+ * replaced with a ClassDef (carrying the corresponding module class symbol) with additional
+ * trees created as follows:
*
- * In both cases, this transformation returns the list of replacement trees:
- * - Top level: the module class accessor definition
- * - Inner: a class definition, declaration of module var, and module var accessor
+ * 1) A statically reachable object (either top-level or nested only in objects) receives
+ * no additional trees.
+ * 2) An inner object which matches an existing member (e.g. implements an interface)
+ * receives an accessor DefDef to implement the interface.
+ * 3) An inner object otherwise receives a private ValDef which declares a module var
+ * (the field which holds the module class - it has a name like Foo$module) and an
+ * accessor for that field. The instance is created lazily, on first access.
*/
- private def eliminateModuleDefs(tree: Tree): List[Tree] = {
- val ModuleDef(mods, name, impl) = tree
- val sym = tree.symbol
- val classSym = sym.moduleClass
- val cdef = ClassDef(mods | MODULE, name.toTypeName, Nil, impl) setSymbol classSym setType NoType
-
- def findOrCreateModuleVar() = localTyper.typedPos(tree.pos) {
- // See SI-5012, SI-6712.
+ private def eliminateModuleDefs(moduleDef: Tree): List[Tree] = exitingRefchecks {
+ val ModuleDef(mods, name, impl) = moduleDef
+ val module = moduleDef.symbol
+ val site = module.owner
+ val moduleName = module.name.toTermName
+ // The typer doesn't take kindly to seeing this ClassDef; we have to
+ // set NoType so it will be ignored.
+ val cdef = ClassDef(module.moduleClass, impl) setType NoType
+
+ // Create the module var unless the immediate owner is a class and
+ // the module var already exists there. See SI-5012, SI-6712.
+ def findOrCreateModuleVar() = {
val vsym = (
- if (sym.owner.isTerm) NoSymbol
- else sym.enclClass.info.decl(nme.moduleVarName(sym.name.toTermName))
+ if (site.isTerm) NoSymbol
+ else site.info decl nme.moduleVarName(moduleName)
)
- // In case we are dealing with local symbol then we already have
- // to correct error with forward reference
- if (vsym == NoSymbol) gen.mkModuleVarDef(sym)
- else ValDef(vsym)
+ vsym orElse (site newModuleVarSymbol module)
}
- def createStaticModuleAccessor() = afterRefchecks {
- val method = (
- sym.owner.newMethod(sym.name.toTermName, sym.pos, (sym.flags | STABLE) & ~MODULE)
- setInfoAndEnter NullaryMethodType(sym.moduleClass.tpe)
- )
- localTyper.typedPos(tree.pos)(gen.mkModuleAccessDef(method, sym))
+ def newInnerObject() = {
+ // Create the module var unless it is already in the module owner's scope.
+ // The lookup is on module.enclClass and not module.owner lest there be a
+ // nullary method between us and the class; see SI-5012.
+ val moduleVar = findOrCreateModuleVar()
+ val rhs = gen.newModule(module, moduleVar.tpe)
+ val body = if (site.isTrait) rhs else gen.mkAssignAndReturn(moduleVar, rhs)
+ val accessor = DefDef(module, body.changeOwner(moduleVar -> module))
+
+ ValDef(moduleVar) :: accessor :: Nil
}
- def createInnerModuleAccessor(vdef: Tree) = List(
- vdef,
- localTyper.typedPos(tree.pos) {
- val vsym = vdef.symbol
- afterRefchecks {
- val rhs = gen.newModule(sym, vsym.tpe)
- val body = if (sym.owner.isTrait) rhs else gen.mkAssignAndReturn(vsym, rhs)
- DefDef(sym, body.changeOwner(vsym -> sym))
- }
- }
- )
- transformTrees(cdef :: {
- if (!sym.isStatic)
- createInnerModuleAccessor(findOrCreateModuleVar)
- else if (sym.isOverridingSymbol)
- List(createStaticModuleAccessor())
+ def matchingInnerObject() = {
+ val newFlags = (module.flags | STABLE) & ~MODULE
+ val newInfo = NullaryMethodType(module.moduleClass.tpe)
+ val accessor = site.newMethod(moduleName, module.pos, newFlags) setInfoAndEnter newInfo
+
+ DefDef(accessor, Select(This(site), module)) :: Nil
+ }
+ val newTrees = cdef :: (
+ if (module.isStatic)
+ if (module.isOverridingSymbol) matchingInnerObject() else Nil
else
- Nil
- })
+ newInnerObject()
+ )
+ transformTrees(newTrees map localTyper.typedPos(moduleDef.pos))
}
def transformStat(tree: Tree, index: Int): List[Tree] = tree match {
@@ -1313,7 +1179,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
case ModuleDef(_, _, _) => eliminateModuleDefs(tree)
case ValDef(_, _, _, _) =>
- val tree1 @ ValDef(_, _, _, rhs) = transform(tree) // important to do before forward reference check
+ val tree1 = transform(tree) // important to do before forward reference check
if (tree1.symbol.isLazy) tree1 :: Nil
else {
val lazySym = tree.symbol.lazyAccessorOrSelf
@@ -1373,12 +1239,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
*/
private def checkMigration(sym: Symbol, pos: Position) = {
if (sym.hasMigrationAnnotation) {
- val changed = try
+ val changed = try
settings.Xmigration.value < ScalaVersion(sym.migrationVersion.get)
catch {
- case e : NumberFormatException =>
+ case e : NumberFormatException =>
unit.warning(pos, s"${sym.fullLocationString} has an unparsable version number: ${e.getMessage()}")
- // if we can't parse the format on the migration annotation just conservatively assume it changed
+ // if we can't parse the format on the migration annotation just conservatively assume it changed
true
}
if (changed)
@@ -1445,7 +1311,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// if the unnormalized type is accessible, that's good enough
if (inaccessible.isEmpty) ()
// or if the normalized type is, that's good too
- else if ((tpe ne tpe.normalize) && lessAccessibleSymsInType(tpe.normalize, member).isEmpty) ()
+ else if ((tpe ne tpe.normalize) && lessAccessibleSymsInType(tpe.dealiasWiden, member).isEmpty) ()
// otherwise warn about the inaccessible syms in the unnormalized type
else inaccessible foreach (sym => warnLessAccessible(sym, member))
}
@@ -1536,9 +1402,13 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case TypeApply(fun, targs) =>
isClassTypeAccessible(fun)
case Select(module, apply) =>
- // Fixes SI-5626. Classes in refinement types cannot be constructed with `new`. In this case,
- // the companion class is actually not a ClassSymbol, but a reference to an abstract type.
- module.symbol.companionClass.isClass
+ ( // SI-4859 `CaseClass1().InnerCaseClass2()` must not be rewritten to `new InnerCaseClass2()`;
+ // {expr; Outer}.Inner() must not be rewritten to `new Outer.Inner()`.
+ treeInfo.isQualifierSafeToElide(module) &&
+ // SI-5626 Classes in refinement types cannot be constructed with `new`. In this case,
+ // the companion class is actually not a ClassSymbol, but a reference to an abstract type.
+ module.symbol.companionClass.isClass
+ )
}
val doTransform =
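A hedged illustration of the SI-4859 guard added above (hypothetical user code): the qualifier of a case-class apply can carry side effects, so it may only be dropped when treeInfo.isQualifierSafeToElide says so.

case class Outer2() { println("created"); case class Inner(n: Int) }

object QualifierDemo {
  // Rewriting this to a bare `new Inner(42)` would drop the `Outer2()` prefix
  // and, with it, the side effect of constructing the outer instance.
  val i = Outer2().Inner(42)
}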
@@ -1582,7 +1452,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
tree
}
private def transformSelect(tree: Select): Tree = {
- val Select(qual, name) = tree
+ val Select(qual, _) = tree
val sym = tree.symbol
/** Note: if a symbol has both @deprecated and @migration annotations and both
@@ -1596,18 +1466,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
checkMigration(sym, tree.pos)
checkCompileTimeOnly(sym, tree.pos)
- if (sym eq NoSymbol) {
- unit.warning(tree.pos, "Select node has NoSymbol! " + tree + " / " + tree.tpe)
- }
- else if (currentClass != sym.owner && sym.hasLocalFlag) {
- var o = currentClass
- var hidden = false
- while (!hidden && o != sym.owner && o != sym.owner.moduleClass && !o.isPackage) {
- hidden = o.isTerm || o.isPrivateLocal
- o = o.owner
- }
- if (!hidden) escapedPrivateLocals += sym
- }
+ if (sym eq NoSymbol)
+ devWarning("Select node has NoSymbol! " + tree + " / " + tree.tpe)
+ else if (sym.hasLocalFlag)
+ varianceValidator.checkForEscape(sym, currentClass)
def checkSuper(mix: Name) =
// term should have been eliminated by super accessors
@@ -1640,8 +1502,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// on Unit, in which case we had better let it slide.
val isOk = (
sym.isGetter
- || sym.allOverriddenSymbols.exists(over => !(over.tpe.resultType =:= sym.tpe.resultType))
|| (sym.name containsName nme.DEFAULT_GETTER_STRING)
+ || sym.allOverriddenSymbols.exists(over => !(over.tpe.resultType =:= sym.tpe.resultType))
)
if (!isOk)
unit.warning(sym.pos, s"side-effecting nullary methods are discouraged: suggest defining as `def ${sym.name.decode}()` instead")
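A hedged illustration of the check above (hypothetical user code): a parameterless def used purely for its side effect is flagged, and the suggested fix is the empty-parameter-list form.

class Logger {
  def flush: Unit = println("flushing")       // flagged: suggest `def flush()` instead
  def flushOk(): Unit = println("flushing")   // empty-paren form, not flagged
}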
@@ -1772,7 +1634,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
result match {
case ClassDef(_, _, _, _)
| TypeDef(_, _, _, _) =>
- if (result.symbol.isLocal || result.symbol.owner.isPackageClass)
+ if (result.symbol.isLocal || result.symbol.isTopLevel)
varianceValidator.traverse(result)
case _ =>
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
index 64c5b41638..64fcda3b80 100644
--- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
@@ -4,7 +4,127 @@ package typechecker
trait StdAttachments {
self: Analyzer =>
+ import global._
+
+ /** Carries information necessary to expand the host tree.
+ * At times we need to store this info, because macro expansion can be delayed until its targs are inferred.
+ * After a macro application has been successfully expanded, this attachment is destroyed.
+ */
type UnaffiliatedMacroContext = scala.reflect.macros.runtime.Context
type MacroContext = UnaffiliatedMacroContext { val universe: self.global.type }
case class MacroRuntimeAttachment(delayed: Boolean, typerContext: Context, macroContext: Option[MacroContext])
+
+ /** Scratchpad for the macro expander, which is used to store all intermediate data except the details about the runtime.
+ */
+ case class MacroExpanderAttachment(original: Tree, desugared: Tree, role: MacroRole)
+
+ /** Loads underlying MacroExpanderAttachment from a macro expandee or returns a default value for that attachment.
+ */
+ def macroExpanderAttachment(tree: Tree): MacroExpanderAttachment =
+ tree.attachments.get[MacroExpanderAttachment] getOrElse {
+ tree match {
+ case Apply(fn, _) if tree.isInstanceOf[ApplyToImplicitArgs] => macroExpanderAttachment(fn)
+ case _ => MacroExpanderAttachment(tree, EmptyTree, APPLY_ROLE)
+ }
+ }
+
+ /** After macro expansion is completed, links the expandee and the expansion result
+ * by annotating them both with a `MacroExpansionAttachment`.
+ */
+ def linkExpandeeAndDesugared(expandee: Tree, desugared: Tree, role: MacroRole): Unit = {
+ val metadata = MacroExpanderAttachment(expandee, desugared, role)
+ expandee updateAttachment metadata
+ desugared updateAttachment metadata
+ }
+
+ /** Is added by the macro engine to originals and results of macro expansions.
+ * Stores the original expandee as it entered the `macroExpand` function.
+ */
+ case class MacroExpansionAttachment(expandee: Tree, expanded: Any)
+
+ /** Determines whether the target is either an original or a result of a macro expansion.
+ * The parameter is of type `Any`, because macros can expand both into trees and into annotations.
+ */
+ def hasMacroExpansionAttachment(any: Any): Boolean = any match {
+ case tree: Tree => tree.attachments.get[MacroExpansionAttachment].isDefined
+ case _ => false
+ }
+
+ /** After macro expansion is completed, links the expandee and the expansion result by annotating them both with a `MacroExpansionAttachment`.
+ * The `expanded` parameter is of type `Any`, because macros can expand both into trees and into annotations.
+ */
+ def linkExpandeeAndExpanded(expandee: Tree, expanded: Any): Unit = {
+ val metadata = MacroExpansionAttachment(expandee, expanded)
+ expandee updateAttachment metadata
+ expanded match {
+ case expanded: Tree => expanded updateAttachment metadata
+ case _ => // do nothing
+ }
+ }
+
+ /** Checks whether there is any tree resulting from a macro expansion and associated with the current tree.
+ */
+ object ExpandedIntoTree {
+ def unapply(tree: Tree): Option[Tree] = tree.attachments.get[MacroExpansionAttachment] match {
+ case Some(MacroExpansionAttachment(_, tree: Tree)) => Some(tree)
+ case _ => None
+ }
+ }
+
+ /** When present, suppresses macro expansion for the host.
+ * This is occasionally necessary, e.g. to prohibit eta-expansion of macros.
+ *
+ * Does not affect expandability of child nodes, there's context.withMacrosDisabled for that
+ * (but think thrice before using that API - see the discussion at https://github.com/scala/scala/pull/1639).
+ */
+ case object SuppressMacroExpansionAttachment
+
+ /** Suppresses macro expansion of the tree by putting SuppressMacroExpansionAttachment on it.
+ */
+ def suppressMacroExpansion(tree: Tree) = tree.updateAttachment(SuppressMacroExpansionAttachment)
+
+ /** Unsuppresses macro expansion of the tree by removing SuppressMacroExpansionAttachment from it and its children.
+ */
+ def unsuppressMacroExpansion(tree: Tree): Tree = {
+ tree.removeAttachment[SuppressMacroExpansionAttachment.type]
+ tree match {
+ // see the comment to `isMacroExpansionSuppressed` to learn why we need
+ // a special traversal strategy here
+ case Apply(fn, _) => unsuppressMacroExpansion(fn)
+ case TypeApply(fn, _) => unsuppressMacroExpansion(fn)
+ case _ => // do nothing
+ }
+ tree
+ }
+
+ /** Determines whether a tree should not be expanded, because someone has put SuppressMacroExpansionAttachment on it or one of its children.
+ */
+ def isMacroExpansionSuppressed(tree: Tree): Boolean =
+ if (tree.attachments.get[SuppressMacroExpansionAttachment.type].isDefined) true
+ else tree match {
+ // we have to account for the fact that during typechecking an expandee might become wrapped,
+ // i.e. surrounded by an inferred implicit argument application or by an inferred type argument application.
+ // in that case the expandee itself will no longer be suppressed and we need to look at the core
+ case Apply(fn, _) => isMacroExpansionSuppressed(fn)
+ case TypeApply(fn, _) => isMacroExpansionSuppressed(fn)
+ case _ => false
+ }
+
+ /** After being synthesized by the parser, primary constructors aren't fully baked yet.
+ * A call to super in such constructors is just a fill-me-in-later dummy resolved later
+ * by `parentTypes`. This attachment coordinates `parentTypes` and `typedTemplate` and
+ * allows them to complete the synthesis.
+ */
+ case class SuperArgsAttachment(argss: List[List[Tree]])
+
+ /** Convenience method for `SuperArgsAttachment`.
+ * Compared with `MacroRuntimeAttachment` this attachment has a different usage pattern,
+ * so it really benefits from a dedicated extractor.
+ */
+ def superArgs(tree: Tree): Option[List[List[Tree]]] =
+ tree.attachments.get[SuperArgsAttachment] collect { case SuperArgsAttachment(argss) => argss }
+
+ /** Determines whether the given tree has an associated SuperArgsAttachment.
+ */
+ def hasSuperArgs(tree: Tree): Boolean = superArgs(tree).nonEmpty
} \ No newline at end of file
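A hedged, self-contained sketch of the peeling strategy used by isMacroExpansionSuppressed and unsuppressMacroExpansion above, written against a toy tree type rather than the compiler's Tree (all names below are illustrative): during typechecking an expandee can end up wrapped in inferred type-argument or implicit-argument applications, so the suppression flag must be looked up on, and removed from, the application's core.

object SuppressSketch {
  sealed trait T
  case class Apply(fn: T, args: List[T]) extends T       // stands in for an inferred implicit args wrapper
  case class TypeApply(fn: T, targs: List[T]) extends T  // stands in for an inferred type args wrapper
  case class Core(name: String, suppressed: Boolean) extends T

  def isSuppressed(t: T): Boolean = t match {
    case Apply(fn, _)     => isSuppressed(fn)   // look through the wrappers ...
    case TypeApply(fn, _) => isSuppressed(fn)
    case Core(_, s)       => s                  // ... the core carries the flag
  }

  // e.g. isSuppressed(Apply(TypeApply(Core("macroCall", suppressed = true), Nil), Nil)) == true
}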
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 67639eb530..e8925ce2d0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -60,8 +60,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val clazz = qual.symbol
val supername = nme.superName(name)
val superAcc = clazz.info.decl(supername).suchThat(_.alias == sym) orElse {
- debuglog("add super acc " + sym + sym.locationString + " to `" + clazz);//debug
- val acc = clazz.newMethod(supername, sel.pos, SUPERACCESSOR | PRIVATE) setAlias sym
+ debuglog(s"add super acc ${sym.fullLocationString} to $clazz")
+ val acc = clazz.newMethod(supername, sel.pos, SUPERACCESSOR | PRIVATE | ARTIFACT) setAlias sym
val tpe = clazz.thisType memberType sym match {
case t if sym.isModule && !sym.isMethod => NullaryMethodType(t)
case t => t
@@ -129,11 +129,11 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val clazz = sup.symbol
if (sym.isDeferred) {
- val member = sym.overridingSymbol(clazz);
+ val member = sym.overridingSymbol(clazz)
if (mix != tpnme.EMPTY || member == NoSymbol ||
!(member.isAbstractOverride && member.isIncompleteIn(clazz)))
unit.error(sel.pos, ""+sym.fullLocationString+" is accessed from super. It may not be abstract "+
- "unless it is overridden by a member declared `abstract' and `override'");
+ "unless it is overridden by a member declared `abstract' and `override'")
} else if (mix == tpnme.EMPTY && !sym.owner.isTrait){
// SI-4989 Check if an intermediate class between `clazz` and `sym.owner` redeclares the method as abstract.
val intermediateClasses = clazz.info.baseClasses.tail.takeWhile(_ != sym.owner)
@@ -186,18 +186,6 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
log("Expanded '%s' to '%s' in %s".format(savedName, s.name, sym))
}
}
- if (settings.verbose.value && forScaladoc && !sym.isAnonymousClass) {
- println("========== scaladoc of "+sym+" =============================")
- println(toJavaDoc(expandedDocComment(sym)))
- for (member <- sym.info.members) {
- println(member+":"+sym.thisType.memberInfo(member)+"\n"+
- toJavaDoc(expandedDocComment(member, sym)))
- for ((useCase, comment, pos) <- useCases(member, sym)) {
- println("usecase "+useCase+":"+useCase.info)
- println(toJavaDoc(comment))
- }
- }
- }
super.transform(tree)
}
transformClassDef
@@ -224,7 +212,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
transformTemplate
case TypeApply(sel @ Select(This(_), name), args) =>
- mayNeedProtectedAccessor(sel, args, false)
+ mayNeedProtectedAccessor(sel, args, goToSuper = false)
// set a flag for all type parameters with `@specialized` annotation so it can be pickled
case typeDef: TypeDef if typeDef.symbol.deSkolemize.hasAnnotation(definitions.SpecializedClass) =>
@@ -274,7 +262,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
Select(Super(qual, tpnme.EMPTY) setPos qual.pos, sym.alias)
}).asInstanceOf[Select]
debuglog("alias replacement: " + tree + " ==> " + result); //debug
- localTyper.typed(gen.maybeMkAsInstanceOf(transformSuperSelect(result), sym.tpe, sym.alias.tpe, true))
+ localTyper.typed(gen.maybeMkAsInstanceOf(transformSuperSelect(result), sym.tpe, sym.alias.tpe, beforeRefChecks = true))
} else {
/**
* A trait which extends a class and accesses a protected member
@@ -295,13 +283,14 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
&& !sym.owner.isTrait
&& (sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass)
&& (qual.symbol.info.member(sym.name) ne NoSymbol)
- && !needsProtectedAccessor(sym, tree.pos))
+ && !needsProtectedAccessor(sym, tree.pos)
+ )
if (shouldEnsureAccessor) {
log("Ensuring accessor for call to protected " + sym.fullLocationString + " from " + currentClass)
ensureAccessor(sel)
}
else
- mayNeedProtectedAccessor(sel, EmptyTree.asList, false)
+ mayNeedProtectedAccessor(sel, EmptyTree.asList, goToSuper = false)
}
case Super(_, mix) =>
@@ -314,7 +303,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
transformSuperSelect(sel)
case _ =>
- mayNeedProtectedAccessor(sel, EmptyTree.asList, true)
+ mayNeedProtectedAccessor(sel, EmptyTree.asList, goToSuper = true)
}
}
transformSelect
@@ -323,7 +312,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, withInvalidOwner(transform(rhs)))
case TypeApply(sel @ Select(qual, name), args) =>
- mayNeedProtectedAccessor(sel, args, true)
+ mayNeedProtectedAccessor(sel, args, goToSuper = true)
case Assign(lhs @ Select(qual, name), rhs) =>
def transformAssign = {
@@ -331,8 +320,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
lhs.symbol.isJavaDefined &&
needsProtectedAccessor(lhs.symbol, tree.pos)) {
debuglog("Adding protected setter for " + tree)
- val setter = makeSetter(lhs);
- debuglog("Replaced " + tree + " with " + setter);
+ val setter = makeSetter(lhs)
+ debuglog("Replaced " + tree + " with " + setter)
transform(localTyper.typed(Apply(setter, List(qual, rhs))))
} else
super.transform(tree)
@@ -391,7 +380,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
* typed.
*/
private def makeAccessor(tree: Select, targs: List[Tree]): Tree = {
- val Select(qual, name) = tree
+ val Select(qual, _) = tree
val sym = tree.symbol
val clazz = hostForAccessorOf(sym, currentClass)
@@ -416,7 +405,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
val protAcc = clazz.info.decl(accName).suchThat(s => s == NoSymbol || s.tpe =:= accType(s)) orElse {
- val newAcc = clazz.newMethod(nme.protName(sym.originalName), tree.pos)
+ val newAcc = clazz.newMethod(nme.protName(sym.originalName), tree.pos, newFlags = ARTIFACT)
newAcc setInfoAndEnter accType(newAcc)
val code = DefDef(newAcc, {
@@ -427,7 +416,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
args.foldLeft(base)(Apply(_, _))
})
- debuglog("" + code)
+ debuglog("created protected accessor: " + code)
storeAccessorDefinition(clazz, code)
newAcc
}
@@ -439,7 +428,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
case _ => mkApply(TypeApply(selection, targs))
}
}
- debuglog("Replaced " + tree + " with " + res)
+ debuglog(s"Replaced $tree with $res")
if (hasArgs) localTyper.typedOperator(res) else localTyper.typed(res)
}
@@ -478,7 +467,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val accName = nme.protSetterName(field.originalName)
val protectedAccessor = clazz.info decl accName orElse {
- val protAcc = clazz.newMethod(accName, field.pos)
+ val protAcc = clazz.newMethod(accName, field.pos, newFlags = ARTIFACT)
val paramTypes = List(clazz.typeOfThis, field.tpe)
val params = protAcc newSyntheticValueParams paramTypes
val accessorType = MethodType(params, UnitClass.tpe)
@@ -510,9 +499,6 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
def accessibleThroughSubclassing =
validCurrentOwner && clazz.thisSym.isSubClass(sym.owner) && !clazz.isTrait
- def packageAccessBoundry(sym: Symbol) =
- sym.accessBoundary(sym.enclosingPackageClass)
-
val isCandidate = (
sym.isProtected
&& sym.isJavaDefined
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index 242eb9c9fe..a2b0530c26 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -6,9 +6,8 @@
package scala.tools.nsc
package typechecker
-import symtab.Flags
+import scala.collection.{ mutable, immutable }
import symtab.Flags._
-import scala.collection.mutable
import scala.collection.mutable.ListBuffer
/** Synthetic method implementations for case classes and case objects.
@@ -94,7 +93,7 @@ trait SyntheticMethods extends ast.TreeDSL {
// like Tags and Arrays which are not robust and infer things
// which they shouldn't.
val accessorLub = (
- if (opt.experimental) {
+ if (settings.Xexperimental.value) {
global.weakLub(accessors map (_.tpe.finalResultType))._1 match {
case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents)
case tp => tp
@@ -121,20 +120,11 @@ trait SyntheticMethods extends ast.TreeDSL {
(m0 ne meth) && !m0.isDeferred && !m0.isSynthetic && (m0.owner != AnyValClass) && (typeInClazz(m0) matches typeInClazz(meth))
}
}
- def readConstantValue[T](name: String, default: T = null.asInstanceOf[T]): T = {
- clazzMember(newTermName(name)).info match {
- case NullaryMethodType(ConstantType(Constant(value))) => value.asInstanceOf[T]
- case _ => default
- }
- }
def productIteratorMethod = {
createMethod(nme.productIterator, iteratorOfType(accessorLub))(_ =>
gen.mkMethodCall(ScalaRunTimeModule, nme.typedProductIterator, List(accessorLub), List(mkThis))
)
}
- def projectionMethod(accessor: Symbol, num: Int) = {
- createMethod(nme.productAccessorName(num), accessor.tpe.resultType)(_ => REF(accessor))
- }
/** Common code for productElement and (currently disabled) productElementName
*/
@@ -238,10 +228,15 @@ trait SyntheticMethods extends ast.TreeDSL {
/** The _1, _2, etc. methods to implement ProductN, disabled
* until we figure out how to introduce ProductN without cycles.
*/
- def productNMethods = {
+ /****
+ def productNMethods = {
val accs = accessors.toIndexedSeq
1 to arity map (num => productProj(arity, num) -> (() => projectionMethod(accs(num - 1), num)))
}
+ def projectionMethod(accessor: Symbol, num: Int) = {
+ createMethod(nme.productAccessorName(num), accessor.tpe.resultType)(_ => REF(accessor))
+ }
+ ****/
// methods for both classes and objects
def productMethods = {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Tags.scala b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
index d82fbd7c77..d2d7f57aef 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Tags.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
@@ -10,16 +10,16 @@ trait Tags {
trait Tag {
self: Typer =>
- private def resolveTag(pos: Position, taggedTp: Type, allowMaterialization: Boolean) = beforeTyper {
+ private def resolveTag(pos: Position, taggedTp: Type, allowMaterialization: Boolean) = enteringTyper {
def wrapper (tree: => Tree): Tree = if (allowMaterialization) (context.withMacrosEnabled[Tree](tree)) else (context.withMacrosDisabled[Tree](tree))
wrapper(inferImplicit(
EmptyTree,
taggedTp,
- /*reportAmbiguous =*/ true,
- /*isView =*/ false,
- /*context =*/ context,
- /*saveAmbiguousDivergent =*/ true,
- /*pos =*/ pos
+ reportAmbiguous = true,
+ isView = false,
+ context,
+ saveAmbiguousDivergent = true,
+ pos
).tree)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index 88d10f1d72..5c863469e4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc
package typechecker
-import scala.tools.nsc.symtab.Flags._
import scala.collection.mutable
import mutable.ListBuffer
import util.returning
@@ -144,19 +143,12 @@ abstract class TreeCheckers extends Analyzer {
currentRun.units foreach (x => wrap(x)(check(x)))
}
- def printingTypings[T](body: => T): T = {
- val saved = global.printTypings
- global.printTypings = true
- val result = body
- global.printTypings = saved
- result
- }
def runWithUnit[T](unit: CompilationUnit)(body: => Unit): Unit = {
hasError = false
val unit0 = currentUnit
currentRun.currentUnit = unit
body
- currentRun.advanceUnit
+ currentRun.advanceUnit()
assertFn(currentUnit == unit, "currentUnit is " + currentUnit + ", but unit is " + unit)
currentRun.currentUnit = unit0
}
@@ -164,7 +156,7 @@ abstract class TreeCheckers extends Analyzer {
informProgress("checking "+unit)
val context = rootContext(unit)
context.checking = true
- tpeOfTree.clear
+ tpeOfTree.clear()
SymbolTracker.check(phase, unit)
val checker = new TreeChecker(context)
runWithUnit(unit) {
@@ -189,10 +181,6 @@ abstract class TreeCheckers extends Analyzer {
errorFn(t1.pos, "trees differ\n old: " + treestr(t1) + "\n new: " + treestr(t2))
private def typesDiffer(tree: Tree, tp1: Type, tp2: Type) =
errorFn(tree.pos, "types differ\n old: " + tp1 + "\n new: " + tp2 + "\n tree: " + tree)
- private def ownersDiffer(tree: Tree, shouldBe: Symbol) = {
- val sym = tree.symbol
- errorFn(tree.pos, sym + " has wrong owner: " + ownerstr(sym.owner) + ", should be: " + ownerstr(shouldBe))
- }
/** XXX Disabled reporting of position errors until there is less noise. */
private def noPos(t: Tree) =
@@ -204,14 +192,11 @@ abstract class TreeCheckers extends Analyzer {
if (t.symbol == NoSymbol)
errorFn(t.pos, "no symbol: " + treestr(t))
- override def typed(tree: Tree, mode: Int, pt: Type): Tree = returning(tree) {
+ override def typed(tree: Tree, mode: Mode, pt: Type): Tree = returning(tree) {
case EmptyTree | TypeTree() => ()
case _ if tree.tpe != null =>
- tpeOfTree.getOrElseUpdate(tree, {
- val saved = tree.tpe
- tree.tpe = null
- saved
- })
+ tpeOfTree.getOrElseUpdate(tree, try tree.tpe finally tree.clearType())
+
wrap(tree)(super.typed(tree, mode, pt) match {
case _: Literal => ()
case x if x ne tree => treesDiffer(tree, x)
@@ -284,7 +269,7 @@ abstract class TreeCheckers extends Analyzer {
def cond(s: Symbol) = !s.isTerm || s.isMethod || s == sym.owner
if (sym.owner != currentOwner) {
- val expected = currentOwner.ownerChain find (x => cond(x)) getOrElse fail("DefTree can't find owner: ")
+ val expected = currentOwner.ownerChain find (x => cond(x)) getOrElse { fail("DefTree can't find owner: ") ; NoSymbol }
if (sym.owner != expected)
fail(sm"""|
| currentOwner chain: ${currentOwner.ownerChain take 3 mkString " -> "}
@@ -344,7 +329,7 @@ abstract class TreeCheckers extends Analyzer {
if (oldtpe =:= tree.tpe) ()
else typesDiffer(tree, oldtpe, tree.tpe)
- tree.tpe = oldtpe
+ tree setType oldtpe
super.traverse(tree)
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 9376cb5237..46740cd03c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -8,7 +8,6 @@ package typechecker
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
-import scala.util.control.ControlThrowable
import scala.util.control.Exception.ultimately
import symtab.Flags._
import PartialFunction._
@@ -37,15 +36,6 @@ trait TypeDiagnostics {
import global._
import definitions._
- import global.typer.{ infer, context }
-
- /** The common situation of making sure nothing is erroneous could be
- * nicer if Symbols, Types, and Trees all implemented some common interface
- * in which isErroneous and similar would be placed.
- */
- def noErroneousTypes(tps: Type*) = tps forall (x => !x.isErroneous)
- def noErroneousSyms(syms: Symbol*) = syms forall (x => !x.isErroneous)
- def noErroneousTrees(trees: Tree*) = trees forall (x => !x.isErroneous)
/** For errors which are artifacts of the implementation: such messages
* indicate that the restriction may be lifted in the future.
@@ -58,7 +48,7 @@ trait TypeDiagnostics {
/** A map of Positions to addendums - if an error involves a position in
* the map, the addendum should also be printed.
*/
- private var addendums = perRunCaches.newMap[Position, () => String]()
+ private val addendums = perRunCaches.newMap[Position, () => String]()
private var isTyperInPattern = false
/** Devising new ways of communicating error info out of
@@ -174,11 +164,6 @@ trait TypeDiagnostics {
case xs => " where " + (disambiguate(xs map (_.existentialToString)) mkString ", ")
}
- def varianceWord(sym: Symbol): String =
- if (sym.variance == 1) "covariant"
- else if (sym.variance == -1) "contravariant"
- else "invariant"
-
def explainAlias(tp: Type) = {
// Don't automatically normalize standard aliases; they still will be
// expanded if necessary to disambiguate simple identifiers.
@@ -223,12 +208,12 @@ trait TypeDiagnostics {
// force measures than comparing normalized Strings were producing error messages
// like "and java.util.ArrayList[String] <: java.util.ArrayList[String]" but there
// should be a cleaner way to do this.
- if (found.normalize.toString == tp.normalize.toString) ""
+ if (found.dealiasWiden.toString == tp.dealiasWiden.toString) ""
else " (and %s <: %s)".format(found, tp)
)
val explainDef = {
val prepend = if (isJava) "Java-defined " else ""
- "%s%s is %s in %s.".format(prepend, reqsym, varianceWord(param), param)
+ "%s%s is %s in %s.".format(prepend, reqsym, param.variance, param)
}
// Don't suggest they change the class declaration if it's somewhere
// under scala.* or defined in a java class, because attempting either
@@ -248,11 +233,11 @@ trait TypeDiagnostics {
|| ((arg <:< reqArg) && param.isCovariant)
|| ((reqArg <:< arg) && param.isContravariant)
)
- val invariant = param.variance == 0
+ val invariant = param.variance.isInvariant
if (conforms) Some("")
- else if ((arg <:< reqArg) && invariant) mkMsg(true) // covariant relationship
- else if ((reqArg <:< arg) && invariant) mkMsg(false) // contravariant relationship
+ else if ((arg <:< reqArg) && invariant) mkMsg(isSubtype = true) // covariant relationship
+ else if ((reqArg <:< arg) && invariant) mkMsg(isSubtype = false) // contravariant relationship
else None // we assume in other cases our ham-fisted advice will merely serve to confuse
}
val messages = relationships.flatten
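A hedged illustration of when the invariant-parameter advice above applies (hypothetical user code): String conforms to Any, but Cell is invariant in A, so the mismatch hits mkMsg(isSubtype = true) and the addendum suggests considering a covariant parameter.

class Cell[A]
object VarianceAdviceDemo {
  // val c: Cell[Any] = new Cell[String]   // does not compile; draws the addendum above
  val ok: Cell[String] = new Cell[String]  // exact argument match, no advice needed
}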
@@ -291,7 +276,6 @@ trait TypeDiagnostics {
// distinguished from the other types in the same error message
private val savedName = sym.name
def restoreName() = sym.name = savedName
- def isAltered = sym.name != savedName
def modifyName(f: String => String) = sym setName newTypeName(f(sym.name.toString))
/** Prepend java.lang, scala., or Predef. if this type originated
@@ -421,6 +405,122 @@ trait TypeDiagnostics {
def permanentlyHiddenWarning(pos: Position, hidden: Name, defn: Symbol) =
contextWarning(pos, "imported `%s' is permanently hidden by definition of %s".format(hidden, defn.fullLocationString))
+ object checkUnused {
+ val ignoreNames = Set[TermName]("readResolve", "readObject", "writeObject", "writeReplace")
+
+ class UnusedPrivates extends Traverser {
+ val defnTrees = ListBuffer[MemberDef]()
+ val targets = mutable.Set[Symbol]()
+ val setVars = mutable.Set[Symbol]()
+ val treeTypes = mutable.Set[Type]()
+
+ def defnSymbols = defnTrees.toList map (_.symbol)
+ def localVars = defnSymbols filter (t => t.isLocal && t.isVar)
+
+ def qualifiesTerm(sym: Symbol) = (
+ (sym.isModule || sym.isMethod || sym.isPrivateLocal || sym.isLocal)
+ && !nme.isLocalName(sym.name)
+ && !sym.isParameter
+ && !sym.isParamAccessor // could improve this, but it's a pain
+ && !sym.isEarlyInitialized // lots of false positives in the way these are encoded
+ && !(sym.isGetter && sym.accessed.isEarlyInitialized)
+ )
+ def qualifiesType(sym: Symbol) = !sym.isDefinedInPackage
+ def qualifies(sym: Symbol) = (
+ (sym ne null)
+ && (sym.isTerm && qualifiesTerm(sym) || sym.isType && qualifiesType(sym))
+ )
+
+ override def traverse(t: Tree): Unit = {
+ t match {
+ case t: MemberDef if qualifies(t.symbol) => defnTrees += t
+ case t: RefTree if t.symbol ne null => targets += t.symbol
+ case Assign(lhs, _) if lhs.symbol != null => setVars += lhs.symbol
+ case _ =>
+ }
+ // Only record type references which don't originate within the
+ // definition of the class being referenced.
+ if (t.tpe ne null) {
+ for (tp <- t.tpe ; if !treeTypes(tp) && !currentOwner.ownerChain.contains(tp.typeSymbol)) {
+ tp match {
+ case NoType | NoPrefix =>
+ case NullaryMethodType(_) =>
+ case MethodType(_, _) =>
+ case _ =>
+ log(s"$tp referenced from $currentOwner")
+ treeTypes += tp
+ }
+ }
+ // e.g. val a = new Foo ; new a.Bar ; don't let a be reported as unused.
+ t.tpe.prefix foreach {
+ case SingleType(_, sym) => targets += sym
+ case _ =>
+ }
+ }
+ super.traverse(t)
+ }
+ def isUnusedType(m: Symbol): Boolean = (
+ m.isType
+ && !m.isTypeParameterOrSkolem // would be nice to improve this
+ && (m.isPrivate || m.isLocal)
+ && !(treeTypes.exists(tp => tp exists (t => t.typeSymbolDirect == m)))
+ )
+ def isUnusedTerm(m: Symbol): Boolean = (
+ (m.isTerm)
+ && (m.isPrivate || m.isLocal)
+ && !targets(m)
+ && !(m.name == nme.WILDCARD) // e.g. val _ = foo
+ && !ignoreNames(m.name.toTermName) // serialization methods
+ && !isConstantType(m.info.resultType) // subject to constant inlining
+ && !treeTypes.exists(_ contains m) // e.g. val a = new Foo ; new a.Bar
+ )
+ def unusedTypes = defnTrees.toList filter (t => isUnusedType(t.symbol))
+ def unusedTerms = defnTrees.toList filter (v => isUnusedTerm(v.symbol))
+ // local vars which are never set, except those already returned in unused
+ def unsetVars = localVars filter (v => !setVars(v) && !isUnusedTerm(v))
+ }
+
+ def apply(unit: CompilationUnit) = {
+ warnUnusedImports(unit)
+
+ val p = new UnusedPrivates
+ p traverse unit.body
+ val unused = p.unusedTerms
+ unused foreach { defn: DefTree =>
+ val sym = defn.symbol
+ val isDefaultGetter = sym.name containsName nme.DEFAULT_GETTER_STRING
+ val pos = (
+ if (defn.pos.isDefined) defn.pos
+ else if (sym.pos.isDefined) sym.pos
+ else sym match {
+ case sym: TermSymbol => sym.referenced.pos
+ case _ => NoPosition
+ }
+ )
+ val why = if (sym.isPrivate) "private" else "local"
+ val what = (
+ if (isDefaultGetter) "default argument"
+ else if (sym.isConstructor) "constructor"
+ else if (sym.isVar || sym.isGetter && sym.accessed.isVar) "var"
+ else if (sym.isVal || sym.isGetter && sym.accessed.isVal) "val"
+ else if (sym.isSetter) "setter"
+ else if (sym.isMethod) "method"
+ else if (sym.isModule) "object"
+ else "term"
+ )
+ unit.warning(pos, s"$why $what in ${sym.owner} is never used")
+ }
+ p.unsetVars foreach { v =>
+ unit.warning(v.pos, s"local var ${v.name} in ${v.owner} is never set - it could be a val")
+ }
+ p.unusedTypes foreach { t =>
+ val sym = t.symbol
+ val why = if (sym.isPrivate) "private" else "local"
+ unit.warning(t.pos, s"$why ${sym.fullLocationString} is never used")
+ }
+ }
+ }
+
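Hedged examples of declarations the UnusedPrivates traversal above would report (hypothetical user code; the wording approximates the interpolated warnings in `apply`):

class UnusedDemo {
  private def helper(x: Int): Int = x + 1   // "private method in class UnusedDemo is never used"
  def run(): Int = {
    var acc = 0                             // reassigned nowhere: "local var acc ... is never set - it could be a val"
    acc
  }
}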
object checkDead {
private val exprStack: mutable.Stack[Symbol] = mutable.Stack(NoSymbol)
// The method being applied to `tree` when `apply` is called.
@@ -446,12 +546,12 @@ trait TypeDiagnostics {
// It is presumed if you are using a -Y option you would really like to hear
// the warnings you've requested.
if (settings.warnDeadCode.value && context.unit.exists && treeOK(tree) && exprOK)
- context.warning(tree.pos, "dead code following this construct", true)
+ context.warning(tree.pos, "dead code following this construct", force = true)
tree
}
// The checkDead call from typedArg is more selective.
- def inMode(mode: Int, tree: Tree): Tree = {
+ def inMode(mode: Mode, tree: Tree): Tree = {
val modeOK = (mode & (EXPRmode | BYVALmode | POLYmode)) == (EXPRmode | BYVALmode)
if (modeOK) apply(tree)
else tree
@@ -476,7 +576,7 @@ trait TypeDiagnostics {
/** Report a type error.
*
- * @param pos0 The position where to report the error
+ * @param pos The position where to report the error
* @param ex The exception that caused the error
*/
def reportTypeError(context0: Context, pos: Position, ex: TypeError) {
diff --git a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala
index 60399f53bf..65a3fedbd2 100644
--- a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala
@@ -4,7 +4,7 @@
*/
package scala.tools.nsc
-package interpreter
+package typechecker
import java.lang.{ reflect => r }
import r.TypeVariable
@@ -12,16 +12,12 @@ import scala.reflect.NameTransformer
import NameTransformer._
import scala.reflect.runtime.{universe => ru}
import scala.reflect.{ClassTag, classTag}
-import typechecker.DestructureTypes
-import scala.reflect.internal.util.StringOps.ojoin
-import scala.language.implicitConversions
/** A more principled system for turning types into strings.
*/
trait StructuredTypeStrings extends DestructureTypes {
val global: Global
import global._
- import definitions._
case class LabelAndType(label: String, typeName: String) { }
object LabelAndType {
@@ -33,13 +29,11 @@ trait StructuredTypeStrings extends DestructureTypes {
else elems.mkString(ldelim, mdelim, rdelim)
)
}
- val NoGrouping = Grouping("", "", "", false)
- val ListGrouping = Grouping("(", ", ", ")", false)
- val ProductGrouping = Grouping("(", ", ", ")", true)
- val ParamGrouping = Grouping("(", ", ", ")", true)
- val BlockGrouping = Grouping(" { ", "; ", "}", false)
+ val NoGrouping = Grouping("", "", "", labels = false)
+ val ListGrouping = Grouping("(", ", ", ")", labels = false)
+ val ProductGrouping = Grouping("(", ", ", ")", labels = true)
+ val BlockGrouping = Grouping(" { ", "; ", "}", labels = false)
- private implicit def lowerName(n: Name): String = "" + n
private def str(level: Int)(body: => String): String = " " * level + body
private def block(level: Int, grouping: Grouping)(name: String, nodes: List[TypeNode]): String = {
val l1 = str(level)(name + grouping.ldelim)
@@ -49,7 +43,6 @@ trait StructuredTypeStrings extends DestructureTypes {
l1 +: l2 :+ l3 mkString "\n"
}
private def maybeBlock(level: Int, grouping: Grouping)(name: String, nodes: List[TypeNode]): String = {
- import grouping._
val threshold = 70
val try1 = str(level)(name + grouping.join(nodes map (_.show(0, grouping.labels)): _*))
@@ -57,10 +50,9 @@ trait StructuredTypeStrings extends DestructureTypes {
else block(level, grouping)(name, nodes)
}
private def shortClass(x: Any) = {
- if (opt.debug) {
+ if (settings.debug.value) {
val name = (x.getClass.getName split '.').last
- val isAnon = name.reverse takeWhile (_ != '$') forall (_.isDigit)
- val str = if (isAnon) name else (name split '$').last
+ val str = if (TypeStrings.isAnonClass(x.getClass)) name else (name split '$').last
" // " + str
}
@@ -72,7 +64,7 @@ trait StructuredTypeStrings extends DestructureTypes {
def nodes: List[TypeNode]
def show(indent: Int, showLabel: Boolean): String = maybeBlock(indent, grouping)(mkPrefix(showLabel), nodes)
- def show(indent: Int): String = show(indent, true)
+ def show(indent: Int): String = show(indent, showLabel = true)
def show(): String = show(0)
def withLabel(l: String): this.type = modifyNameInfo(_.copy(label = l))
@@ -146,7 +138,7 @@ trait StructuredTypeStrings extends DestructureTypes {
def wrapAtom[U](value: U) = new TypeAtom(value)
}
- def show(tp: Type): String = intoNodes(tp).show
+ def show(tp: Type): String = intoNodes(tp).show()
}
@@ -158,11 +150,11 @@ trait StructuredTypeStrings extends DestructureTypes {
* "definition" is when you want strings like
*/
trait TypeStrings {
+ private type JClass = java.lang.Class[_]
private val ObjectClass = classOf[java.lang.Object]
private val primitives = Set[String]("byte", "char", "short", "int", "long", "float", "double", "boolean", "void")
private val primitiveMap = primitives.toList map { x =>
val key = x match {
- case "void" => "Void"
case "int" => "Integer"
case "char" => "Character"
case s => s.capitalize
@@ -175,6 +167,11 @@ trait TypeStrings {
("java.lang." + key) -> ("scala." + value)
} toMap
+ def isAnonClass(cl: Class[_]) = {
+ val xs = cl.getName.reverse takeWhile (_ != '$')
+ xs.nonEmpty && xs.forall(_.isDigit)
+ }
+
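
The new isAnonClass helper relies on the JVM naming convention that anonymous classes get a trailing `$<digits>` suffix. A minimal standalone sketch of the same check (the object and method names below are ours, not part of the patch):

object AnonClassCheckDemo {
  // Same test as isAnonClass above: everything after the last '$' in the
  // class name must be a non-empty run of digits.
  def looksAnonymous(cl: Class[_]): Boolean = {
    val xs = cl.getName.reverse takeWhile (_ != '$')
    xs.nonEmpty && xs.forall(_.isDigit)
  }

  def main(args: Array[String]): Unit = {
    val anon = new Runnable { def run(): Unit = () } // compiles to something like ...$$anon$1
    println(looksAnonymous(anon.getClass))  // true
    println(looksAnonymous(classOf[String])) // false
  }
}
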
def scalaName(s: String): String = {
if (s endsWith MODULE_SUFFIX_STRING) s.init + ".type"
else if (s == "void") "scala.Unit"
@@ -184,18 +181,16 @@ trait TypeStrings {
// Trying to put humpty dumpty back together again.
def scalaName(clazz: JClass): String = {
val name = clazz.getName
- val isAnon = clazz.isScalaAnonymous
val enclClass = clazz.getEnclosingClass
def enclPre = enclClass.getName + MODULE_SUFFIX_STRING
def enclMatch = name startsWith enclPre
scalaName(
- if (enclClass == null || isAnon || !enclMatch) name
+ if (enclClass == null || isAnonClass(clazz) || !enclMatch) name
else enclClass.getName + "." + (name stripPrefix enclPre)
)
}
- def scalaName(ct: ClassTag[_]): String = scalaName(ct.runtimeClass)
- def anyClass(x: Any): JClass = if (x == null) null else x.getClass
+ def anyClass(x: Any): JClass = if (x == null) null else x.getClass
private def brackets(tps: String*): String =
if (tps.isEmpty) ""
@@ -212,14 +207,8 @@ trait TypeStrings {
}
private def tparamString[T: ru.TypeTag] : String = {
- def typeArguments: List[ru.Type] = {
- import ru.TypeRefTag // otherwise the pattern match will be unchecked
- // because TypeRef is an abstract type
- ru.typeOf[T] match { case ru.TypeRef(_, _, args) => args; case _ => Nil }
- }
- // [Eugene to Paul] need to use not the `rootMirror`, but a mirror with the REPL's classloader
- // how do I get to it? acquiring context classloader seems unreliable because of multithreading
- def typeVariables: List[java.lang.Class[_]] = typeArguments map (targ => ru.rootMirror.runtimeClass(targ))
+ import ru._ // get TypeRefTag in scope so that pattern match works (TypeRef is an abstract type)
+ def typeArguments: List[ru.Type] = ru.typeOf[T] match { case ru.TypeRef(_, _, args) => args; case _ => Nil }
brackets(typeArguments map (jc => tvarString(List(jc))): _*)
}
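
For readers unfamiliar with the runtime-universe API used in the rewritten tparamString: the TypeRef pattern match is how the type arguments of a reified type are recovered. A self-contained sketch using only the standard scala.reflect.runtime API (the helper name typeArgsOf is ours):

import scala.reflect.runtime.{universe => ru}

object TypeArgsDemo {
  // Mirrors the extraction in tparamString above; importing ru._ brings the
  // TypeRefTag into scope so the pattern match is checked rather than unchecked.
  def typeArgsOf[T: ru.TypeTag]: List[ru.Type] = {
    import ru._
    ru.typeOf[T] match {
      case ru.TypeRef(_, _, args) => args
      case _                      => Nil
    }
  }

  def main(args: Array[String]): Unit = {
    println(typeArgsOf[Map[String, Int]]) // e.g. List(String, Int)
    println(typeArgsOf[Int])              // List() -- no type arguments
  }
}
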
@@ -231,7 +220,6 @@ trait TypeStrings {
* practice to rely on toString for correctness) generated the VALID string
* representation of the type.
*/
- def fromTypedValue[T: ru.TypeTag : ClassTag](x: T): String = fromTag[T]
def fromValue(value: Any): String = if (value == null) "Null" else fromClazz(anyClass(value))
def fromClazz(clazz: JClass): String = scalaName(clazz) + tparamString(clazz)
def fromTag[T: ru.TypeTag : ClassTag] : String = scalaName(classTag[T].runtimeClass) + tparamString[T]
@@ -251,13 +239,6 @@ trait TypeStrings {
case (res, (k, v)) => res.replaceAll(k, v)
}
}
-
- val typeTransforms = List(
- "java.lang." -> "",
- "scala.collection.immutable." -> "immutable.",
- "scala.collection.mutable." -> "mutable.",
- "scala.collection.generic." -> "generic."
- )
}
object TypeStrings extends TypeStrings { }
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index d8493d2312..92f53f4956 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -13,9 +13,10 @@ package scala.tools.nsc
package typechecker
import scala.collection.mutable
-import scala.reflect.internal.util.{ BatchSourceFile, Statistics }
+import scala.reflect.internal.util.{ BatchSourceFile, Statistics, shortClassOfInstance }
import mutable.ListBuffer
import symtab.Flags._
+import Mode._
// Suggestion check whether we can do without priming scopes with symbols of outer scopes,
// like the IDE does.
@@ -24,14 +25,14 @@ import symtab.Flags._
* @author Martin Odersky
* @version 1.0
*/
-trait Typers extends Modes with Adaptations with Tags {
+trait Typers extends Adaptations with Tags {
self: Analyzer =>
import global._
import definitions._
import TypersStats._
- final def forArgMode(fun: Tree, mode: Int) =
+ final def forArgMode(fun: Tree, mode: Mode) =
if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode
else mode
@@ -52,35 +53,39 @@ trait Typers extends Modes with Adaptations with Tags {
object UnTyper extends Traverser {
override def traverse(tree: Tree) = {
- if (tree != EmptyTree) tree.tpe = null
- if (tree.hasSymbol) tree.symbol = NoSymbol
+ if (tree.canHaveAttrs) {
+ tree.clearType()
+ if (tree.hasSymbolField) tree.symbol = NoSymbol
+ }
super.traverse(tree)
}
}
-/* needed for experimental version where early types can be type arguments
- class EarlyMap(clazz: Symbol) extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(NoPrefix, sym, List()) if (sym hasFlag PRESUPER) =>
- TypeRef(ThisType(clazz), sym, List())
- case _ =>
- mapOver(tp)
+
+ sealed abstract class SilentResult[+T] {
+ @inline final def map[U](f: T => U): SilentResult[U] = this match {
+ case SilentResultValue(value) => SilentResultValue(f(value))
+ case x: SilentTypeError => x
+ }
+ @inline final def filter(p: T => Boolean): SilentResult[T] = this match {
+ case SilentResultValue(value) if !p(value) => SilentTypeError(TypeErrorWrapper(new TypeError(NoPosition, "!p")))
+ case _ => this
+ }
+ @inline final def orElse[T1 >: T](f: AbsTypeError => T1): T1 = this match {
+ case SilentResultValue(value) => value
+ case SilentTypeError(err) => f(err)
}
}
-*/
-
- sealed abstract class SilentResult[+T]
case class SilentTypeError(err: AbsTypeError) extends SilentResult[Nothing] { }
case class SilentResultValue[+T](value: T) extends SilentResult[T] { }
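
The combinators added to SilentResult (map, filter, orElse) are what let later hunks, such as the one in adaptToMemberWithArgs, chain silent typechecks without explicit pattern matching. A self-contained sketch of the same shape, with a plain String standing in for AbsTypeError (all names here are simplified stand-ins, not compiler code):

object SilentResultDemo {
  sealed abstract class SilentResult[+T] {
    // Propagate a value, keep an error untouched.
    def map[U](f: T => U): SilentResult[U] = this match {
      case SilentResultValue(v) => SilentResultValue(f(v))
      case e: SilentTypeError   => e
    }
    // Turn a value that fails the predicate into an error.
    def filter(p: T => Boolean): SilentResult[T] = this match {
      case SilentResultValue(v) if !p(v) => SilentTypeError("predicate failed")
      case _                             => this
    }
    // Unwrap the value or recover from the error.
    def orElse[T1 >: T](f: String => T1): T1 = this match {
      case SilentResultValue(v) => v
      case SilentTypeError(err) => f(err)
    }
  }
  case class SilentTypeError(err: String)    extends SilentResult[Nothing]
  case class SilentResultValue[+T](value: T) extends SilentResult[T]

  def main(args: Array[String]): Unit = {
    val ok: SilentResult[Int] = SilentResultValue(41)
    // Reads like the compiler's silent(...) map ... filter ... orElse ... chains.
    println(ok map (_ + 1) filter (_ > 0) orElse (_ => -1)) // 42
  }
}
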
def newTyper(context: Context): Typer = new NormalTyper(context)
+
private class NormalTyper(context : Context) extends Typer(context)
// A transient flag to mark members of anonymous classes
// that are turned private by typedBlock
private final val SYNTHETIC_PRIVATE = TRANS_FLAG
- private def isPastTyper = phase.id > currentRun.typerPhase.id
-
// To enable decent error messages when the typer crashes.
// TODO - this only catches trees which go through def typed,
// but there are all kinds of back ways - typedClassDef, etc. etc.
@@ -90,19 +95,27 @@ trait Typers extends Modes with Adaptations with Tags {
// when true:
// - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope)
// - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction
- // this is disabled by: -Xoldpatmat or interactive compilation (we run it for scaladoc due to SI-5933)
- private def newPatternMatching = opt.virtPatmat && !forInteractive //&& !forScaladoc && (phase.id < currentRun.uncurryPhase.id)
+ // this is disabled by: interactive compilation (we run it for scaladoc due to SI-5933)
+ protected def newPatternMatching = true // presently overridden in the presentation compiler
abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with TyperContextErrors {
import context0.unit
import typeDebug.{ ptTree, ptBlock, ptLine }
import TyperErrorGen._
+ /** Overridden to false in scaladoc and/or interactive. */
+ def canAdaptConstantTypeToLiteral = true
+ def canTranslateEmptyListToNil = true
+ def missingSelectErrorTree(tree: Tree, qual: Tree, name: Name): Tree = tree
+
+ def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree =
+ typed(docDef.definition, mode, pt)
+
val infer = new Inferencer(context0) {
override def isCoercible(tp: Type, pt: Type): Boolean = undoLog undo { // #3281
tp.isError || pt.isError ||
context0.implicitsEnabled && // this condition prevents chains of views
- inferView(EmptyTree, tp, pt, false) != EmptyTree
+ inferView(EmptyTree, tp, pt, reportAmbiguous = false) != EmptyTree
}
}
@@ -115,10 +128,7 @@ trait Typers extends Modes with Adaptations with Tags {
// paramFailed cannot be initialized with params.exists(_.tpe.isError) because that would
// hide some valid errors for params preceding the erroneous one.
var paramFailed = false
-
- def mkPositionalArg(argTree: Tree, paramName: Name) = argTree
- def mkNamedArg(argTree: Tree, paramName: Name) = atPos(argTree.pos)(new AssignOrNamedArg(Ident(paramName), (argTree)))
- var mkArg: (Tree, Name) => Tree = mkPositionalArg
+ var mkArg: (Name, Tree) => Tree = (_, tree) => tree
// DEPMETTODO: instantiate type vars that depend on earlier implicit args (see adapt (4.1))
//
@@ -129,13 +139,13 @@ trait Typers extends Modes with Adaptations with Tags {
for(ar <- argResultsBuff)
paramTp = paramTp.subst(ar.subst.from, ar.subst.to)
- val res = if (paramFailed || (paramTp.isError && {paramFailed = true; true})) SearchFailure else inferImplicit(fun, paramTp, context.reportErrors, false, context)
+ val res = if (paramFailed || (paramTp.isError && {paramFailed = true; true})) SearchFailure else inferImplicit(fun, paramTp, context.reportErrors, isView = false, context)
argResultsBuff += res
if (res.isSuccess) {
- argBuff += mkArg(res.tree, param.name)
+ argBuff += mkArg(param.name, res.tree)
} else {
- mkArg = mkNamedArg // don't pass the default argument (if any) here, but start emitting named arguments for the following args
+ mkArg = gen.mkNamedArg // don't pass the default argument (if any) here, but start emitting named arguments for the following args
if (!param.hasDefault && !paramFailed) {
context.errBuffer.find(_.kind == ErrorKinds.Divergent) match {
case Some(divergentImplicit) =>
@@ -172,9 +182,9 @@ trait Typers extends Modes with Adaptations with Tags {
}
def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean): Tree =
- inferView(tree, from, to, reportAmbiguous, true)
+ inferView(tree, from, to, reportAmbiguous, saveErrors = true)
- /** Infer an implicit conversion (``view'') between two types.
+ /** Infer an implicit conversion (`view`) between two types.
* @param tree The tree which needs to be converted.
* @param from The source type of the conversion
* @param to The target type of the conversion
@@ -194,7 +204,7 @@ trait Typers extends Modes with Adaptations with Tags {
case PolyType(_, _) => EmptyTree
case _ =>
def wrapImplicit(from: Type): Tree = {
- val result = inferImplicit(tree, functionType(from :: Nil, to), reportAmbiguous, true, context, saveErrors)
+ val result = inferImplicit(tree, functionType(from.withoutAnnotations :: Nil, to), reportAmbiguous, isView = true, context, saveAmbiguousDivergent = saveErrors)
if (result.subst != EmptyTreeTypeSubstituter) {
result.subst traverse tree
notifyUndetparamsInferred(result.subst.from, result.subst.to)
@@ -232,10 +242,7 @@ trait Typers extends Modes with Adaptations with Tags {
case _ => tp
}
- /** Check that <code>tree</code> is a stable expression.
- *
- * @param tree ...
- * @return ...
+ /** Check that `tree` is a stable expression.
*/
def checkStable(tree: Tree): Tree = (
if (treeInfo.isExprSafeToInline(tree)) tree
@@ -247,7 +254,7 @@ trait Typers extends Modes with Adaptations with Tags {
* of its symbol was not volatile?
*/
protected def isStableExceptVolatile(tree: Tree) = {
- tree.hasSymbol && tree.symbol != NoSymbol && tree.tpe.isVolatile &&
+ tree.hasSymbolField && tree.symbol != NoSymbol && tree.tpe.isVolatile &&
{ val savedTpe = tree.symbol.info
val savedSTABLE = tree.symbol getFlag STABLE
tree.symbol setInfo AnyRefClass.tpe
@@ -289,16 +296,11 @@ trait Typers extends Modes with Adaptations with Tags {
)
}
- /** Check that type <code>tp</code> is not a subtype of itself.
- *
- * @param pos ...
- * @param tp ...
- * @return <code>true</code> if <code>tp</code> is not a subtype of itself.
+ /** Check that type `tp` is not a subtype of itself.
*/
def checkNonCyclic(pos: Position, tp: Type): Boolean = {
def checkNotLocked(sym: Symbol) = {
- sym.initialize
- sym.lockOK || { CyclicAliasingOrSubtypingError(pos, sym); false }
+ sym.initialize.lockOK || { CyclicAliasingOrSubtypingError(pos, sym); false }
}
tp match {
case TypeRef(pre, sym, args) =>
@@ -309,12 +311,6 @@ trait Typers extends Modes with Adaptations with Tags {
case SingleType(pre, sym) =>
checkNotLocked(sym)
-/*
- case TypeBounds(lo, hi) =>
- var ok = true
- for (t <- lo) ok = ok & checkNonCyclic(pos, t)
- ok
-*/
case st: SubType =>
checkNonCyclic(pos, st.supertype)
case ct: CompoundType =>
@@ -325,19 +321,19 @@ trait Typers extends Modes with Adaptations with Tags {
}
def checkNonCyclic(pos: Position, tp: Type, lockedSym: Symbol): Boolean = try {
- if (!lockedSym.lock(CyclicReferenceError(pos, lockedSym))) false
+ if (!lockedSym.lock(CyclicReferenceError(pos, tp, lockedSym))) false
else checkNonCyclic(pos, tp)
} finally {
lockedSym.unlock()
}
def checkNonCyclic(sym: Symbol) {
- if (!checkNonCyclic(sym.pos, sym.tpe)) sym.setInfo(ErrorType)
+ if (!checkNonCyclic(sym.pos, sym.tpe_*)) sym.setInfo(ErrorType)
}
def checkNonCyclic(defn: Tree, tpt: Tree) {
if (!checkNonCyclic(defn.pos, tpt.tpe, defn.symbol)) {
- tpt.tpe = ErrorType
+ tpt setType ErrorType
defn.symbol.setInfo(ErrorType)
}
}
@@ -368,28 +364,13 @@ trait Typers extends Modes with Adaptations with Tags {
private var scope: Scope = _
private var hiddenSymbols: List[Symbol] = _
- /** Check that type <code>tree</code> does not refer to private
+ /** Check that type `tree` does not refer to private
* components unless itself is wrapped in something private
- * (<code>owner</code> tells where the type occurs).
- *
- * @param owner ...
- * @param tree ...
- * @return ...
+ * (`owner` tells where the type occurs).
*/
def privates[T <: Tree](owner: Symbol, tree: T): T =
check(owner, EmptyScope, WildcardType, tree)
- /** Check that type <code>tree</code> does not refer to entities
- * defined in scope <code>scope</code>.
- *
- * @param scope ...
- * @param pt ...
- * @param tree ...
- * @return ...
- */
- def locals[T <: Tree](scope: Scope, pt: Type, tree: T): T =
- check(NoSymbol, scope, pt, tree)
-
private def check[T <: Tree](owner: Symbol, scope: Scope, pt: Type, tree: T): T = {
this.owner = owner
this.scope = scope
@@ -465,7 +446,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
/** The qualifying class
- * of a this or super with prefix <code>qual</code>.
+ * of a this or super with prefix `qual`.
* packageOK is false when a package class is not acceptable as the qualifying class symbol
*/
def qualifyingClass(tree: Tree, qual: Name, packageOK: Boolean) =
@@ -551,13 +532,13 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- /** Does the context of tree <code>tree</code> require a stable type?
+ /** Does the context of tree `tree` require a stable type?
*/
- private def isStableContext(tree: Tree, mode: Int, pt: Type) =
- isNarrowable(tree.tpe) && ((mode & (EXPRmode | LHSmode)) == EXPRmode) &&
+ private def isStableContext(tree: Tree, mode: Mode, pt: Type) =
+ isNarrowable(tree.tpe) && mode.inExprMode && mode.inNone(LHSmode) &&
(xtypes ||
(pt.isStable ||
- (mode & QUALmode) != 0 && !tree.symbol.isConstant ||
+ mode.inAll(QUALmode) && !tree.symbol.isConstant ||
pt.typeSymbol.isAbstractType && pt.bounds.lo.isStable && !(tree.tpe <:< pt)) ||
pt.typeSymbol.isRefinementClass && !(tree.tpe <:< pt))
@@ -570,11 +551,13 @@ trait Typers extends Modes with Adaptations with Tags {
* @return modified tree and new prefix type
*/
private def makeAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): (Tree, Type) =
- if (isInPackageObject(sym, pre.typeSymbol)) {
+ if (context.isInPackageObject(sym, pre.typeSymbol)) {
if (pre.typeSymbol == ScalaPackageClass && sym.isTerm) {
// short cut some aliases. It seems pattern matching needs this
// to notice exhaustiveness and to generate good code when
// List extractors are mixed with :: patterns. See Test5 in lists.scala.
+ //
+ // TODO SI-6609 Eliminate this special case once the old pattern matcher is removed.
def dealias(sym: Symbol) =
(atPos(tree.pos.makeTransparent) {gen.mkAttributedRef(sym)} setPos tree.pos, sym.owner.thisType)
sym.name match {
@@ -603,41 +586,21 @@ trait Typers extends Modes with Adaptations with Tags {
(checkAccessible(tree, sym, pre, site), pre)
}
- /** Is `sym` defined in package object of package `pkg`?
- */
- private def isInPackageObject(sym: Symbol, pkg: Symbol) = {
- def isInPkgObj(sym: Symbol) =
- !sym.owner.isPackage && {
- sym.owner.isPackageObjectClass &&
- sym.owner.owner == pkg ||
- pkg.isInitialized && {
- // need to be careful here to not get a cyclic reference during bootstrap
- val pkgobj = pkg.info.member(nme.PACKAGEkw)
- pkgobj.isInitialized &&
- (pkgobj.info.member(sym.name).alternatives contains sym)
- }
- }
- pkg.isPackageClass && {
- if (sym.isOverloaded) sym.alternatives forall isInPkgObj
- else isInPkgObj(sym)
- }
- }
-
/** Post-process an identifier or selection node, performing the following:
* 1. Check that non-function pattern expressions are stable
* 2. Check that packages and static modules are not used as values
* 3. Turn tree type into stable type if possible and required by context.
* 4. Give getClass calls a more precise type based on the type of the target of the call.
*/
- private def stabilize(tree: Tree, pre: Type, mode: Int, pt: Type): Tree = {
- if (tree.symbol.isOverloaded && !inFunMode(mode))
+ private def stabilize(tree: Tree, pre: Type, mode: Mode, pt: Type): Tree = {
+ if (tree.symbol.isOverloaded && !mode.inFunMode)
inferExprAlternative(tree, pt)
val sym = tree.symbol
def fail() = NotAValueError(tree, sym)
if (tree.isErrorTyped) tree
- else if ((mode & (PATTERNmode | FUNmode)) == PATTERNmode && tree.isTerm) { // (1)
+ else if (mode.inPatternNotFunMode && tree.isTerm) { // (1)
if (sym.isValue) {
val tree1 = checkStable(tree)
// A module reference in a pattern has type Foo.type, not "object Foo"
@@ -671,13 +634,7 @@ trait Typers extends Modes with Adaptations with Tags {
case _ => !phase.erasedTypes
}
- /**
- * @param tree ...
- * @param mode ...
- * @param pt ...
- * @return ...
- */
- def stabilizeFun(tree: Tree, mode: Int, pt: Type): Tree = {
+ def stabilizeFun(tree: Tree, mode: Mode, pt: Type): Tree = {
val sym = tree.symbol
val pre = tree match {
case Select(qual, _) => qual.tpe
@@ -774,7 +731,7 @@ trait Typers extends Modes with Adaptations with Tags {
featureTrait.owner.ownerChain.takeWhile(_ != languageFeatureModule.moduleClass).reverse
val featureName = (nestedOwners map (_.name + ".")).mkString + featureTrait.name
def action(): Boolean = {
- def hasImport = inferImplicit(EmptyTree: Tree, featureTrait.tpe, true, false, context).isSuccess
+ def hasImport = inferImplicit(EmptyTree: Tree, featureTrait.tpe, reportAmbiguous = true, isView = false, context).isSuccess
def hasOption = settings.language.value exists (s => s == featureName || s == "_")
val OK = hasImport || hasOption
if (!OK) {
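
At the use site, the two checks above correspond to the two standard ways of enabling a language feature; a minimal illustration (the particular feature chosen here is arbitrary):

// Either an import of the feature object ...
import scala.language.higherKinds
// ... or the matching compiler option, e.g. -language:higherKinds, or -language:_ to
// enable every feature (the "_" case matched by hasOption above).
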
@@ -845,7 +802,7 @@ trait Typers extends Modes with Adaptations with Tags {
* (14) When in mode EXPRmode, apply a view
* If all this fails, error
*/
- protected def adapt(tree: Tree, mode: Int, pt: Type, original: Tree = EmptyTree): Tree = {
+ protected def adapt(tree: Tree, mode: Mode, pt: Type, original: Tree = EmptyTree): Tree = {
def adaptToImplicitMethod(mt: MethodType): Tree = {
if (context.undetparams.nonEmpty) { // (9) -- should revisit dropped condition `(mode & POLYmode) == 0`
@@ -860,29 +817,28 @@ trait Typers extends Modes with Adaptations with Tags {
// avoid throwing spurious DivergentImplicit errors
if (context.hasErrors)
- return setError(tree)
-
- withCondConstrTyper(treeInfo.isSelfOrSuperConstrCall(tree)){ typer1 =>
- if (original != EmptyTree && pt != WildcardType)
- typer1.silent(tpr => {
+ setError(tree)
+ else
+ withCondConstrTyper(treeInfo.isSelfOrSuperConstrCall(tree))(typer1 =>
+ if (original != EmptyTree && pt != WildcardType) (
+ typer1 silent { tpr =>
val withImplicitArgs = tpr.applyImplicitArgs(tree)
if (tpr.context.hasErrors) tree // silent will wrap it in SilentTypeError anyway
else tpr.typed(withImplicitArgs, mode, pt)
- }) match {
- case SilentResultValue(result) =>
- result
- case _ =>
+ }
+ orElse { _ =>
debuglog("fallback on implicits: " + tree + "/" + resetAllAttrs(original))
val tree1 = typed(resetAllAttrs(original), mode, WildcardType)
// Q: `typed` already calls `pluginsTyped` and `adapt`. the only difference here is that
// we pass `EmptyTree` as the `original`. intended? added in 2009 (53d98e7d42) by martin.
- tree1.tpe = pluginsTyped(tree1.tpe, this, tree1, mode, pt)
+ tree1 setType pluginsTyped(tree1.tpe, this, tree1, mode, pt)
if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree)
}
+ )
else
typer1.typed(typer1.applyImplicitArgs(tree), mode, pt)
+ )
}
- }
def instantiateToMethodType(mt: MethodType): Tree = {
val meth = tree match {
@@ -890,11 +846,10 @@ trait Typers extends Modes with Adaptations with Tags {
case Block(_, tree1) => tree1.symbol
case _ => tree.symbol
}
- if (!meth.isConstructor && !meth.isTermMacro && isFunctionType(pt)) { // (4.2)
+ if (!meth.isConstructor && isFunctionType(pt)) { // (4.2)
debuglog("eta-expanding " + tree + ":" + tree.tpe + " to " + pt)
checkParamsConvertible(tree, tree.tpe)
val tree0 = etaExpand(context.unit, tree, this)
- // println("eta "+tree+" ---> "+tree0+":"+tree0.tpe+" undet: "+context.undetparams+ " mode: "+Integer.toHexString(mode))
if (context.undetparams.nonEmpty) {
// #2624: need to infer type arguments for eta expansion of a polymorphic method
@@ -916,13 +871,13 @@ trait Typers extends Modes with Adaptations with Tags {
}
def adaptType(): Tree = {
- if (inFunMode(mode)) {
+ if (mode.inFunMode) {
// todo. the commented line below makes sense for typechecking, say, TypeApply(Ident(`some abstract type symbol`), List(...))
// because otherwise Ident will have its tpe set to a TypeRef, not to a PolyType, and `typedTypeApply` will fail
// but this needs additional investigation, because it crashes t5228, gadts1 and maybe something else
// tree setType tree.tpe.normalize
tree
- } else if (tree.hasSymbol && !tree.symbol.typeParams.isEmpty && !inHKMode(mode) &&
+ } else if (tree.hasSymbolField && !tree.symbol.typeParams.isEmpty && !mode.inHKMode &&
!(tree.symbol.isJavaDefined && context.unit.isJava)) { // (7)
// @M When not typing a higher-kinded type ((mode & HKmode) == 0)
// or raw type (tree.symbol.isJavaDefined && context.unit.isJava), types must be of kind *,
@@ -930,8 +885,8 @@ trait Typers extends Modes with Adaptations with Tags {
// @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't?
MissingTypeParametersError(tree)
} else if ( // (7.1) @M: check kind-arity
- // @M: removed check for tree.hasSymbol and replace tree.symbol by tree.tpe.symbol (TypeTree's must also be checked here, and they don't directly have a symbol)
- (inHKMode(mode)) &&
+ // @M: removed check for tree.hasSymbolField and replace tree.symbol by tree.tpe.symbol (TypeTree's must also be checked here, and they don't directly have a symbol)
+ mode.inHKMode &&
// @M: don't check tree.tpe.symbol.typeParams. check tree.tpe.typeParams!!!
// (e.g., m[Int] --> tree.tpe.symbol.typeParams.length == 1, tree.tpe.typeParams.length == 0!)
!sameLength(tree.tpe.typeParams, pt.typeParams) &&
@@ -980,9 +935,11 @@ trait Typers extends Modes with Adaptations with Tags {
def adaptConstrPattern(): Tree = { // (5)
def hasUnapplyMember(tp: Type) = reallyExists(unapplyMember(tp))
val overloadedExtractorOfObject = tree.symbol filter (sym => hasUnapplyMember(sym.tpe))
- // if the tree's symbol's type does not define an extractor, maybe the tree's type does
- // this is the case when we encounter an arbitrary tree as the target of an unapply call (rather than something that looks like a constructor call)
- // (for now, this only happens due to wrapClassTagUnapply, but when we support parameterized extractors, it will become more common place)
+ // if the tree's symbol's type does not define an extractor, maybe the tree's type does.
+ // this is the case when we encounter an arbitrary tree as the target of an unapply call
+ // (rather than something that looks like a constructor call). (for now, this only happens
+ // due to wrapClassTagUnapply, but when we support parameterized extractors, it will become
+ // more commonplace)
val extractor = overloadedExtractorOfObject orElse unapplyMember(tree.tpe)
if (extractor != NoSymbol) {
// if we did some ad-hoc overloading resolution, update the tree's symbol
@@ -992,27 +949,28 @@ trait Typers extends Modes with Adaptations with Tags {
tree setSymbol overloadedExtractorOfObject
tree.tpe match {
- case OverloadedType(pre, alts) => tree.tpe = overloadedType(pre, alts filter (alt => hasUnapplyMember(alt.tpe)))
+ case OverloadedType(pre, alts) => tree setType overloadedType(pre, alts filter (alt => hasUnapplyMember(alt.tpe)))
case _ =>
}
val unapply = unapplyMember(extractor.tpe)
val clazz = unapplyParameterType(unapply)
- if (unapply.isCase && clazz.isCase && !(clazz.ancestors exists (_.isCase))) {
+ if (unapply.isCase && clazz.isCase) {
// convert synthetic unapply of case class to case class constructor
val prefix = tree.tpe.prefix
val tree1 = TypeTree(clazz.primaryConstructor.tpe.asSeenFrom(prefix, clazz.owner))
.setOriginal(tree)
val skolems = new mutable.ListBuffer[TypeSymbol]
- object variantToSkolem extends VariantTypeMap {
+ object variantToSkolem extends TypeMap(trackVariance = true) {
def apply(tp: Type) = mapOver(tp) match {
- case TypeRef(NoPrefix, tpSym, Nil) if variance != 0 && tpSym.isTypeParameterOrSkolem && tpSym.owner.isTerm =>
+ // !!! FIXME - skipping this when variance.isInvariant allows unsoundness, see SI-5189
+ case TypeRef(NoPrefix, tpSym, Nil) if !variance.isInvariant && tpSym.isTypeParameterOrSkolem && tpSym.owner.isTerm =>
// must initialize or tpSym.tpe might see random type params!!
// without this, we'll get very weird types inferred in test/scaladoc/run/SI-5933.scala
// TODO: why is that??
tpSym.initialize
- val bounds = if (variance == 1) TypeBounds.upper(tpSym.tpe) else TypeBounds.lower(tpSym.tpe)
+ val bounds = if (variance.isPositive) TypeBounds.upper(tpSym.tpe) else TypeBounds.lower(tpSym.tpe)
// origin must be the type param so we can deskolemize
val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?"+tpSym.name), tpSym, bounds)
// println("mapping "+ tpSym +" to "+ skolem + " : "+ bounds +" -- pt= "+ pt +" in "+ context.owner +" at "+ context.tree )
@@ -1055,7 +1013,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
def insertApply(): Tree = {
- assert(!inHKMode(mode), modeString(mode)) //@M
+ assert(!mode.inHKMode, mode) //@M
val adapted = adaptToName(tree, nme.apply)
def stabilize0(pre: Type): Tree = stabilize(adapted, pre, EXPRmode | QUALmode, WildcardType)
// TODO reconcile the overlap between Typers#stablize and TreeGen.stabilize
@@ -1083,26 +1041,26 @@ trait Typers extends Modes with Adaptations with Tags {
tree.tpe match {
case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (-1)
adaptAnnotations(tree, this, mode, pt)
- case ct @ ConstantType(value) if inNoModes(mode, TYPEmode | FUNmode) && (ct <:< pt) && !forScaladoc && !forInteractive => // (0)
+ case ct @ ConstantType(value) if mode.inNone(TYPEmode | FUNmode) && (ct <:< pt) && canAdaptConstantTypeToLiteral => // (0)
val sym = tree.symbol
if (sym != null && sym.isDeprecated) {
val msg = sym.toString + sym.locationString + " is deprecated: " + sym.deprecationMessage.getOrElse("")
unit.deprecationWarning(tree.pos, msg)
}
treeCopy.Literal(tree, value)
- case OverloadedType(pre, alts) if !inFunMode(mode) => // (1)
+ case OverloadedType(pre, alts) if !mode.inFunMode => // (1)
inferExprAlternative(tree, pt)
adapt(tree, mode, pt, original)
case NullaryMethodType(restpe) => // (2)
adapt(tree setType restpe, mode, pt, original)
- case TypeRef(_, ByNameParamClass, List(arg)) if ((mode & EXPRmode) != 0) => // (2)
+ case TypeRef(_, ByNameParamClass, List(arg)) if mode.inExprMode => // (2)
adapt(tree setType arg, mode, pt, original)
case tr @ TypeRef(_, sym, _) if sym.isAliasType && tr.dealias.isInstanceOf[ExistentialType] &&
((mode & (EXPRmode | LHSmode)) == EXPRmode) =>
adapt(tree setType tr.dealias.skolemizeExistential(context.owner, tree), mode, pt, original)
case et @ ExistentialType(_, _) if ((mode & (EXPRmode | LHSmode)) == EXPRmode) =>
adapt(tree setType et.skolemizeExistential(context.owner, tree), mode, pt, original)
- case PolyType(tparams, restpe) if inNoModes(mode, TAPPmode | PATTERNmode | HKmode) => // (3)
+ case PolyType(tparams, restpe) if mode.inNone(TAPPmode | PATTERNmode | HKmode) => // (3)
// assert((mode & HKmode) == 0) //@M a PolyType in HKmode represents an anonymous type function,
// we're in HKmode since a higher-kinded type is expected --> hence, don't implicitly apply it to type params!
// ticket #2197 triggered turning the assert into a guard
@@ -1121,18 +1079,19 @@ trait Typers extends Modes with Adaptations with Tags {
adaptToImplicitMethod(mt)
case mt: MethodType if (((mode & (EXPRmode | FUNmode | LHSmode)) == EXPRmode) &&
- (context.undetparams.isEmpty || inPolyMode(mode))) && !(tree.symbol != null && tree.symbol.isTermMacro) =>
+ (context.undetparams.isEmpty || mode.inPolyMode)) && !treeInfo.isMacroApplicationOrBlock(tree) =>
instantiateToMethodType(mt)
case _ =>
- def shouldInsertApply(tree: Tree) = inAllModes(mode, EXPRmode | FUNmode) && (tree.tpe match {
+ def vanillaAdapt(tree: Tree) = {
+ def shouldInsertApply(tree: Tree) = mode.inAll(EXPRmode | FUNmode) && (tree.tpe match {
case _: MethodType | _: OverloadedType | _: PolyType => false
case _ => applyPossible
})
def applyPossible = {
def applyMeth = member(adaptToName(tree, nme.apply), nme.apply)
dyna.acceptsApplyDynamic(tree.tpe) || (
- if ((mode & TAPPmode) != 0)
+ if (mode.inAll(TAPPmode))
tree.tpe.typeParams.isEmpty && applyMeth.filter(!_.tpe.typeParams.isEmpty) != NoSymbol
else
applyMeth.filter(_.tpe.paramSectionCount > 0) != NoSymbol
@@ -1140,18 +1099,13 @@ trait Typers extends Modes with Adaptations with Tags {
}
if (tree.isType)
adaptType()
- else if (
- inExprModeButNot(mode, FUNmode) && !tree.isDef && // typechecking application
- tree.symbol != null && tree.symbol.isTermMacro && // of a macro
- !tree.attachments.get[SuppressMacroExpansionAttachment.type].isDefined)
- macroExpand(this, tree, mode, pt)
- else if ((mode & (PATTERNmode | FUNmode)) == (PATTERNmode | FUNmode))
+ else if (mode.inAll(PATTERNmode | FUNmode))
adaptConstrPattern()
else if (shouldInsertApply(tree))
insertApply()
- else if (!context.undetparams.isEmpty && !inPolyMode(mode)) { // (9)
- assert(!inHKMode(mode), modeString(mode)) //@M
- if (inExprModeButNot(mode, FUNmode) && pt.typeSymbol == UnitClass)
+ else if (!context.undetparams.isEmpty && !mode.inPolyMode) { // (9)
+ assert(!mode.inHKMode, mode) //@M
+ if (mode.inExprModeButNot(FUNmode) && pt.typeSymbol == UnitClass)
instantiateExpectingUnit(tree, mode)
else
instantiate(tree, mode, pt)
@@ -1159,7 +1113,7 @@ trait Typers extends Modes with Adaptations with Tags {
tree
} else {
def fallBack: Tree = {
- if (inPatternMode(mode)) {
+ if (mode.inPatternMode) {
if ((tree.symbol ne null) && tree.symbol.isModule)
inferModulePattern(tree, pt)
if (isPopulated(tree.tpe, approximateAbstracts(pt)))
@@ -1168,7 +1122,7 @@ trait Typers extends Modes with Adaptations with Tags {
val tree1 = constfold(tree, pt) // (10) (11)
if (tree1.tpe <:< pt) adapt(tree1, mode, pt, original)
else {
- if (inExprModeButNot(mode, FUNmode)) {
+ if (mode.inExprModeButNot(FUNmode)) {
pt.dealias match {
case TypeRef(_, sym, _) =>
// note: was if (pt.typeSymbol == UnitClass) but this leads to a potentially
@@ -1196,14 +1150,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (context.implicitsEnabled && !pt.isError && !tree.isErrorTyped) {
// (14); the condition prevents chains of views
debuglog("inferring view from " + tree.tpe + " to " + pt)
- val coercion = inferView(tree, tree.tpe, pt, true)
- // convert forward views of delegate types into closures wrapped around
- // the delegate's apply method (the "Invoke" method, which was translated into apply)
- if (forMSIL && coercion != null && isCorrespondingDelegate(tree.tpe, pt)) {
- val meth: Symbol = tree.tpe.member(nme.apply)
- debuglog("replacing forward delegate view with: " + meth + ":" + meth.tpe)
- return typed(Select(tree, meth), mode, pt)
- }
+ val coercion = inferView(tree, tree.tpe, pt, reportAmbiguous = true)
if (coercion != EmptyTree) {
def msg = "inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe
if (settings.logImplicitConv.value)
@@ -1225,9 +1172,9 @@ trait Typers extends Modes with Adaptations with Tags {
val found = tree.tpe
if (!found.isErroneous && !pt.isErroneous) {
if ((!context.reportErrors && isPastTyper) || tree.attachments.get[MacroExpansionAttachment].isDefined) {
- val (bound, req) = pt match {
- case ExistentialType(qs, tpe) => (qs, tpe)
- case _ => (Nil, pt)
+ val bound = pt match {
+ case ExistentialType(qs, _) => qs
+ case _ => Nil
}
val boundOrSkolems = bound ++ pt.skolemsExceptMethodTypeParams
if (boundOrSkolems.nonEmpty) {
@@ -1281,9 +1228,12 @@ trait Typers extends Modes with Adaptations with Tags {
fallBack
}
}
+ val tree1 = if (mode.inExprModeButNot(FUNmode) && treeInfo.isMacroApplication(tree)) macroExpandApply(this, tree, mode, pt) else tree
+ if (tree == tree1) vanillaAdapt(tree1) else tree1
+ }
}
- def instantiate(tree: Tree, mode: Int, pt: Type): Tree = {
+ def instantiate(tree: Tree, mode: Mode, pt: Type): Tree = {
inferExprInstance(tree, context.extractUndetparams(), pt)
adapt(tree, mode, pt)
}
@@ -1291,11 +1241,9 @@ trait Typers extends Modes with Adaptations with Tags {
* with expected type Unit, but if that fails, try again with pt = WildcardType
* and discard the expression.
*/
- def instantiateExpectingUnit(tree: Tree, mode: Int): Tree = {
+ def instantiateExpectingUnit(tree: Tree, mode: Mode): Tree = {
val savedUndetparams = context.undetparams
- silent(_.instantiate(tree, mode, UnitClass.tpe)) match {
- case SilentResultValue(t) => t
- case _ =>
+ silent(_.instantiate(tree, mode, UnitClass.tpe)) orElse { _ =>
context.undetparams = savedUndetparams
val valueDiscard = atPos(tree.pos)(Block(List(instantiate(tree, mode, WildcardType)), Literal(Constant())))
typed(valueDiscard, mode, UnitClass.tpe)
@@ -1355,45 +1303,38 @@ trait Typers extends Modes with Adaptations with Tags {
def doAdapt(restpe: Type) =
//util.trace("adaptToArgs "+qual+", name = "+name+", argtpes = "+(args map (_.tpe))+", pt = "+pt+" = ")
adaptToMember(qual, HasMethodMatching(name, args map (_.tpe), restpe), reportAmbiguous, saveErrors)
- if (pt != WildcardType) {
- silent(_ => doAdapt(pt)) match {
- case SilentResultValue(result) if result != qual =>
- result
- case _ =>
- debuglog("fallback on implicits in adaptToArguments: "+qual+" . "+name)
- doAdapt(WildcardType)
- }
- } else
+
+ if (pt == WildcardType)
doAdapt(pt)
+ else silent(_ => doAdapt(pt)) filter (_ != qual) orElse (_ =>
+ logResult(s"fallback on implicits in adaptToArguments: $qual.$name")(doAdapt(WildcardType))
+ )
}
/** Try to apply an implicit conversion to `qual` so that it contains
* a method `name`. If that's ambiguous try taking arguments into
* account using `adaptToArguments`.
*/
- def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Int, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
- def onError(reportError: => Tree): Tree = {
- context.tree match {
+ def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Mode, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
+ def onError(reportError: => Tree): Tree = context.tree match {
case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty =>
- silent(_.typedArgs(args, mode)) match {
- case SilentResultValue(xs) =>
- val args = xs.asInstanceOf[List[Tree]]
- if (args exists (_.isErrorTyped))
- reportError
- else
- adaptToArguments(qual, name, args, WildcardType, reportAmbiguous, saveErrors)
+ ( silent (_.typedArgs(args, mode))
+ map (_.asInstanceOf[List[Tree]])
+ filter (xs => !(xs exists (_.isErrorTyped)))
+ map (xs => adaptToArguments(qual, name, xs, WildcardType, reportAmbiguous, saveErrors))
+ orElse ( _ => reportError)
+ )
case _ =>
reportError
}
- case _ =>
- reportError
+
+ silent(_.adaptToMember(qual, HasMember(name), reportAmbiguous = false)) orElse (err =>
+ onError {
+ if (reportAmbiguous) context issue err
+ setError(tree)
}
+ )
}
- silent(_.adaptToMember(qual, HasMember(name), false)) match {
- case SilentResultValue(res) => res
- case SilentTypeError(err) => onError({if (reportAmbiguous) { context.issue(err) }; setError(tree)})
- }
- }
/** Try to apply an implicit conversion to `qual` so that it contains a
* member `name` of arbitrary type.
@@ -1403,13 +1344,6 @@ trait Typers extends Modes with Adaptations with Tags {
if (member(qual, name) != NoSymbol) qual
else adaptToMember(qual, HasMember(name))
- private def typePrimaryConstrBody(clazz : Symbol, cbody: Tree, tparams: List[Symbol], enclTparams: List[Symbol], vparamss: List[List[ValDef]]): Tree = {
- // XXX: see about using the class's symbol....
- enclTparams foreach (sym => context.scope.enter(sym))
- namer.enterValueParams(vparamss)
- typed(cbody)
- }
-
private def validateNoCaseAncestor(clazz: Symbol) = {
if (!phase.erasedTypes) {
for (ancestor <- clazz.ancestors find (_.isCase)) {
@@ -1511,126 +1445,246 @@ trait Typers extends Modes with Adaptations with Tags {
unit.error(tparam.pos, "type parameter of value class may not be specialized")
}
- def parentTypes(templ: Template): List[Tree] =
- if (templ.parents.isEmpty) List(atPos(templ.pos)(TypeTree(AnyRefClass.tpe)))
- else try {
- val clazz = context.owner
- // Normalize supertype and mixins so that supertype is always a class, not a trait.
- var supertpt = typedTypeConstructor(templ.parents.head)
- val firstParent = supertpt.tpe.typeSymbol
- var mixins = templ.parents.tail map typedType
- // If first parent is a trait, make it first mixin and add its superclass as first parent
- while ((supertpt.tpe.typeSymbol ne null) && supertpt.tpe.typeSymbol.initialize.isTrait) {
- val supertpt1 = typedType(supertpt)
- if (!supertpt1.isErrorTyped) {
- mixins = supertpt1 :: mixins
- supertpt = TypeTree(supertpt1.tpe.firstParent) setPos supertpt.pos.focus
+ /** Typechecks a parent type reference.
+ *
+ * This typecheck is harder than it might look, because it should honor early
+ * definitions and also perform type argument inference with the help of super call
+ * arguments provided in `encodedtpt`.
+ *
+ * The method is called once per parent type referenced, and this happens in two
+ * batches per definition: once from the namer, when entering a ClassDef or a ModuleDef,
+ * and once from the typer, when typechecking the definition.
+ *
+ * ***Arguments***
+ *
+ * `encodedtpt` represents the parent type reference wrapped in an `Apply` node
+ * which indicates value arguments (i.e. type macro arguments or super constructor call arguments).
+ * If no value arguments are provided by the user, the `Apply` node is still
+ * there, but its `args` will be set to `Nil`.
+ * This argument is synthesized by `tools.nsc.ast.Parsers.templateParents`.
+ *
+ * `templ` is an enclosing template, which contains a primary constructor synthesized by the parser.
+ * Such a constructor is a DefDef which contains early initializers and maybe a super constructor call
+ * (I wrote "maybe" because trait constructors don't call super constructors).
+ * This argument is synthesized by `tools.nsc.ast.Trees.Template`.
+ *
+ * `inMixinPosition` indicates whether the reference appears after the first entry
+ * in the list of parents (and therefore cannot be a class).
+ *
+ * ***Return value and side effects***
+ *
+ * Returns a `TypeTree` representing a resolved parent type.
+ * If the typechecked parent reference implies a non-nullary and non-empty argument list,
+ * this argument list is attached to the returned value in a SuperArgsAttachment.
+ * The attachment is necessary for the subsequent typecheck to fix up a super constructor call
+ * in the body of the primary constructor (see `typedTemplate` for details).
+ *
+ * This method might invoke `typedPrimaryConstrBody`, hence it might cause the side effects
+ * described in the docs of that method. It might also attribute the Super(_, _) reference
+ * (if present) inside the primary constructor of `templ`.
+ *
+ * ***Example***
+ *
+ * For the following definition:
+ *
+ * class D extends {
+ * val x = 2
+ * val y = 4
+ * } with B(x)(3) with C(y) with T
+ *
+ * this method will be called six times:
+ *
+ * (3 times from the namer)
+ * typedParentType(Apply(Apply(Ident(B), List(Ident(x))), List(3)), templ, inMixinPosition = false)
+ * typedParentType(Apply(Ident(C), List(Ident(y))), templ, inMixinPosition = true)
+ * typedParentType(Apply(Ident(T), List()), templ, inMixinPosition = true)
+ *
+ * (3 times from the typer)
+ * <the same three calls>
+ */
+ private def typedParentType(encodedtpt: Tree, templ: Template, inMixinPosition: Boolean): Tree = {
+ val app = treeInfo.dissectApplied(encodedtpt)
+ val (treeInfo.Applied(core, targs, argss), decodedtpt) = (app, app.callee)
+ val argssAreTrivial = argss == Nil || argss == ListOfNil
+
+ // we cannot avoid cyclic references with `initialize` here, because when type macros arrive,
+ // we'll have to check the probe for isTypeMacro anyways.
+ // therefore I think it's reasonable to trade a more specific "inherits itself" error
+ // for a generic, yet understandable "cyclic reference" error
+ var probe = typedTypeConstructor(core.duplicate).tpe.typeSymbol
+ if (probe == null) probe = NoSymbol
+ probe.initialize
+
+ if (probe.isTrait || inMixinPosition) {
+ if (!argssAreTrivial) {
+ if (probe.isTrait) ConstrArgsInParentWhichIsTraitError(encodedtpt, probe)
+ else () // a class in a mixin position - this warrants an error in `validateParentClasses`
+ // therefore here we do nothing, e.g. don't check that the # of ctor arguments
+ // matches the # of ctor parameters or stuff like that
+ }
+ typedType(decodedtpt)
+ } else {
+ var supertpt = typedTypeConstructor(decodedtpt)
+ val supertparams = if (supertpt.hasSymbolField) supertpt.symbol.typeParams else Nil
+ if (supertparams.nonEmpty) {
+ typedPrimaryConstrBody(templ) {
+ val supertpe = PolyType(supertparams, appliedType(supertpt.tpe, supertparams map (_.tpeHK)))
+ val supercall = New(supertpe, mmap(argss)(_.duplicate))
+ val treeInfo.Applied(Select(ctor, nme.CONSTRUCTOR), _, _) = supercall
+ ctor setType supertpe // this is an essential hack, otherwise it will occasionally fail to typecheck
+ atPos(supertpt.pos.focus)(supercall)
+ } match {
+ case EmptyTree => MissingTypeArgumentsParentTpeError(supertpt)
+ case tpt => supertpt = TypeTree(tpt.tpe) setPos supertpt.pos // SI-7224: don't .focus positions of the TypeTree of a parent that exists in source
}
}
- if (supertpt.tpe.typeSymbol == AnyClass && firstParent.isTrait)
- supertpt.tpe = AnyRefClass.tpe
-
- // Determine
- // - supertparams: Missing type parameters from supertype
- // - supertpe: Given supertype, polymorphic in supertparams
- val supertparams = if (supertpt.hasSymbol) supertpt.symbol.typeParams else List()
- var supertpe = supertpt.tpe
- if (!supertparams.isEmpty)
- supertpe = PolyType(supertparams, appliedType(supertpe, supertparams map (_.tpeHK)))
-
- // A method to replace a super reference by a New in a supercall
- def transformSuperCall(scall: Tree): Tree = (scall: @unchecked) match {
- case Apply(fn, args) =>
- treeCopy.Apply(scall, transformSuperCall(fn), args map (_.duplicate))
- case Select(Super(_, _), nme.CONSTRUCTOR) =>
- treeCopy.Select(
- scall,
- atPos(supertpt.pos.focus)(New(TypeTree(supertpe)) setType supertpe),
- nme.CONSTRUCTOR)
- }
+ // this is the place where we tell the typer what argss should be used for the super call
+ // if argss are nullary or empty, then (see the docs for `typedPrimaryConstrBody`)
+ // the super call dummy is already good enough, so we don't need to do anything
+ if (argssAreTrivial) supertpt else supertpt updateAttachment SuperArgsAttachment(argss)
+ }
+ }
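
To keep the scaladoc example concrete: the shape typechecked here is an early-definitions template whose first parent takes value arguments. A compilable sketch under vanilla Scala (B and T are hypothetical placeholders; the scaladoc's additional C(y) parent would be rejected today, since only the first parent may be a class taking constructor arguments):

class B(x: Int)(n: Int)
trait T

class D extends {
  val x = 2 // early definition, initialized before B's constructor runs
} with B(1)(3) with T
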
+ /** Typechecks the mishmash of trees that happen to be stuffed into the primary constructor of a given template.
+ * Before commencing the typecheck, replaces the `pendingSuperCall` dummy with the result of `actualSuperCall`.
+ * `actualSuperCall` can return `EmptyTree`, in which case the dummy is replaced with a literal unit.
+ *
+ * ***Return value and side effects***
+ *
+ * If a super call is present in the primary constructor and is not erased by the transform, returns it typechecked.
+ * Otherwise (e.g. if the primary constructor is missing or the super call isn't there) returns `EmptyTree`.
+ *
+ * As a side effect, this method attributes the underlying fields of early vals.
+ * Early vals aren't typechecked anywhere else, so it's essential to call `typedPrimaryConstrBody`
+ * at least once per definition. It'd be great to disentangle this logic at some point.
+ *
+ * ***Example***
+ *
+ * For the following definition:
+ *
+ * class D extends {
+ * val x = 2
+ * val y = 4
+ * } with B(x)(3) with C(y) with T
+ *
+ * the primary constructor of `templ` will be:
+ *
+ * Block(List(
+ * ValDef(NoMods, x, TypeTree(), 2)
+ * ValDef(NoMods, y, TypeTree(), 4)
+ * global.pendingSuperCall,
+ * Literal(Constant(())))
+ *
+ * Note the `pendingSuperCall` part. This is the representation of a fill-me-in-later supercall dummy,
+ * which encodes the fact that supercall argss are unknown during parsing and need to be transplanted
+ * from one of the parent types. Read more about why the argss are unknown in `tools.nsc.ast.Trees.Template`.
+ */
+ private def typedPrimaryConstrBody(templ: Template)(actualSuperCall: => Tree): Tree =
treeInfo.firstConstructor(templ.body) match {
- case constr @ DefDef(_, _, _, vparamss, _, cbody @ Block(cstats, cunit)) =>
- // Convert constructor body to block in environment and typecheck it
+ case ctor @ DefDef(_, _, _, vparamss, _, cbody @ Block(cstats, cunit)) =>
val (preSuperStats, superCall) = {
val (stats, rest) = cstats span (x => !treeInfo.isSuperConstrCall(x))
(stats map (_.duplicate), if (rest.isEmpty) EmptyTree else rest.head.duplicate)
}
- val cstats1 = if (superCall == EmptyTree) preSuperStats else preSuperStats :+ superCall
- val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall match {
- case Apply(_, _) if supertparams.nonEmpty => transformSuperCall(superCall)
- case _ => cunit.duplicate
- })
- val outercontext = context.outer
-
+ val superCall1 = (superCall match {
+ case global.pendingSuperCall => actualSuperCall
+ case EmptyTree => EmptyTree
+ }) orElse cunit
+ val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall1)
+ val clazz = context.owner
assert(clazz != NoSymbol, templ)
- val cscope = outercontext.makeNewScope(constr, outercontext.owner)
- val cbody2 = newTyper(cscope) // called both during completion AND typing.
- .typePrimaryConstrBody(clazz,
- cbody1, supertparams, clazz.unsafeTypeParams, vparamss map (_.map(_.duplicate)))
-
- superCall match {
- case Apply(_, _) =>
- val treeInfo.Applied(_, _, argss) = superCall
- val sarg = argss.flatten.headOption.getOrElse(EmptyTree)
- if (sarg != EmptyTree && supertpe.typeSymbol != firstParent)
- ConstrArgsInTraitParentTpeError(sarg, firstParent)
- if (!supertparams.isEmpty)
- supertpt = TypeTree(cbody2.tpe) setPos supertpt.pos
- case _ =>
- if (!supertparams.isEmpty)
- MissingTypeArgumentsParentTpeError(supertpt)
+ val cscope = context.outer.makeNewScope(ctor, context.outer.owner)
+ val cbody2 = { // called both during completion AND typing.
+ val typer1 = newTyper(cscope)
+ // XXX: see about using the class's symbol....
+ clazz.unsafeTypeParams foreach (sym => typer1.context.scope.enter(sym))
+ typer1.namer.enterValueParams(vparamss map (_.map(_.duplicate)))
+ typer1.typed(cbody1)
}
val preSuperVals = treeInfo.preSuperFields(templ.body)
if (preSuperVals.isEmpty && preSuperStats.nonEmpty)
- debugwarn("Wanted to zip empty presuper val list with " + preSuperStats)
+ devWarning("Wanted to zip empty presuper val list with " + preSuperStats)
else
- map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe)
+ map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt setType ldef.symbol.tpe)
+ if (superCall1 == cunit) EmptyTree
+ else cbody2 match {
+ case Block(_, expr) => expr
+ case tree => tree
+ }
case _ =>
- if (!supertparams.isEmpty)
- MissingTypeArgumentsParentTpeError(supertpt)
+ EmptyTree
}
-/* experimental: early types as type arguments
- val hasEarlyTypes = templ.body exists (treeInfo.isEarlyTypeDef)
- val earlyMap = new EarlyMap(clazz)
- List.mapConserve(supertpt :: mixins){ tpt =>
- val tpt1 = checkNoEscaping.privates(clazz, tpt)
- if (hasEarlyTypes) tpt1 else tpt1 setType earlyMap(tpt1.tpe)
- }
-*/
- //Console.println("parents("+clazz") = "+supertpt :: mixins);//DEBUG
+ /** Makes sure that the first type tree in the list of parent types is always a class.
+ * If the first parent is a trait, prepend its supertype to the list until it's a class.
+ */
+ private def normalizeFirstParent(parents: List[Tree]): List[Tree] = parents match {
+ case first :: rest if treeInfo.isTraitRef(first) =>
+ def explode(supertpt: Tree, acc: List[Tree]): List[Tree] = {
+ if (treeInfo.isTraitRef(supertpt)) {
+ val supertpt1 = typedType(supertpt)
+ if (!supertpt1.isErrorTyped) {
+ val supersupertpt = TypeTree(supertpt1.tpe.firstParent) setPos supertpt.pos.focus
+ return explode(supersupertpt, supertpt1 :: acc)
+ }
+ }
+ if (supertpt.tpe.typeSymbol == AnyClass) supertpt setType AnyRefClass.tpe
+ supertpt :: acc
+ }
+ explode(first, Nil) ++ rest
+ case _ => parents
+ }
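
normalizeFirstParent repeatedly replaces a leading trait by that trait's own first parent until a class heads the list, accumulating the traits it passed over. A simplified standalone model of that recursion over plain names (the parent table and names are made up; only the shape of explode is taken from the code above):

object NormalizeFirstParentDemo {
  // Hypothetical hierarchy: trait T2 extends T1, trait T1 extends class Base.
  val firstParentOf = Map("T2" -> "T1", "T1" -> "Base")
  val isTrait       = Set("T1", "T2")

  def normalize(parents: List[String]): List[String] = parents match {
    case first :: rest if isTrait(first) =>
      // Walk up firstParentOf until a class is reached, keeping the traits we passed.
      def explode(p: String, acc: List[String]): List[String] =
        if (isTrait(p)) explode(firstParentOf(p), p :: acc)
        else p :: acc
      explode(first, Nil) ++ rest
    case _ => parents
  }

  def main(args: Array[String]): Unit = {
    println(normalize(List("T2", "Serializable")))
    // List(Base, T1, T2, Serializable) -- a class is now the first parent
  }
}
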
- // Certain parents are added in the parser before it is known whether
- // that class also declared them as parents. For instance, this is an
- // error unless we take corrective action here:
- //
- // case class Foo() extends Serializable
- //
- // So we strip the duplicates before typer.
- def fixDuplicates(remaining: List[Tree]): List[Tree] = remaining match {
+ /** Certain parents are added in the parser before it is known whether
+ * that class also declared them as parents. For instance, this is an
+ * error unless we take corrective action here:
+ *
+ * case class Foo() extends Serializable
+ *
+ * So we strip the duplicates before typer.
+ */
+ private def fixDuplicateSyntheticParents(parents: List[Tree]): List[Tree] = parents match {
case Nil => Nil
case x :: xs =>
val sym = x.symbol
- x :: fixDuplicates(
+ x :: fixDuplicateSyntheticParents(
if (isPossibleSyntheticParent(sym)) xs filterNot (_.symbol == sym)
else xs
)
}
- fixDuplicates(supertpt :: mixins) mapConserve (tpt => checkNoEscaping.privates(clazz, tpt))
- }
- catch {
+ def typedParentTypes(templ: Template): List[Tree] = templ.parents match {
+ case Nil => List(atPos(templ.pos)(TypeTree(AnyRefClass.tpe)))
+ case first :: rest =>
+ try {
+ val supertpts = fixDuplicateSyntheticParents(normalizeFirstParent(
+ typedParentType(first, templ, inMixinPosition = false) +:
+ (rest map (typedParentType(_, templ, inMixinPosition = true)))))
+
+ // if inferring the targs of a super call is required,
+ // typedParentType calls typedPrimaryConstrBody to do the inferring typecheck
+ // as a side effect, that typecheck also assigns types to the fields underlying early vals
+ // however if inference is not required, the typecheck doesn't happen
+ // and therefore early fields have their type trees not assigned
+ // here we detect this situation and take preventive measures
+ if (treeInfo.hasUntypedPreSuperFields(templ.body))
+ typedPrimaryConstrBody(templ)(EmptyTree)
+
+ supertpts mapConserve (tpt => checkNoEscaping.privates(context.owner, tpt))
+ } catch {
case ex: TypeError =>
// fallback in case of cyclic errors
// @H none of the tests enter here but I couldn't rule it out
+ // upd. @E when a definition inherits itself, we end up here
+ // because `typedParentType` triggers `initialize` for parent types symbols
log("Type error calculating parents in template " + templ)
log("Error: " + ex)
ParentTypesError(templ, ex)
List(TypeTree(AnyRefClass.tpe))
}
+ }
/** <p>Check that</p>
* <ul>
@@ -1689,9 +1743,6 @@ trait Typers extends Modes with Adaptations with Tags {
!selfType.isErroneous &&
!parent.tpe.isErroneous)
{
- //Console.println(context.owner);//DEBUG
- //Console.println(context.owner.unsafeTypeParams);//DEBUG
- //Console.println(List.fromArray(context.owner.info.closure));//DEBUG
pending += ParentSelfTypeConformanceError(parent, selfType)
if (settings.explaintypes.value) explainTypes(selfType, parent.tpe.typeOfThis)
}
@@ -1707,13 +1758,6 @@ trait Typers extends Modes with Adaptations with Tags {
for (p <- parents) validateParentClass(p, superclazz)
}
-/*
- if (settings.Xshowcls.value != "" &&
- settings.Xshowcls.value == context.owner.fullName)
- println("INFO "+context.owner+
- ", baseclasses = "+(context.owner.info.baseClasses map (_.fullName))+
- ", lin = "+(context.owner.info.baseClasses map (context.owner.thisType.baseType)))
-*/
pending.foreach(ErrorUtils.issueTypeError)
}
@@ -1737,29 +1781,28 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- /**
- * @param cdef ...
- * @return ...
- */
def typedClassDef(cdef: ClassDef): Tree = {
-// attributes(cdef)
val clazz = cdef.symbol
val typedMods = typedModifiers(cdef.mods)
assert(clazz != NoSymbol, cdef)
reenterTypeParams(cdef.tparams)
val tparams1 = cdef.tparams mapConserve (typedTypeDef)
val impl1 = typerReportAnyContextErrors(context.make(cdef.impl, clazz, newScope)) {
- _.typedTemplate(cdef.impl, parentTypes(cdef.impl))
+ _.typedTemplate(cdef.impl, typedParentTypes(cdef.impl))
}
val impl2 = finishMethodSynthesis(impl1, clazz, context)
if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass)
checkEphemeral(clazz, impl2.body)
- if ((clazz != ClassfileAnnotationClass) &&
- (clazz isNonBottomSubClass ClassfileAnnotationClass))
- restrictionWarning(cdef.pos, unit,
- "subclassing Classfile does not\n"+
- "make your annotation visible at runtime. If that is what\n"+
- "you want, you must write the annotation class in Java.")
+
+ if ((clazz isNonBottomSubClass ClassfileAnnotationClass) && (clazz != ClassfileAnnotationClass)) {
+ if (!clazz.owner.isPackageClass)
+ unit.error(clazz.pos, "inner classes cannot be classfile annotations")
+ else restrictionWarning(cdef.pos, unit,
+ """|subclassing Classfile does not
+ |make your annotation visible at runtime. If that is what
+ |you want, you must write the annotation class in Java.""".stripMargin)
+ }
+
if (!isPastTyper) {
for (ann <- clazz.getAnnotation(DeprecatedAttr)) {
val m = companionSymbolOf(clazz, context)
@@ -1771,10 +1814,6 @@ trait Typers extends Modes with Adaptations with Tags {
.setType(NoType)
}
- /**
- * @param mdef ...
- * @return ...
- */
def typedModuleDef(mdef: ModuleDef): Tree = {
// initialize all constructors of the linked class: the type completer (Namer.methodSig)
// might add default getters to this object. example: "object T; class T(x: Int = 1)"
@@ -1793,7 +1832,7 @@ trait Typers extends Modes with Adaptations with Tags {
)
val impl1 = typerReportAnyContextErrors(context.make(mdef.impl, clazz, newScope)) {
_.typedTemplate(mdef.impl, {
- parentTypes(mdef.impl) ++ (
+ typedParentTypes(mdef.impl) ++ (
if (noSerializable) Nil
else {
clazz.makeSerializable()
@@ -1823,9 +1862,7 @@ trait Typers extends Modes with Adaptations with Tags {
def pkgObjectWarning(m : Symbol, mdef : ModuleDef, restricted : String) = {
val pkgName = mdef.symbol.ownerChain find (_.isPackage) map (_.decodedName) getOrElse mdef.symbol.toString
- val pos = if (m.pos.isDefined) m.pos else mdef.pos
- debugwarn(s"${m} should be placed directly in package ${pkgName} instead of package object ${pkgName}. Under some circumstances companion objects and case classes in package objects can fail to recompile. See https://issues.scala-lang.org/browse/SI-5954.")
- debugwarn(pos.lineContent + (if (pos.isDefined) " " * (pos.column - 1) + "^" else ""))
+ context.warning(if (m.pos.isDefined) m.pos else mdef.pos, s"${m} should be placed directly in package ${pkgName} instead of package object ${pkgName}. Under some circumstances companion objects and case classes in package objects can fail to recompile. See https://issues.scala-lang.org/browse/SI-5954.")
}
}
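// Illustrative shape of the code this warning targets (not from this patch):
package object pkg {
  case class Config(n: Int)  // SI-5954: may fail to recompile; prefer defining it directly in package pkg
}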
@@ -1860,17 +1897,11 @@ trait Typers extends Modes with Adaptations with Tags {
if (txt eq context) namer.enterSym(tree)
else newNamer(txt).enterSym(tree)
- /**
- * @param templ ...
- * @param parents1 ...
- * <li> <!-- 2 -->
- * Check that inner classes do not inherit from Annotation
- * </li>
- * @return ...
+ /** <!-- 2 --> Check that inner classes do not inherit from Annotation
*/
def typedTemplate(templ: Template, parents1: List[Tree]): Template = {
val clazz = context.owner
- clazz.annotations.map(_.completeInfo)
+ clazz.annotations.map(_.completeInfo())
if (templ.symbol == NoSymbol)
templ setSymbol clazz.newLocalDummy(templ.pos)
val self1 = templ.self match {
@@ -1897,19 +1928,34 @@ trait Typers extends Modes with Adaptations with Tags {
// the following is necessary for templates generated later
assert(clazz.info.decls != EmptyScope, clazz)
enterSyms(context.outer.make(templ, clazz, clazz.info.decls), templ.body)
+ if (!templ.isErrorTyped) // if `parentTypes` has invalidated the template, don't validate it anymore
validateParentClasses(parents1, selfType)
if (clazz.isCase)
validateNoCaseAncestor(clazz)
+ if (clazz.isTrait && hasSuperArgs(parents1.head))
+ ConstrArgsInParentOfTraitError(parents1.head, clazz)
- if ((clazz isSubClass ClassfileAnnotationClass) && !clazz.owner.isPackageClass)
+ if ((clazz isSubClass ClassfileAnnotationClass) && !clazz.isTopLevel)
unit.error(clazz.pos, "inner classes cannot be classfile annotations")
if (!phase.erasedTypes && !clazz.info.resultType.isError) // @S: prevent crash for duplicated type members
checkFinitary(clazz.info.resultType.asInstanceOf[ClassInfoType])
+ val body = {
val body =
if (isPastTyper || reporter.hasErrors) templ.body
else templ.body flatMap rewrappingWrapperTrees(namer.addDerivedTrees(Typer.this, _))
+ val primaryCtor = treeInfo.firstConstructor(body)
+ val primaryCtor1 = primaryCtor match {
+ case DefDef(_, _, _, _, _, Block(earlyVals :+ global.pendingSuperCall, unit)) =>
+ val argss = superArgs(parents1.head) getOrElse Nil
+ val pos = wrappingPos(parents1.head.pos, argss.flatten)
+ val superCall = atPos(pos)(PrimarySuperCall(argss))
+ deriveDefDef(primaryCtor)(block => Block(earlyVals :+ superCall, unit) setPos pos) setPos pos
+ case _ => primaryCtor
+ }
+ body mapConserve { case `primaryCtor` => primaryCtor1; case stat => stat }
+ }
val body1 = typedStats(body, templ.symbol)
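// User-level cases for the trait check and the super-call rewriting above
// (illustrative sketch, not part of this patch):
class P(val n: Int)
class C(x: Int) extends P(x * 2)  // the pending super call in C's primary ctor becomes the real call P.<init>(x * 2)
// trait T extends P(1)           // rejected: a trait may not pass constructor arguments to its parent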
@@ -1922,28 +1968,24 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- treeCopy.Template(templ, parents1, self1, body1) setType clazz.tpe
+ treeCopy.Template(templ, parents1, self1, body1) setType clazz.tpe_*
}
/** Remove definition annotations from modifiers (they have been saved
- * into the symbol's ``annotations'' in the type completer / namer)
+ * into the symbol's `annotations` in the type completer / namer)
*
* However reification does need annotation definitions to proceed.
* Unfortunately, AnnotationInfo doesn't provide enough info to reify it in general case.
* The biggest problem is with the "atp: Type" field, which cannot be reified in some situations
* that involve locally defined annotations. See more about that in Reifiers.scala.
*
- * That's why the original tree gets saved into ``original'' field of AnnotationInfo (happens elsewhere).
+ * That's why the original tree gets saved into `original` field of AnnotationInfo (happens elsewhere).
* The field doesn't get pickled/unpickled and exists only during a single compilation run.
* This simultaneously allows us to reify annotations and to preserve backward compatibility.
*/
def typedModifiers(mods: Modifiers): Modifiers =
mods.copy(annotations = Nil) setPositions mods.positions
- /**
- * @param vdef ...
- * @return ...
- */
def typedValDef(vdef: ValDef): ValDef = {
val sym = vdef.symbol
val valDefTyper = {
@@ -1960,7 +2002,7 @@ trait Typers extends Modes with Adaptations with Tags {
val sym = vdef.symbol.initialize
val typedMods = typedModifiers(vdef.mods)
- sym.annotations.map(_.completeInfo)
+ sym.annotations.map(_.completeInfo())
val tpt1 = checkNoEscaping.privates(sym, typedType(vdef.tpt))
checkNonCyclic(vdef, tpt1)
@@ -1995,10 +2037,6 @@ trait Typers extends Modes with Adaptations with Tags {
}
/** Enter all aliases of local parameter accessors.
- *
- * @param clazz ...
- * @param vparamss ...
- * @param rhs ...
*/
def computeParamAliases(clazz: Symbol, vparamss: List[List[ValDef]], rhs: Tree) {
debuglog(s"computing param aliases for $clazz:${clazz.primaryConstructor.tpe}:$rhs")
@@ -2161,51 +2199,6 @@ trait Typers extends Modes with Adaptations with Tags {
failStruct(ddef.tpt.pos, "a user-defined value class", where = "Result type")
}
- def typedUseCase(useCase: UseCase) {
- def stringParser(str: String): syntaxAnalyzer.Parser = {
- val file = new BatchSourceFile(context.unit.source.file, str) {
- override def positionInUltimateSource(pos: Position) = {
- pos.withSource(context.unit.source, useCase.pos.start)
- }
- }
- val unit = new CompilationUnit(file)
- new syntaxAnalyzer.UnitParser(unit)
- }
- val trees = stringParser(useCase.body+";").nonLocalDefOrDcl
- val enclClass = context.enclClass.owner
- def defineAlias(name: Name) =
- if (context.scope.lookup(name) == NoSymbol) {
- lookupVariable(name.toString.substring(1), enclClass) match {
- case Some(repl) =>
- silent(_.typedTypeConstructor(stringParser(repl).typ())) match {
- case SilentResultValue(tpt) =>
- val alias = enclClass.newAliasType(name.toTypeName, useCase.pos)
- val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias)
- val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe)))
- alias setInfo newInfo
- context.scope.enter(alias)
- case _ =>
- }
- case _ =>
- }
- }
- for (tree <- trees; t <- tree)
- t match {
- case Ident(name) if name startsWith '$' => defineAlias(name)
- case _ =>
- }
- useCase.aliases = context.scope.toList
- namer.enterSyms(trees)
- typedStats(trees, NoSymbol)
- useCase.defined = context.scope.toList filterNot (useCase.aliases contains _)
- if (settings.debug.value)
- useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe)))
- }
-
- /**
- * @param ddef ...
- * @return ...
- */
def typedDefDef(ddef: DefDef): DefDef = {
val meth = ddef.symbol.initialize
@@ -2224,13 +2217,13 @@ trait Typers extends Modes with Adaptations with Tags {
val tparams1 = ddef.tparams mapConserve typedTypeDef
val vparamss1 = ddef.vparamss mapConserve (_ mapConserve typedValDef)
- meth.annotations.map(_.completeInfo)
+ meth.annotations.map(_.completeInfo())
for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1)
if (isRepeatedParamType(vparam1.symbol.tpe))
StarParamNotLastError(vparam1)
- var tpt1 = checkNoEscaping.privates(meth, typedType(ddef.tpt))
+ val tpt1 = checkNoEscaping.privates(meth, typedType(ddef.tpt))
checkNonCyclic(ddef, tpt1)
ddef.tpt.setType(tpt1.tpe)
val typedMods = typedModifiers(ddef.mods)
@@ -2242,7 +2235,7 @@ trait Typers extends Modes with Adaptations with Tags {
meth.owner.isAnonOrRefinementClass))
InvalidConstructorDefError(ddef)
typed(ddef.rhs)
- } else if (meth.isTermMacro) {
+ } else if (meth.isMacro) {
// typechecking macro bodies is sort of unconventional
// that's why we employ our custom typing scheme orchestrated outside of the typer
transformedOr(ddef.rhs, typedMacroBody(this, ddef))
@@ -2298,7 +2291,7 @@ trait Typers extends Modes with Adaptations with Tags {
reenterTypeParams(tdef.tparams)
val tparams1 = tdef.tparams mapConserve typedTypeDef
val typedMods = typedModifiers(tdef.mods)
- tdef.symbol.annotations.map(_.completeInfo)
+ tdef.symbol.annotations.map(_.completeInfo())
// @specialized should not be pickled when compiling with -no-specialize
if (settings.nospecialization.value && currentRun.compiles(tdef.symbol)) {
@@ -2334,7 +2327,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (!nme.isLoopHeaderLabel(ldef.symbol.name) || isPastTyper) {
val restpe = ldef.symbol.tpe.resultType
val rhs1 = typed(ldef.rhs, restpe)
- ldef.params foreach (param => param.tpe = param.symbol.tpe)
+ ldef.params foreach (param => param setType param.symbol.tpe)
deriveLabelDef(ldef)(_ => rhs1) setType restpe
}
else {
@@ -2342,26 +2335,20 @@ trait Typers extends Modes with Adaptations with Tags {
val rhs1 = typed(ldef.rhs)
val restpe = rhs1.tpe
if (restpe == initpe) { // stable result, no need to check again
- ldef.params foreach (param => param.tpe = param.symbol.tpe)
+ ldef.params foreach (param => param setType param.symbol.tpe)
treeCopy.LabelDef(ldef, ldef.name, ldef.params, rhs1) setType restpe
} else {
context.scope.unlink(ldef.symbol)
val sym2 = namer.enterInScope(
context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), restpe))
val rhs2 = typed(resetAllAttrs(ldef.rhs), restpe)
- ldef.params foreach (param => param.tpe = param.symbol.tpe)
+ ldef.params foreach (param => param setType param.symbol.tpe)
deriveLabelDef(ldef)(_ => rhs2) setSymbol sym2 setType restpe
}
}
}
- /**
- * @param block ...
- * @param mode ...
- * @param pt ...
- * @return ...
- */
- def typedBlock(block: Block, mode: Int, pt: Type): Block = {
+ def typedBlock(block: Block, mode: Mode, pt: Type): Block = {
val syntheticPrivates = new ListBuffer[Symbol]
try {
namer.enterSyms(block.stats)
@@ -2423,7 +2410,7 @@ trait Typers extends Modes with Adaptations with Tags {
case _ => stat::Nil
})
val stats2 = typedStats(stats1, context.owner)
- val expr1 = typed(block.expr, mode & ~(FUNmode | QUALmode), pt)
+ val expr1 = typed(block.expr, mode &~ (FUNmode | QUALmode), pt)
treeCopy.Block(block, stats2, expr1)
.setType(if (treeInfo.isExprSafeToInline(block)) expr1.tpe else expr1.tpe.deconst)
} finally {
@@ -2433,12 +2420,6 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- /**
- * @param cdef ...
- * @param pattpe ...
- * @param pt ...
- * @return ...
- */
def typedCase(cdef: CaseDef, pattpe: Type, pt: Type): CaseDef = {
// verify no _* except in last position
for (Apply(_, xs) <- cdef.pat ; x <- xs dropRight 1 ; if treeInfo isStar x)
@@ -2455,83 +2436,58 @@ trait Typers extends Modes with Adaptations with Tags {
if (pat1.tpe.paramSectionCount > 0)
pat1 setType pat1.tpe.finalResultType
- if (forInteractive) {
- for (bind @ Bind(name, _) <- cdef.pat)
- if (name.toTermName != nme.WILDCARD && bind.symbol != null && bind.symbol != NoSymbol)
- namer.enterIfNotThere(bind.symbol)
- }
+ for (bind @ Bind(name, _) <- cdef.pat)
+ if (name.toTermName != nme.WILDCARD && bind.symbol != null && bind.symbol != NoSymbol)
+ namer.enterIfNotThere(bind.symbol)
val guard1: Tree = if (cdef.guard == EmptyTree) EmptyTree
else typed(cdef.guard, BooleanClass.tpe)
var body1: Tree = typed(cdef.body, pt)
- val contextWithTypeBounds = context.nextEnclosing(_.tree.isInstanceOf[CaseDef])
- if (contextWithTypeBounds.savedTypeBounds.nonEmpty) {
- body1.tpe = contextWithTypeBounds restoreTypeBounds body1.tpe
-
+ if (context.enclosingCaseDef.savedTypeBounds.nonEmpty) {
+ body1 modifyType context.enclosingCaseDef.restoreTypeBounds
// insert a cast if something typechecked under the GADT constraints,
            // but not in real life (i.e., now that we've reset the method's type skolems'
// infos back to their pre-GADT-constraint state)
if (isFullyDefined(pt) && !(body1.tpe <:< pt))
- body1 = typedPos(body1.pos)(gen.mkCast(body1, pt.normalize))
-
+ body1 = typedPos(body1.pos)(gen.mkCast(body1, pt.dealiasWiden))
}
// body1 = checkNoEscaping.locals(context.scope, pt, body1)
treeCopy.CaseDef(cdef, pat1, guard1, body1) setType body1.tpe
}
- // undo adaptConstrPattern's evil deeds, as they confuse the old pattern matcher
- // the flags are used to avoid accidentally deskolemizing unrelated skolems of skolems
- object deskolemizeGADTSkolems extends TypeMap {
- def apply(tp: Type): Type = mapOver(tp) match {
- case TypeRef(pre, sym, args) if sym.isGADTSkolem =>
- typeRef(NoPrefix, sym.deSkolemize, args)
- case tp1 => tp1
- }
- }
-
def typedCases(cases: List[CaseDef], pattp: Type, pt: Type): List[CaseDef] =
cases mapConserve { cdef =>
newTyper(context.makeNewScope(cdef, context.owner)).typedCase(cdef, pattp, pt)
}
- def adaptCase(cdef: CaseDef, mode: Int, tpe: Type): CaseDef = deriveCaseDef(cdef)(adapt(_, mode, tpe))
+ def adaptCase(cdef: CaseDef, mode: Mode, tpe: Type): CaseDef = deriveCaseDef(cdef)(adapt(_, mode, tpe))
def ptOrLub(tps: List[Type], pt: Type ) = if (isFullyDefined(pt)) (pt, false) else weakLub(tps map (_.deconst))
def ptOrLubPacked(trees: List[Tree], pt: Type) = if (isFullyDefined(pt)) (pt, false) else weakLub(trees map (c => packedType(c, context.owner).deconst))
// takes untyped sub-trees of a match and type checks them
- def typedMatch(selector: Tree, cases: List[CaseDef], mode: Int, pt: Type, tree: Tree = EmptyTree): Match = {
+ def typedMatch(selector: Tree, cases: List[CaseDef], mode: Mode, pt: Type, tree: Tree = EmptyTree): Match = {
val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
val selectorTp = packCaptured(selector1.tpe.widen).skolemizeExistential(context.owner, selector)
val casesTyped = typedCases(cases, selectorTp, pt)
- val (resTp, needAdapt) =
- if (opt.virtPatmat) ptOrLubPacked(casesTyped, pt)
- else ptOrLub(casesTyped map (_.tpe), pt)
+ val (resTp, needAdapt) = ptOrLubPacked(casesTyped, pt)
val casesAdapted = if (!needAdapt) casesTyped else casesTyped map (adaptCase(_, mode, resTp))
- val matchTyped = treeCopy.Match(tree, selector1, casesAdapted) setType resTp
- if (!newPatternMatching) // TODO: remove this in 2.11 -- only needed for old pattern matcher
- new TypeMapTreeSubstituter(deskolemizeGADTSkolems).traverse(matchTyped)
- matchTyped
+ treeCopy.Match(tree, selector1, casesAdapted) setType resTp
}
- // match has been typed -- virtualize it if we're feeling experimental
- // (virtualized matches are expanded during type checking so they have the full context available)
- // otherwise, do nothing: matches are translated during phase `patmat` (unless -Xoldpatmat)
- def virtualizedMatch(match_ : Match, mode: Int, pt: Type) = {
- import patmat.{vpmName, PureMatchTranslator, OptimizingMatchTranslator}
+ // match has been typed -- virtualize it during type checking so the full context is available
+ def virtualizedMatch(match_ : Match, mode: Mode, pt: Type) = {
+ import patmat.{ vpmName, PureMatchTranslator }
// TODO: add fallback __match sentinel to predef
val matchStrategy: Tree =
- if (!(newPatternMatching && opt.experimental && context.isNameInScope(vpmName._match))) null // fast path, avoiding the next line if there's no __match to be seen
- else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match), EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
- case SilentResultValue(ms) => ms
- case _ => null
- }
+ if (!(newPatternMatching && settings.Xexperimental.value && context.isNameInScope(vpmName._match))) null // fast path, avoiding the next line if there's no __match to be seen
+ else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match), EXPRmode, WildcardType), reportAmbiguousErrors = false) orElse (_ => null)
if (matchStrategy ne null) // virtualize
typed((new PureMatchTranslator(this.asInstanceOf[patmat.global.analyzer.Typer] /*TODO*/, matchStrategy)).translateMatch(match_), mode, pt)
@@ -2561,11 +2517,9 @@ trait Typers extends Modes with Adaptations with Tags {
* an alternative TODO: add partial function AST node or equivalent and get rid of this synthesis --> do everything in uncurry (or later)
* however, note that pattern matching codegen is designed to run *before* uncurry
*/
- def synthesizePartialFunction(paramName: TermName, paramPos: Position, tree: Tree, mode: Int, pt0: Type): Tree = {
- assert(pt0.typeSymbol == PartialFunctionClass, s"PartialFunction synthesis for match in $tree requires PartialFunction expected type, but got $pt0.")
-
- val pt = deskolemizeGADTSkolems(pt0)
- val targs = pt.normalize.typeArgs
+ def synthesizePartialFunction(paramName: TermName, paramPos: Position, tree: Tree, mode: Mode, pt: Type): Tree = {
+ assert(pt.typeSymbol == PartialFunctionClass, s"PartialFunction synthesis for match in $tree requires PartialFunction expected type, but got $pt.")
+ val targs = pt.dealiasWiden.typeArgs
// if targs.head isn't fully defined, we can translate --> error
targs match {
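// Rough user-level picture of the synthesis performed below (simplified
// sketch, not the compiler's exact output, which uses fresh symbols):
object PartialFunctionDemo {
  val pf: PartialFunction[Int, String] = { case 1 => "one" }
  // behaves approximately like:
  val pfExpanded = new scala.runtime.AbstractPartialFunction[Int, String] {
    def isDefinedAt(x: Int): Boolean = x match { case 1 => true; case _ => false }
    override def applyOrElse[A1 <: Int, B1 >: String](x: A1, default: A1 => B1): B1 =
      x match { case 1 => "one"; case _ => default(x) }
  }
}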
@@ -2590,7 +2544,7 @@ trait Typers extends Modes with Adaptations with Tags {
val Match(sel, cases) = tree
// need to duplicate the cases before typing them to generate the apply method, or the symbols will be all messed up
- val casesTrue = cases map (c => deriveCaseDef(c)(x => atPos(x.pos.focus)(TRUE_typed)).duplicate.asInstanceOf[CaseDef])
+ val casesTrue = cases map (c => deriveCaseDef(c)(x => atPos(x.pos.focus)(TRUE)).duplicate.asInstanceOf[CaseDef])
// must generate a new tree every time
def selector: Tree = gen.mkUnchecked(
@@ -2709,7 +2663,7 @@ trait Typers extends Modes with Adaptations with Tags {
methodBodyTyper.context.scope enter paramSym
methodSym setInfo MethodType(List(paramSym), BooleanClass.tpe)
- val defaultCase = mkDefaultCase(FALSE_typed)
+ val defaultCase = mkDefaultCase(FALSE)
val match_ = methodBodyTyper.typedMatch(selector, casesTrue :+ defaultCase, mode, BooleanClass.tpe)
DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, BooleanClass.tpe))
@@ -2757,7 +2711,7 @@ trait Typers extends Modes with Adaptations with Tags {
members foreach (m => anonClass.info.decls enter m.symbol)
val typedBlock = typedPos(tree.pos, mode, pt) {
- Block(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, members, tree.pos.focus), atPos(tree.pos.focus)(
+ Block(ClassDef(anonClass, NoMods, ListOfNil, members, tree.pos.focus), atPos(tree.pos.focus)(
Apply(Select(New(Ident(anonClass.name).setSymbol(anonClass)), nme.CONSTRUCTOR), List())
))
}
@@ -2769,24 +2723,17 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
-
- /**
- * @param fun ...
- * @param mode ...
- * @param pt ...
- * @return ...
- */
- private def typedFunction(fun: Function, mode: Int, pt: Type): Tree = {
+ private def typedFunction(fun: Function, mode: Mode, pt: Type): Tree = {
val numVparams = fun.vparams.length
if (numVparams > definitions.MaxFunctionArity)
return MaxFunctionArityError(fun)
def decompose(pt: Type): (Symbol, List[Type], Type) =
if ((isFunctionType(pt) || (pt.typeSymbol == PartialFunctionClass && numVparams == 1 && fun.body.isInstanceOf[Match])) && // see bug901 for a reason why next conditions are needed
- ( pt.normalize.typeArgs.length - 1 == numVparams
+ ( pt.dealiasWiden.typeArgs.length - 1 == numVparams
|| fun.vparams.exists(_.tpt.isEmpty)
))
- (pt.typeSymbol, pt.normalize.typeArgs.init, pt.normalize.typeArgs.last)
+ (pt.typeSymbol, pt.dealiasWiden.typeArgs.init, pt.dealiasWiden.typeArgs.last)
else
(FunctionClass(numVparams), fun.vparams map (x => NoType), WildcardType)
@@ -2801,15 +2748,13 @@ trait Typers extends Modes with Adaptations with Tags {
else {
fun match {
case etaExpansion(vparams, fn, args) =>
- silent(_.typed(fn, forFunMode(mode), pt)) match {
- case SilentResultValue(fn1) if context.undetparams.isEmpty =>
+ silent(_.typed(fn, mode.forFunMode, pt)) filter (_ => context.undetparams.isEmpty) map { fn1 =>
              // if context.undetparams is not empty, the function was polymorphic,
// so we need the missing arguments to infer its type. See #871
//println("typing eta "+fun+":"+fn1.tpe+"/"+context.undetparams)
val ftpe = normalize(fn1.tpe) baseType FunctionClass(numVparams)
if (isFunctionType(ftpe) && isFullyDefined(ftpe))
return typedFunction(fun, mode, ftpe)
- case _ =>
}
case _ =>
}
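// Illustrative eta-expansion case for the branch above (assumed example, see
// the SI-871 reference in the comment): with no expected type, the parameter
// type of the expansion is recovered from the method's own type.
object EtaDemo {
  def twice(x: Int): Int = x * 2
  val f = twice _  // typed as the function Int => Int
}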
@@ -2838,16 +2783,13 @@ trait Typers extends Modes with Adaptations with Tags {
if (context.retyping) context.scope enter vparam.symbol
vparam.symbol
}
- val vparams = fun.vparams mapConserve (typedValDef)
- // for (vparam <- vparams) {
- // checkNoEscaping.locals(context.scope, WildcardType, vparam.tpt); ()
- // }
+ val vparams = fun.vparams mapConserve typedValDef
val formals = vparamSyms map (_.tpe)
val body1 = typed(fun.body, respt)
val restpe = packedType(body1, fun.symbol).deconst.resultType
- val funtpe = typeRef(clazz.tpe.prefix, clazz, formals :+ restpe)
- // body = checkNoEscaping.locals(context.scope, restpe, body)
- treeCopy.Function(fun, vparams, body1).setType(funtpe)
+ val funtpe = appliedType(clazz, formals :+ restpe: _*)
+
+ treeCopy.Function(fun, vparams, body1) setType funtpe
}
}
}
@@ -2865,31 +2807,15 @@ trait Typers extends Modes with Adaptations with Tags {
val att = templ.attachments.get[CompoundTypeTreeOriginalAttachment].getOrElse(CompoundTypeTreeOriginalAttachment(Nil, Nil))
templ.removeAttachment[CompoundTypeTreeOriginalAttachment]
templ updateAttachment att.copy(stats = stats1)
- for (stat <- stats1 if stat.isDef) {
- val member = stat.symbol
- if (!(context.owner.ancestors forall
- (bc => member.matchingSymbol(bc, context.owner.thisType) == NoSymbol))) {
- member setFlag OVERRIDE
+ for (stat <- stats1 if stat.isDef && stat.symbol.isOverridingSymbol)
+ stat.symbol setFlag OVERRIDE
}
}
- }
- }
def typedImport(imp : Import) : Import = (transformed remove imp) match {
case Some(imp1: Import) => imp1
case _ => log("unhandled import: "+imp+" in "+unit); imp
}
- private def isWarnablePureExpression(tree: Tree) = tree match {
- case EmptyTree | Literal(Constant(())) => false
- case _ =>
- !tree.isErrorTyped && (treeInfo isExprSafeToInline tree) && {
- val sym = tree.symbol
- (sym == null) || !(sym.isModule || sym.isLazy) || {
- debuglog("'Pure' but side-effecting expression in statement position: " + tree)
- false
- }
- }
- }
def typedStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
val inBlock = exprOwner == context.owner
@@ -2926,7 +2852,7 @@ trait Typers extends Modes with Adaptations with Tags {
ConstructorsOrderError(stat)
}
- if (isWarnablePureExpression(result)) context.warning(stat.pos,
+ if (treeInfo.isPureExprForWarningPurposes(result)) context.warning(stat.pos,
"a pure expression does nothing in statement position; " +
"you may be omitting necessary parentheses"
)
@@ -2977,7 +2903,7 @@ trait Typers extends Modes with Adaptations with Tags {
// SI-5877 The decls of a package include decls of the package object. But we don't want to add
// the corresponding synthetics to the package class, only to the package object class.
def shouldAdd(sym: Symbol) =
- inBlock || !isInPackageObject(sym, context.owner)
+ inBlock || !context.isInPackageObject(sym, context.owner)
for (sym <- scope if shouldAdd(sym))
for (tree <- context.unit.synthetics get sym) {
newStats += typedStat(tree) // might add even more synthetics to the scope
@@ -3030,14 +2956,14 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- def typedArg(arg: Tree, mode: Int, newmode: Int, pt: Type): Tree = {
- val typedMode = onlyStickyModes(mode) | newmode
- val t = withCondConstrTyper((mode & SCCmode) != 0)(_.typed(arg, typedMode, pt))
+ def typedArg(arg: Tree, mode: Mode, newmode: Mode, pt: Type): Tree = {
+ val typedMode = mode.onlySticky | newmode
+ val t = withCondConstrTyper((mode & SCCmode) != NOmode)(_.typed(arg, typedMode, pt))
checkDead.inMode(typedMode, t)
}
- def typedArgs(args: List[Tree], mode: Int) =
- args mapConserve (arg => typedArg(arg, mode, 0, WildcardType))
+ def typedArgs(args: List[Tree], mode: Mode) =
+ args mapConserve (arg => typedArg(arg, mode, NOmode, WildcardType))
/** Type trees in `args0` against corresponding expected type in `adapted0`.
*
@@ -3047,8 +2973,8 @@ trait Typers extends Modes with Adaptations with Tags {
*
* (docs reverse-engineered -- AM)
*/
- def typedArgs(args0: List[Tree], mode: Int, formals0: List[Type], adapted0: List[Type]): List[Tree] = {
- val sticky = onlyStickyModes(mode)
+ def typedArgs(args0: List[Tree], mode: Mode, formals0: List[Type], adapted0: List[Type]): List[Tree] = {
+ val sticky = mode.onlySticky
def loop(args: List[Tree], formals: List[Type], adapted: List[Type]): List[Tree] = {
if (args.isEmpty || adapted.isEmpty) Nil
else {
@@ -3056,11 +2982,10 @@ trait Typers extends Modes with Adaptations with Tags {
val isVarArgs = formals.isEmpty || formals.tail.isEmpty && isRepeatedParamType(formals.head)
val typedMode = sticky | (
if (isVarArgs) STARmode | BYVALmode
- else if (isByNameParamType(formals.head)) 0
+ else if (isByNameParamType(formals.head)) NOmode
else BYVALmode
)
var tree = typedArg(args.head, mode, typedMode, adapted.head)
- if (hasPendingMacroExpansions) tree = macroExpandAll(this, tree)
// formals may be empty, so don't call tail
tree :: loop(args.tail, formals drop 1, adapted.tail)
}
@@ -3108,18 +3033,18 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- def doTypedApply(tree: Tree, fun0: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
+ def doTypedApply(tree: Tree, fun0: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
// TODO_NMT: check the assumption that args nonEmpty
def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
def preSelectOverloaded(fun: Tree): Tree = {
- if (fun.hasSymbol && fun.symbol.isOverloaded) {
+ if (fun.hasSymbolField && fun.symbol.isOverloaded) {
// remove alternatives with wrong number of parameters without looking at types.
- // less expensive than including them in inferMethodAlternatvie (see below).
+ // less expensive than including them in inferMethodAlternative (see below).
def shapeType(arg: Tree): Type = arg match {
case Function(vparams, body) =>
- functionType(vparams map (vparam => AnyClass.tpe), shapeType(body))
+ functionType(vparams map (_ => AnyClass.tpe), shapeType(body))
case AssignOrNamedArg(Ident(name), rhs) =>
NamedType(name, shapeType(rhs))
case _ =>
@@ -3127,7 +3052,6 @@ trait Typers extends Modes with Adaptations with Tags {
}
val argtypes = args map shapeType
val pre = fun.symbol.tpe.prefix
-
var sym = fun.symbol filter { alt =>
// must use pt as expected type, not WildcardType (a tempting quick fix to #2665)
// now fixed by using isWeaklyCompatible in exprTypeArgs
@@ -3139,20 +3063,19 @@ trait Typers extends Modes with Adaptations with Tags {
// Types: "refs = Array(Map(), Map())". I determined that inference fails if there are at
// least two invariant type parameters. See the test case I checked in to help backstop:
// pos/isApplicableSafe.scala.
- isApplicableSafe(context.undetparams, followApply(pre.memberType(alt)), argtypes, pt)
+ isApplicableSafe(context.undetparams, followApply(pre memberType alt), argtypes, pt)
}
if (sym.isOverloaded) {
- val sym1 = sym filter (alt => {
// eliminate functions that would result from tupling transforms
// keeps alternatives with repeated params
- hasExactlyNumParams(followApply(alt.tpe), argtypes.length) ||
- // also keep alts which define at least one default
- alt.tpe.paramss.exists(_.exists(_.hasDefault))
- })
+ val sym1 = sym filter (alt =>
+ isApplicableBasedOnArity(pre memberType alt, argtypes.length, varargsStar = false, tuplingAllowed = false)
+ || alt.tpe.params.exists(_.hasDefault)
+ )
if (sym1 != NoSymbol) sym = sym1
}
if (sym == NoSymbol) fun
- else adapt(fun setSymbol sym setType pre.memberType(sym), forFunMode(mode), WildcardType)
+ else adapt(fun setSymbol sym setType pre.memberType(sym), mode.forFunMode, WildcardType)
} else fun
}
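// Illustrative effect of the shape-based pre-selection above (a sketch, not
// part of this patch): alternatives whose parameter shape cannot match the
// call are discarded before the arguments are fully typed.
object ShapeDemo {
  def h(f: Int => Int): Int = 1
  def h(f: (Int, Int) => Int): Int = 2
  val a = h((x: Int) => x + 1)           // only the Function1 alternative survives pre-selection
  val b = h((x: Int, y: Int) => x + y)   // only the Function2 alternative survives pre-selection
}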
@@ -3162,7 +3085,6 @@ trait Typers extends Modes with Adaptations with Tags {
case OverloadedType(pre, alts) =>
def handleOverloaded = {
val undetparams = context.extractUndetparams()
-
val argtpes = new ListBuffer[Type]
val amode = forArgMode(fun, mode)
val args1 = args map {
@@ -3171,7 +3093,11 @@ trait Typers extends Modes with Adaptations with Tags {
val rhs1 = typedArg(rhs, amode, BYVALmode, WildcardType)
argtpes += NamedType(name, rhs1.tpe.deconst)
// the assign is untyped; that's ok because we call doTypedApply
- atPos(arg.pos) { new AssignOrNamedArg(arg.lhs, rhs1) }
+ treeCopy.AssignOrNamedArg(arg, arg.lhs, rhs1)
+ case arg @ Typed(repeated, Ident(tpnme.WILDCARD_STAR)) =>
+ val arg1 = typedArg(arg, amode, BYVALmode, WildcardType)
+ argtpes += RepeatedType(arg1.tpe.deconst)
+ arg1
case arg =>
val arg1 = typedArg(arg, amode, BYVALmode, WildcardType)
argtpes += arg1.tpe.deconst
@@ -3181,8 +3107,8 @@ trait Typers extends Modes with Adaptations with Tags {
if (context.hasErrors)
setError(tree)
else {
- inferMethodAlternative(fun, undetparams, argtpes.toList, pt, varArgsOnly = treeInfo.isWildcardStarArgList(args))
- doTypedApply(tree, adapt(fun, forFunMode(mode), WildcardType), args1, mode, pt)
+ inferMethodAlternative(fun, undetparams, argtpes.toList, pt)
+ doTypedApply(tree, adapt(fun, mode.forFunMode, WildcardType), args1, mode, pt)
}
}
handleOverloaded
@@ -3190,35 +3116,31 @@ trait Typers extends Modes with Adaptations with Tags {
case mt @ MethodType(params, _) =>
val paramTypes = mt.paramTypes
// repeat vararg as often as needed, remove by-name
- val formals = formalTypes(paramTypes, args.length)
+ val argslen = args.length
+ val formals = formalTypes(paramTypes, argslen)
/** Try packing all arguments into a Tuple and apply `fun`
* to that. This is the last thing which is tried (after
* default arguments)
*/
- def tryTupleApply: Option[Tree] = {
- // if 1 formal, 1 arg (a tuple), otherwise unmodified args
- val tupleArgs = actualArgs(tree.pos.makeTransparent, args, formals.length)
-
- if (!sameLength(tupleArgs, args) && !isUnitForVarArgs(args, params)) {
+ def tryTupleApply: Option[Tree] = (
+ if (eligibleForTupleConversion(paramTypes, argslen) && !phase.erasedTypes) {
+ val tupleArgs = List(atPos(tree.pos.makeTransparent)(gen.mkTuple(args)))
// expected one argument, but got 0 or >1 ==> try applying to tuple
// the inner "doTypedApply" does "extractUndetparams" => restore when it fails
val savedUndetparams = context.undetparams
- silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)) match {
- case SilentResultValue(t) =>
+ silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)) map { t =>
// Depending on user options, may warn or error here if
// a Unit or tuple was inserted.
Some(t) filter (tupledTree =>
- !inExprModeButNot(mode, FUNmode)
+ !mode.inExprModeButNot(FUNmode)
|| tupledTree.symbol == null
|| checkValidAdaptation(tupledTree, args)
)
- case _ =>
- context.undetparams = savedUndetparams
- None
+ } orElse { _ => context.undetparams = savedUndetparams ; None }
}
- } else None
- }
+ else None
+ )
/** Treats an application which uses named or default arguments.
* Also works if names + a vararg used: when names are used, the vararg
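// Illustrative calls exercising the adaptations handled in this area (assumed
// examples, not from this patch): tupling of an argument list, and named or
// default arguments, possibly reordered.
object ApplyDemo {
  def first(p: (Int, String)): Int = p._1
  val t = first(1, "a")                      // adapted to first((1, "a")); may warn depending on settings
  def greet(name: String, punct: String = "!"): String = name + punct
  val g1 = greet("Ann")                      // default supplied for punct
  val g2 = greet(punct = "?", name = "Bo")   // named arguments allow reordering
}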
@@ -3229,12 +3151,12 @@ trait Typers extends Modes with Adaptations with Tags {
val lencmp = compareLengths(args, formals)
def checkNotMacro() = {
- if (fun.symbol != null && fun.symbol.filter(sym => sym != null && sym.isTermMacro && !sym.isErroneous) != NoSymbol)
+ if (treeInfo.isMacroApplication(fun))
tryTupleApply getOrElse duplErrorTree(NamedAndDefaultArgumentsNotSupportedForMacros(tree, fun))
}
if (mt.isErroneous) duplErrTree
- else if (inPatternMode(mode)) {
+ else if (mode.inPatternMode) {
// #2064
duplErrorTree(WrongNumberOfArgsError(tree, fun))
} else if (lencmp > 0) {
@@ -3245,10 +3167,10 @@ trait Typers extends Modes with Adaptations with Tags {
val (namelessArgs, argPos) = removeNames(Typer.this)(args, params)
if (namelessArgs exists (_.isErroneous)) {
duplErrTree
- } else if (!isIdentity(argPos) && !sameLength(formals, params))
- // !isIdentity indicates that named arguments are used to re-order arguments
+ } else if (!allArgsArePositional(argPos) && !sameLength(formals, params))
+ // !allArgsArePositional indicates that named arguments are used to re-order arguments
duplErrorTree(MultipleVarargError(tree))
- else if (isIdentity(argPos) && !isNamedApplyBlock(fun)) {
+ else if (allArgsArePositional(argPos) && !isNamedApplyBlock(fun)) {
// if there's no re-ordering, and fun is not transformed, no need to transform
// more than an optimization, e.g. important in "synchronized { x = update-x }"
checkNotMacro()
@@ -3298,7 +3220,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
if (!sameLength(formals, args) || // wrong nb of arguments
- (args exists isNamed) || // uses a named argument
+ (args exists isNamedArg) || // uses a named argument
isNamedApplyBlock(fun)) { // fun was transformed to a named apply block =>
// integrate this application into the block
if (dyna.isApplyDynamicNamed(fun)) dyna.typedNamedApply(tree, fun, args, mode, pt)
@@ -3329,30 +3251,10 @@ trait Typers extends Modes with Adaptations with Tags {
// precise(foo) : foo.type => foo.type
val restpe = mt.resultType(args1 map (arg => gen.stableTypeFor(arg) getOrElse arg.tpe))
def ifPatternSkipFormals(tp: Type) = tp match {
- case MethodType(_, rtp) if (inPatternMode(mode)) => rtp
+ case MethodType(_, rtp) if (mode.inPatternMode) => rtp
case _ => tp
}
- // Replace the Delegate-Chainer methods += and -= with corresponding
- // + and - calls, which are translated in the code generator into
- // Combine and Remove
- if (forMSIL) {
- fun match {
- case Select(qual, name) =>
- if (isSubType(qual.tpe, DelegateClass.tpe)
- && (name == encode("+=") || name == encode("-="))) {
- val n = if (name == encode("+=")) nme.PLUS else nme.MINUS
- val f = Select(qual, n)
- // the compiler thinks, the PLUS method takes only one argument,
- // but he thinks it's an instance method -> still two ref's on the stack
- // -> translated by backend
- val rhs = treeCopy.Apply(tree, f, args)
- return typed(Assign(qual, rhs))
- }
- case _ => ()
- }
- }
-
/**
* This is translating uses of List() into Nil. This is less
* than ideal from a consistency standpoint, but it shouldn't be
@@ -3361,7 +3263,7 @@ trait Typers extends Modes with Adaptations with Tags {
* forced during kind-arity checking, so it is guarded by additional
* tests to ensure we're sufficiently far along.
*/
- if (args.isEmpty && !forInteractive && fun.symbol.isInitialized && ListModule.hasCompleteInfo && (fun.symbol == List_apply))
+ if (args.isEmpty && canTranslateEmptyListToNil && fun.symbol.isInitialized && ListModule.hasCompleteInfo && (fun.symbol == List_apply))
atPos(tree.pos)(gen.mkNil setType restpe)
else
constfold(treeCopy.Apply(tree, fun, args1) setType ifPatternSkipFormals(restpe))
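// What the translation above means at the user level (illustrative):
object NilDemo {
  val xs = List()             // replaced by the constant Nil
  val ys: List[Int] = List()  // likewise Nil, conforming to List[Int]
}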
@@ -3375,7 +3277,7 @@ trait Typers extends Modes with Adaptations with Tags {
doTypedApply(tree, fun, args, mode, pt)
} else {
def handlePolymorphicCall = {
- assert(!inPatternMode(mode), modeString(mode)) // this case cannot arise for patterns
+ assert(!mode.inPatternMode, mode) // this case cannot arise for patterns
val lenientTargs = protoTypeArgs(tparams, formals, mt.resultApprox, pt)
val strictTargs = map2(lenientTargs, tparams)((targ, tparam) =>
if (targ == WildcardType) tparam.tpeHK else targ)
@@ -3401,9 +3303,8 @@ trait Typers extends Modes with Adaptations with Tags {
// define the undetparams which have been fixed by this param list, replace the corresponding symbols in "fun"
// returns those undetparams which have not been instantiated.
val undetparams = inferMethodInstance(fun, tparams, args1, pt)
- val result = doTypedApply(tree, fun, args1, mode, pt)
- context.undetparams = undetparams
- result
+ try doTypedApply(tree, fun, args1, mode, pt)
+ finally context.undetparams = undetparams
}
}
handlePolymorphicCall
@@ -3417,15 +3318,16 @@ trait Typers extends Modes with Adaptations with Tags {
if (!tree.isErrorTyped) setError(tree) else tree
// @H change to setError(treeCopy.Apply(tree, fun, args))
- case otpe if inPatternMode(mode) && unapplyMember(otpe).exists =>
+ case otpe if mode.inPatternMode && unapplyMember(otpe).exists =>
doTypedUnapply(tree, fun0, fun, args, mode, pt)
case _ =>
- duplErrorTree(ApplyWithoutArgsError(tree, fun))
+ if (treeInfo.isMacroApplication(tree)) duplErrorTree(MacroTooManyArgumentListsError(tree, fun.symbol))
+ else duplErrorTree(ApplyWithoutArgsError(tree, fun))
}
}
- def doTypedUnapply(tree: Tree, fun0: Tree, fun: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
+ def doTypedUnapply(tree: Tree, fun0: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
@@ -3460,7 +3362,7 @@ trait Typers extends Modes with Adaptations with Tags {
else None
if (!isApplicableSafe(Nil, unappType, List(pt), WildcardType)) {
- //Console.println("UNAPP: need to typetest, arg.tpe = "+arg.tpe+", unappType = "+unappType)
+ //Console.println(s"UNAPP: need to typetest, arg: ${arg.tpe} unappType: $unappType")
val (freeVars, unappFormal) = freshArgType(unappType.skolemizeExistential(context.owner, tree))
val unapplyContext = context.makeNewScope(context.tree, context.owner)
freeVars foreach unapplyContext.scope.enter
@@ -3470,29 +3372,26 @@ trait Typers extends Modes with Adaptations with Tags {
// turn any unresolved type variables in freevars into existential skolems
val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv))
- arg.tpe = pattp.substSym(freeVars, skolems)
+ arg setType pattp.substSym(freeVars, skolems)
argDummy setInfo arg.tpe
}
- // setType null is necessary so that ref will be stabilized; see bug 881
- val fun1 = typedPos(fun.pos)(Apply(Select(fun setType null, unapp), List(arg)))
+ // clearing the type is necessary so that ref will be stabilized; see bug 881
+ val fun1 = typedPos(fun.pos)(Apply(Select(fun.clearType(), unapp), List(arg)))
if (fun1.tpe.isErroneous) duplErrTree
else {
- val resTp = fun1.tpe.finalResultType.normalize
+ val resTp = fun1.tpe.finalResultType.dealiasWiden
val nbSubPats = args.length
val (formals, formalsExpanded) = extractorFormalTypes(fun0.pos, resTp, nbSubPats, fun1.symbol)
if (formals == null) duplErrorTree(WrongNumberOfArgsError(tree, fun))
else {
val args1 = typedArgs(args, mode, formals, formalsExpanded)
- // This used to be the following (failing) assert:
- // assert(isFullyDefined(pt), tree+" ==> "+UnApply(fun1, args1)+", pt = "+pt)
- // I modified as follows. See SI-1048.
- val pt1 = if (isFullyDefined(pt)) pt else makeFullyDefined(pt)
+ val pt1 = if (isFullyDefined(pt)) pt else makeFullyDefined(pt) // SI-1048
val itype = glb(List(pt1, arg.tpe))
- arg.tpe = pt1 // restore type (arg is a dummy tree, just needs to pass typechecking)
+ arg setType pt1 // restore type (arg is a dummy tree, just needs to pass typechecking)
val unapply = UnApply(fun1, args1) setPos tree.pos setType itype
// if the type that the unapply method expects for its argument is uncheckable, wrap in classtag extractor
@@ -3527,16 +3426,21 @@ trait Typers extends Modes with Adaptations with Tags {
// if there's a ClassTag that allows us to turn the unchecked type test for `pt` into a checked type test
// return the corresponding extractor (an instance of ClassTag[`pt`])
- def extractorForUncheckedType(pos: Position, pt: Type): Option[Tree] = if (!opt.virtPatmat || isPastTyper) None else {
+ def extractorForUncheckedType(pos: Position, pt: Type): Option[Tree] = if (isPastTyper) None else {
// only look at top-level type, can't (reliably) do anything about unchecked type args (in general)
- pt.normalize.typeConstructor match {
+ // but at least make a proper type before passing it elsewhere
+ val pt1 = pt.dealiasWiden match {
+ case tr @ TypeRef(pre, sym, args) if args.nonEmpty => copyTypeRef(tr, pre, sym, sym.typeParams map (_.tpeHK)) // replace actual type args with dummies
+ case pt1 => pt1
+ }
+ pt1 match {
// if at least one of the types in an intersection is checkable, use the checkable ones
// this avoids problems as in run/matchonseq.scala, where the expected type is `Coll with scala.collection.SeqLike`
// Coll is an abstract type, but SeqLike of course is not
- case RefinedType(parents, _) if (parents.length >= 2) && (parents.exists(tp => !infer.containsUnchecked(tp))) =>
+ case RefinedType(ps, _) if ps.length > 1 && (ps exists infer.isCheckable) =>
None
- case ptCheckable if infer.containsUnchecked(ptCheckable) =>
+ case ptCheckable if infer isUncheckable ptCheckable =>
val classTagExtractor = resolveClassTag(pos, ptCheckable)
if (classTagExtractor != EmptyTree && unapplyMember(classTagExtractor.tpe) != NoSymbol)
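// User-level motivation for the ClassTag extractor above (illustrative): a
// type test on an abstract type is unchecked on its own, but becomes a
// checked test when a ClassTag is available.
import scala.reflect.ClassTag
object ClassTagDemo {
  def firstOfType[T: ClassTag](xs: List[Any]): Option[T] =
    xs.collectFirst { case x: T => x }  // the ClassTag makes `case x: T` a checked test
}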
@@ -3549,25 +3453,33 @@ trait Typers extends Modes with Adaptations with Tags {
/**
* Convert an annotation constructor call into an AnnotationInfo.
- *
- * @param annClass the expected annotation class
*/
- def typedAnnotation(ann: Tree, mode: Int = EXPRmode, selfsym: Symbol = NoSymbol, annClass: Symbol = AnnotationClass, requireJava: Boolean = false): AnnotationInfo = {
- lazy val annotationError = AnnotationInfo(ErrorType, Nil, Nil)
+ def typedAnnotation(ann: Tree, mode: Mode = EXPRmode, selfsym: Symbol = NoSymbol): AnnotationInfo = {
var hasError: Boolean = false
val pending = ListBuffer[AbsTypeError]()
+ def finish(res: AnnotationInfo): AnnotationInfo = {
+ if (hasError) {
+ pending.foreach(ErrorUtils.issueTypeError)
+ ErroneousAnnotation
+ }
+ else res
+ }
+
def reportAnnotationError(err: AbsTypeError) = {
pending += err
hasError = true
- annotationError
+ ErroneousAnnotation
}
/** Calling constfold right here is necessary because some trees (negated
* floats and literals in particular) are not yet folded.
*/
def tryConst(tr: Tree, pt: Type): Option[LiteralAnnotArg] = {
- val const: Constant = typed(constfold(tr), EXPRmode, pt) match {
+ // The typed tree may be relevantly different than the tree `tr`,
+ // e.g. it may have encountered an implicit conversion.
+ val ttree = typed(constfold(tr), EXPRmode, pt)
+ val const: Constant = ttree match {
case l @ Literal(c) if !l.isErroneous => c
case tree => tree.tpe match {
case ConstantType(c) => c
@@ -3576,7 +3488,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
if (const == null) {
- reportAnnotationError(AnnotationNotAConstantError(tr)); None
+ reportAnnotationError(AnnotationNotAConstantError(ttree)); None
} else if (const.value == null) {
reportAnnotationError(AnnotationArgNullError(tr)); None
} else
@@ -3591,14 +3503,21 @@ trait Typers extends Modes with Adaptations with Tags {
reportAnnotationError(ArrayConstantsError(tree)); None
case ann @ Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
- val annInfo = typedAnnotation(ann, mode, NoSymbol, pt.typeSymbol, true)
+ val annInfo = typedAnnotation(ann, mode, NoSymbol)
+ val annType = annInfo.tpe
+
+ if (!annType.typeSymbol.isSubClass(pt.typeSymbol))
+ reportAnnotationError(AnnotationTypeMismatchError(tpt, annType, annType))
+ else if (!annType.typeSymbol.isSubClass(ClassfileAnnotationClass))
+ reportAnnotationError(NestedAnnotationError(ann, annType))
+
if (annInfo.atp.isErroneous) { hasError = true; None }
else Some(NestedAnnotArg(annInfo))
// use of Array.apply[T: ClassTag](xs: T*): Array[T]
// and Array.apply(x: Int, xs: Int*): Array[Int] (and similar)
case Apply(fun, args) =>
- val typedFun = typed(fun, forFunMode(mode), WildcardType)
+ val typedFun = typed(fun, mode.forFunMode, WildcardType)
if (typedFun.symbol.owner == ArrayModule.moduleClass && typedFun.symbol.name == nme.apply)
pt match {
case TypeRef(_, ArrayClass, targ :: _) =>
@@ -3626,44 +3545,42 @@ trait Typers extends Modes with Adaptations with Tags {
}
// begin typedAnnotation
- val (fun, argss) = {
- def extract(fun: Tree, outerArgss: List[List[Tree]]):
- (Tree, List[List[Tree]]) = fun match {
- case Apply(f, args) =>
- extract(f, args :: outerArgss)
- case Select(New(tpt), nme.CONSTRUCTOR) =>
- (fun, outerArgss)
- case _ =>
- reportAnnotationError(UnexpectedTreeAnnotation(fun))
- (setError(fun), outerArgss)
- }
- extract(ann, List())
- }
-
- val res = if (fun.isErroneous) annotationError
- else {
- val typedFun @ Select(New(tpt), _) = typed(fun, forFunMode(mode), WildcardType)
- val annType = tpt.tpe
+ val treeInfo.Applied(fun0, targs, argss) = ann
+ if (fun0.isErroneous)
+ return finish(ErroneousAnnotation)
+ val typedFun0 = typed(fun0, mode.forFunMode, WildcardType)
+ val typedFunPart = (
+ // If there are dummy type arguments in typeFun part, it suggests we
+ // must type the actual constructor call, not only the select. The value
+ // arguments are how the type arguments will be inferred.
+ if (targs.isEmpty && typedFun0.exists(t => isDummyAppliedType(t.tpe)))
+ logResult(s"Retyped $typedFun0 to find type args")(typed(argss.foldLeft(fun0)(Apply(_, _))))
+ else
+ typedFun0
+ )
+ val treeInfo.Applied(typedFun @ Select(New(annTpt), _), _, _) = typedFunPart
+ val annType = annTpt.tpe
- if (typedFun.isErroneous) annotationError
+ finish(
+ if (typedFun.isErroneous)
+ ErroneousAnnotation
else if (annType.typeSymbol isNonBottomSubClass ClassfileAnnotationClass) {
// annotation to be saved as java classfile annotation
val isJava = typedFun.symbol.owner.isJavaDefined
- if (!annType.typeSymbol.isNonBottomSubClass(annClass)) {
- reportAnnotationError(AnnotationTypeMismatchError(tpt, annClass.tpe, annType))
- } else if (argss.length > 1) {
+ if (argss.length > 1) {
reportAnnotationError(MultipleArgumentListForAnnotationError(ann))
- } else {
+ }
+ else {
val annScope = annType.decls
.filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined)
val names = new scala.collection.mutable.HashSet[Symbol]
- def hasValue = names exists (_.name == nme.value)
names ++= (if (isJava) annScope.iterator
else typedFun.tpe.params.iterator)
+
+ def hasValue = names exists (_.name == nme.value)
val args = argss match {
- case List(List(arg)) if !isNamed(arg) && hasValue =>
- List(new AssignOrNamedArg(Ident(nme.value), arg))
- case as :: _ => as
+ case (arg :: Nil) :: Nil if !isNamedArg(arg) && hasValue => gen.mkNamedArg(nme.value, arg) :: Nil
+ case args :: Nil => args
}
val nvPairs = args map {
@@ -3693,46 +3610,33 @@ trait Typers extends Modes with Adaptations with Tags {
reportAnnotationError(AnnotationMissingArgError(ann, annType, sym))
}
- if (hasError) annotationError
+ if (hasError) ErroneousAnnotation
else AnnotationInfo(annType, List(), nvPairs map {p => (p._1, p._2.get)}).setOriginal(Apply(typedFun, args).setPos(ann.pos))
}
- } else if (requireJava) {
- reportAnnotationError(NestedAnnotationError(ann, annType))
- } else {
+ }
+ else {
val typedAnn = if (selfsym == NoSymbol) {
// local dummy fixes SI-5544
val localTyper = newTyper(context.make(ann, context.owner.newLocalDummy(ann.pos)))
- localTyper.typed(ann, mode, annClass.tpe)
- } else {
- // Since a selfsym is supplied, the annotation should have
- // an extra "self" identifier in scope for type checking.
- // This is implemented by wrapping the rhs
- // in a function like "self => rhs" during type checking,
- // and then stripping the "self =>" and substituting
- // in the supplied selfsym.
+ localTyper.typed(ann, mode, annType)
+ }
+ else {
+ // Since a selfsym is supplied, the annotation should have an extra
+ // "self" identifier in scope for type checking. This is implemented
+ // by wrapping the rhs in a function like "self => rhs" during type
+ // checking, and then stripping the "self =>" and substituting in
+ // the supplied selfsym.
val funcparm = ValDef(NoMods, nme.self, TypeTree(selfsym.info), EmptyTree)
- val func = Function(List(funcparm), ann.duplicate)
- // The .duplicate of annot.constr
- // deals with problems that
- // accur if this annotation is
- // later typed again, which
- // the compiler sometimes does.
- // The problem is that "self"
- // ident's within annot.constr
- // will retain the old symbol
- // from the previous typing.
- val fun1clazz = FunctionClass(1)
- val funcType = typeRef(fun1clazz.tpe.prefix,
- fun1clazz,
- List(selfsym.info, annClass.tpe))
-
- (typed(func, mode, funcType): @unchecked) match {
- case t @ Function(List(arg), rhs) =>
- val subs =
- new TreeSymSubstituter(List(arg.symbol),List(selfsym))
- subs(rhs)
+          // The .duplicate of annot.constr deals with problems that occur
+          // if this annotation is later typed again, which the compiler
+          // sometimes does. The problem is that "self" idents within
+          // annot.constr will retain the old symbol from the previous typing.
+ val func = Function(funcparm :: Nil, ann.duplicate)
+ val funcType = appliedType(FunctionClass(1), selfsym.info, annType)
+ val Function(arg :: Nil, rhs) = typed(func, mode, funcType)
+
+ rhs.substituteSymbols(arg.symbol :: Nil, selfsym :: Nil)
}
- }
def annInfo(t: Tree): AnnotationInfo = t match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
@@ -3757,16 +3661,10 @@ trait Typers extends Modes with Adaptations with Tags {
if (annType.typeSymbol == DeprecatedAttr && argss.flatten.size < 2)
unit.deprecationWarning(ann.pos, "@deprecated now takes two arguments; see the scaladoc.")
- if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) annotationError
+ if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) ErroneousAnnotation
else annInfo(typedAnn)
+ })
}
- }
-
- if (hasError) {
- pending.foreach(ErrorUtils.issueTypeError)
- annotationError
- } else res
- }
def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type?
sym.isTypeParameter && sym.owner.isJavaDefined
@@ -3885,7 +3783,8 @@ trait Typers extends Modes with Adaptations with Tags {
else containsDef(owner, sym) || isRawParameter(sym) || isCapturedExistential(sym)
def containsLocal(tp: Type): Boolean =
tp exists (t => isLocal(t.typeSymbol) || isLocal(t.termSymbol))
- val normalizeLocals = new TypeMap {
+
+ val dealiasLocals = new TypeMap {
def apply(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) =>
if (sym.isAliasType && containsLocal(tp)) apply(tp.dealias)
@@ -3938,31 +3837,31 @@ trait Typers extends Modes with Adaptations with Tags {
for (sym <- remainingSyms) addLocals(sym.existentialBound)
}
- val normalizedTpe = normalizeLocals(tree.tpe)
- addLocals(normalizedTpe)
- packSymbols(localSyms.toList, normalizedTpe)
+ val dealiasedType = dealiasLocals(tree.tpe)
+ addLocals(dealiasedType)
+ packSymbols(localSyms.toList, dealiasedType)
}
def typedClassOf(tree: Tree, tpt: Tree, noGen: Boolean = false) =
if (!checkClassType(tpt) && noGen) tpt
else atPos(tree.pos)(gen.mkClassOf(tpt.tpe))
- protected def typedExistentialTypeTree(tree: ExistentialTypeTree, mode: Int): Tree = {
+ protected def typedExistentialTypeTree(tree: ExistentialTypeTree, mode: Mode): Tree = {
for (wc <- tree.whereClauses)
if (wc.symbol == NoSymbol) { namer.enterSym(wc); wc.symbol setFlag EXISTENTIAL }
else context.scope enter wc.symbol
val whereClauses1 = typedStats(tree.whereClauses, context.owner)
- for (vd @ ValDef(_, _, _, _) <- tree.whereClauses)
+ for (vd @ ValDef(_, _, _, _) <- whereClauses1)
if (vd.symbol.tpe.isVolatile)
AbstractionFromVolatileTypeError(vd)
val tpt1 = typedType(tree.tpt, mode)
- existentialTransform(tree.whereClauses map (_.symbol), tpt1.tpe)((tparams, tp) =>
+ existentialTransform(whereClauses1 map (_.symbol), tpt1.tpe)((tparams, tp) =>
TypeTree(newExistentialType(tparams, tp)) setOriginal tree
)
}
// lifted out of typed1 because it's needed in typedImplicit0
- protected def typedTypeApply(tree: Tree, mode: Int, fun: Tree, args: List[Tree]): Tree = fun.tpe match {
+ protected def typedTypeApply(tree: Tree, mode: Mode, fun: Tree, args: List[Tree]): Tree = fun.tpe match {
case OverloadedType(pre, alts) =>
inferPolyAlternatives(fun, args map (_.tpe))
val tparams = fun.symbol.typeParams //@M TODO: fun.symbol.info.typeParams ? (as in typedAppliedTypeTree)
@@ -3987,7 +3886,7 @@ trait Typers extends Modes with Adaptations with Tags {
val targs = args map (_.tpe)
checkBounds(tree, NoPrefix, NoSymbol, tparams, targs, "")
if (fun.symbol == Predef_classOf)
- typedClassOf(tree, args.head, true)
+ typedClassOf(tree, args.head, noGen = true)
else {
if (!isPastTyper && fun.symbol == Any_isInstanceOf && targs.nonEmpty) {
val scrutineeType = fun match {
@@ -4051,7 +3950,7 @@ trait Typers extends Modes with Adaptations with Tags {
// else false
}
- def typedNamedApply(orig: Tree, fun: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
+ def typedNamedApply(orig: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
def argToBinding(arg: Tree): Tree = arg match {
case AssignOrNamedArg(Ident(name), rhs) => gen.mkTuple(List(CODE.LIT(name.toString), rhs))
case _ => gen.mkTuple(List(CODE.LIT(""), arg))
@@ -4121,13 +4020,9 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = {
- silent(typeTree) match {
- case SilentResultValue(r) => r
- case SilentTypeError(err) => DynamicRewriteError(tree, err)
+ def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree =
+ silent(typeTree) orElse (err => DynamicRewriteError(tree, err))
}
- }
- }
final def deindentTyping() = context.typingIndentLevel -= 2
final def indentTyping() = context.typingIndentLevel += 2
@@ -4140,22 +4035,33 @@ trait Typers extends Modes with Adaptations with Tags {
println(s)
}
- def typed1(tree: Tree, mode: Int, pt: Type): Tree = {
- def isPatternMode = inPatternMode(mode)
+ def typed1(tree: Tree, mode: Mode, pt: Type): Tree = {
+ def isPatternMode = mode.inPatternMode
+ def inPatternConstructor = mode.inAll(PATTERNmode | FUNmode)
+ def isQualifierMode = mode.inAll(QUALmode)
+
+ // Lookup in the given class using the root mirror.
+ def lookupInOwner(owner: Symbol, name: Name): Symbol =
+ if (isQualifierMode) rootMirror.missingHook(owner, name) else NoSymbol
+
+ // Lookup in the given qualifier. Used in last-ditch efforts by typedIdent and typedSelect.
+ def lookupInRoot(name: Name): Symbol = lookupInOwner(rootMirror.RootClass, name)
+ def lookupInEmpty(name: Name): Symbol = rootMirror.EmptyPackageClass.info member name
- //Console.println("typed1("+tree.getClass()+","+Integer.toHexString(mode)+","+pt+")")
- //@M! get the type of the qualifier in a Select tree, otherwise: NoType
- def prefixType(fun: Tree): Type = fun match {
- case Select(qualifier, _) => qualifier.tpe
-// case Ident(name) => ??
- case _ => NoType
+ def lookupInQualifier(qual: Tree, name: Name): Symbol = (
+ if (name == nme.ERROR || qual.tpe.widen.isErroneous)
+ NoSymbol
+ else lookupInOwner(qual.tpe.typeSymbol, name) orElse {
+ NotAMemberError(tree, qual, name)
+ NoSymbol
}
+ )
def typedAnnotated(atd: Annotated): Tree = {
val ann = atd.annot
val arg1 = typed(atd.arg, mode, pt)
/** mode for typing the annotation itself */
- val annotMode = mode & ~TYPEmode | EXPRmode
+ val annotMode = (mode &~ TYPEmode) | EXPRmode
def resultingTypeTree(tpe: Type) = {
// we need symbol-ful originals for reification
@@ -4210,7 +4116,7 @@ trait Typers extends Modes with Adaptations with Tags {
// Erroneous annotations were already reported in typedAnnotation
arg1 // simply drop erroneous annotations
else {
- ann.tpe = atype
+ ann setType atype
resultingTypeTree(atype)
}
} else {
@@ -4221,7 +4127,7 @@ trait Typers extends Modes with Adaptations with Tags {
else {
if (ann.tpe == null) {
val annotInfo = typedAnnotation(ann, annotMode)
- ann.tpe = arg1.tpe.withAnnotation(annotInfo)
+ ann setType arg1.tpe.withAnnotation(annotInfo)
}
val atype = ann.tpe
Typed(arg1, resultingTypeTree(atype)) setPos tree.pos setType atype
@@ -4245,7 +4151,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (name != tpnme.WILDCARD) namer.enterInScope(sym)
else context.scope.enter(sym)
- tree setSymbol sym setType sym.tpe
+ tree setSymbol sym setType sym.tpeHK
case name: TermName =>
val sym =
@@ -4253,7 +4159,7 @@ trait Typers extends Modes with Adaptations with Tags {
else context.owner.newValue(name, tree.pos)
if (name != nme.WILDCARD) {
- if ((mode & ALTmode) != 0) VariableInPatternAlternativeError(tree)
+ if (mode.inAll(ALTmode)) VariableInPatternAlternativeError(tree)
namer.enterInScope(sym)
}
@@ -4341,8 +4247,7 @@ trait Typers extends Modes with Adaptations with Tags {
// in the special (though common) case where the types are equal, it pays to pack before comparing
// especially virtpatmat needs more aggressive unification of skolemized types
// this breaks src/library/scala/collection/immutable/TrieIterator.scala
- if ( opt.virtPatmat && !isPastTyper
- && thenp1.tpe.annotations.isEmpty && elsep1.tpe.annotations.isEmpty // annotated types need to be lubbed regardless (at least, continations break if you by pass them like this)
+          if (!isPastTyper && thenp1.tpe.annotations.isEmpty && elsep1.tpe.annotations.isEmpty // annotated types need to be lubbed regardless (at least, continuations break if you bypass them like this)
&& thenTp =:= elseTp
) (thenp1.tpe.deconst, false) // use unpacked type. Important to deconst, as is done in ptOrLub, otherwise `if (???) 0 else 0` evaluates to 0 (SI-6331)
// TODO: skolemize (lub of packed types) when that no longer crashes on files/pos/t4070b.scala
@@ -4356,7 +4261,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- // under -Xexperimental (and not -Xoldpatmat), and when there's a suitable __match in scope, virtualize the pattern match
+ // When there's a suitable __match in scope, virtualize the pattern match
// otherwise, type the Match and leave it until phase `patmat` (immediately after typer)
// empty-selector matches are transformed into synthetic PartialFunction implementations when the expected type demands it
def typedVirtualizedMatch(tree: Match): Tree = {
@@ -4366,7 +4271,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (newPatternMatching && (pt.typeSymbol == PartialFunctionClass))
synthesizePartialFunction(newTermName(context.unit.fresh.newName("x")), tree.pos, tree, mode, pt)
else {
- val arity = if (isFunctionType(pt)) pt.normalize.typeArgs.length - 1 else 1
+ val arity = if (isFunctionType(pt)) pt.dealiasWiden.typeArgs.length - 1 else 1
val params = for (i <- List.range(0, arity)) yield
atPos(tree.pos.focusStart) {
ValDef(Modifiers(PARAM | SYNTHETIC),
@@ -4422,7 +4327,7 @@ trait Typers extends Modes with Adaptations with Tags {
// given a dealiased type.
val tpt0 = typedTypeConstructor(tpt) modifyType (_.dealias)
if (checkStablePrefixClassType(tpt0))
- if (tpt0.hasSymbol && !tpt0.symbol.typeParams.isEmpty) {
+ if (tpt0.hasSymbolField && !tpt0.symbol.typeParams.isEmpty) {
context.undetparams = cloneSymbols(tpt0.symbol.typeParams)
notifyUndetparamsAdded(context.undetparams)
TypeTree().setOriginal(tpt0)
@@ -4452,7 +4357,7 @@ trait Typers extends Modes with Adaptations with Tags {
NotAMemberError(tpt, TypeTree(tp), nme.CONSTRUCTOR)
setError(tpt)
}
- else if (!( tp == sym.thisSym.tpe // when there's no explicit self type -- with (#3612) or without self variable
+ else if (!( tp == sym.thisSym.tpe_* // when there's no explicit self type -- with (#3612) or without self variable
// sym.thisSym.tpe == tp.typeOfThis (except for objects)
|| narrowRhs(tp) <:< tp.typeOfThis
|| phase.erasedTypes
@@ -4476,36 +4381,15 @@ trait Typers extends Modes with Adaptations with Tags {
else adapt(expr1, mode, functionType(formals map (t => WildcardType), WildcardType))
case MethodType(formals, _) =>
if (isFunctionType(pt)) expr1
- else expr1 match {
- case Select(qual, name) if (forMSIL &&
- pt != WildcardType &&
- pt != ErrorType &&
- isSubType(pt, DelegateClass.tpe)) =>
- val scalaCaller = newScalaCaller(pt)
- addScalaCallerInfo(scalaCaller, expr1.symbol)
- val n: Name = scalaCaller.name
- val del = Ident(DelegateClass) setType DelegateClass.tpe
- val f = Select(del, n)
- //val f1 = TypeApply(f, List(Ident(pt.symbol) setType pt))
- val args: List[Tree] = if(expr1.symbol.isStatic) List(Literal(Constant(null)))
- else List(qual) // where the scala-method is located
- val rhs = Apply(f, args)
- typed(rhs)
- case _ =>
- adapt(expr1, mode, functionType(formals map (t => WildcardType), WildcardType))
- }
+ else adapt(expr1, mode, functionType(formals map (t => WildcardType), WildcardType))
case ErrorType =>
expr1
case _ =>
UnderscoreEtaError(expr1)
}
- /**
- * @param args ...
- * @return ...
- */
- def tryTypedArgs(args: List[Tree], mode: Int): Option[List[Tree]] = {
- val c = context.makeSilent(false)
+ def tryTypedArgs(args: List[Tree], mode: Mode): Option[List[Tree]] = {
+ val c = context.makeSilent(reportAmbiguousErrors = false)
c.retyping = true
try {
val res = newTyper(c).typedArgs(args, mode)
@@ -4564,7 +4448,7 @@ trait Typers extends Modes with Adaptations with Tags {
tryTypedArgs(args, forArgMode(fun, mode)) match {
case Some(args1) =>
val qual1 =
- if (!pt.isError) adaptToArguments(qual, name, args1, pt, true, true)
+ if (!pt.isError) adaptToArguments(qual, name, args1, pt, reportAmbiguous = true, saveErrors = true)
else qual
if (qual1 ne qual) {
val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos
@@ -4577,19 +4461,14 @@ trait Typers extends Modes with Adaptations with Tags {
setError(treeCopy.Apply(tree, fun, args))
}
- silent(_.doTypedApply(tree, fun, args, mode, pt)) match {
- case SilentResultValue(t) =>
- t
- case SilentTypeError(err) =>
- onError(err)
+ silent(_.doTypedApply(tree, fun, args, mode, pt)) orElse onError
}
- }
def normalTypedApply(tree: Tree, fun: Tree, args: List[Tree]) = {
val stableApplication = (fun.symbol ne null) && fun.symbol.isMethod && fun.symbol.isStable
if (stableApplication && isPatternMode) {
// treat stable function applications f() as expressions.
- typed1(tree, mode & ~PATTERNmode | EXPRmode, pt)
+ typed1(tree, (mode &~ PATTERNmode) | EXPRmode, pt)
} else {
val funpt = if (isPatternMode) pt else WildcardType
val appStart = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
@@ -4612,9 +4491,9 @@ trait Typers extends Modes with Adaptations with Tags {
reportError
}
}
- silent(_.typed(fun, forFunMode(mode), funpt),
- if ((mode & EXPRmode) != 0) false else context.ambiguousErrors,
- if ((mode & EXPRmode) != 0) tree else context.tree) match {
+ silent(_.typed(fun, mode.forFunMode, funpt),
+ if (mode.inExprMode) false else context.ambiguousErrors,
+ if (mode.inExprMode) tree else context.tree) match {
case SilentResultValue(fun1) =>
val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
if (Statistics.canEnable) Statistics.incCounter(typedApplyCount)
@@ -4633,15 +4512,6 @@ trait Typers extends Modes with Adaptations with Tags {
if (useTry) tryTypedApply(fun2, args)
else doTypedApply(tree, fun2, args, mode, pt)
- /*
- if (fun2.hasSymbol && fun2.symbol.isConstructor && (mode & EXPRmode) != 0) {
- res.tpe = res.tpe.notNull
- }
- */
- // TODO: In theory we should be able to call:
- //if (fun2.hasSymbol && fun2.symbol.name == nme.apply && fun2.symbol.owner == ArrayClass) {
- // But this causes cyclic reference for Array class in Cleanup. It is easy to overcome this
- // by calling ArrayClass.info here (or some other place before specialize).
if (fun2.symbol == Array_apply && !res.isErrorTyped) {
val checked = gen.mkCheckInit(res)
// this check is needed to avoid infinite recursion in Duplicators
@@ -4656,37 +4526,36 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- def typedApply(tree: Apply) = {
- val fun = tree.fun
- val args = tree.args
- fun match {
- case Block(stats, expr) =>
- typed1(atPos(tree.pos)(Block(stats, Apply(expr, args) setPos tree.pos.makeTransparent)), mode, pt)
- case _ =>
- normalTypedApply(tree, fun, args) match {
- case Apply(Select(New(tpt), name), args)
- if (tpt.tpe != null &&
- tpt.tpe.typeSymbol == ArrayClass &&
- args.length == 1 &&
- erasure.GenericArray.unapply(tpt.tpe).isDefined) => // !!! todo simplify by using extractor
// convert new Array[T](len) to evidence[ClassTag[T]].newArray(len)
- // convert new Array^N[T](len) for N > 1 to evidence[ClassTag[Array[...Array[T]...]]].newArray(len), where Array HK gets applied (N-1) times
- // [Eugene] no more MaxArrayDims. ClassTags are flexible enough to allow creation of arrays of arbitrary dimensionality (w.r.t JVM restrictions)
- val Some((level, componentType)) = erasure.GenericArray.unapply(tpt.tpe)
- val tagType = List.iterate(componentType, level)(tpe => appliedType(ArrayClass.toTypeConstructor, List(tpe))).last
- atPos(tree.pos) {
- val tag = resolveClassTag(tree.pos, tagType)
- if (tag.isEmpty) MissingClassTagError(tree, tagType)
- else typed(new ApplyToImplicitArgs(Select(tag, nme.newArray), args))
+ // convert new Array^N[T](len) for N > 1 to evidence[ClassTag[Array[...Array[T]...]]].newArray(len)
+ // where Array HK gets applied (N-1) times
+ object ArrayInstantiation {
+ def unapply(tree: Apply) = tree match {
+ case Apply(Select(New(tpt), name), arg :: Nil) if tpt.tpe != null && tpt.tpe.typeSymbol == ArrayClass =>
+ Some(tpt.tpe) collect {
+ case erasure.GenericArray(level, componentType) =>
+ val tagType = (1 until level).foldLeft(componentType)((res, _) => arrayType(res))
+
+ resolveClassTag(tree.pos, tagType) match {
+ case EmptyTree => MissingClassTagError(tree, tagType)
+ case tag => atPos(tree.pos)(new ApplyToImplicitArgs(Select(tag, nme.newArray), arg :: Nil))
}
- case Apply(Select(fun, nme.apply), _) if treeInfo.isSuperConstrCall(fun) => //SI-5696
- TooManyArgumentListsForConstructor(tree)
- case tree1 =>
- tree1
}
+ case _ => None
}
}
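+      // Sketch (illustration only, not part of the patch): at the source level
+      // the extractor above rewrites a generic array creation such as
+      //   def make[T: scala.reflect.ClassTag](len: Int): Array[T] = new Array[T](len)
+      // into roughly
+      //   implicitly[scala.reflect.ClassTag[T]].newArray(len)
+      // once a ClassTag can be resolved; otherwise MissingClassTagError is issued.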
+ def typedApply(tree: Apply) = tree match {
+ case Apply(Block(stats, expr), args) =>
+ typed1(atPos(tree.pos)(Block(stats, Apply(expr, args) setPos tree.pos.makeTransparent)), mode, pt)
+ case Apply(fun, args) =>
+ normalTypedApply(tree, fun, args) match {
+ case ArrayInstantiation(tree1) => typed(tree1, mode, pt)
+ case Apply(Select(fun, nme.apply), _) if treeInfo.isSuperConstrCall(fun) => TooManyArgumentListsForConstructor(tree) //SI-5696
+ case tree1 => tree1
+ }
+ }
+
def convertToAssignment(fun: Tree, qual: Tree, name: Name, args: List[Tree]): Tree = {
val prefix = name.toTermName stripSuffix nme.EQL
def mkAssign(vble: Tree): Tree =
@@ -4740,8 +4609,6 @@ trait Typers extends Modes with Adaptations with Tags {
case This(_) => qual1.symbol
case _ => qual1.tpe.typeSymbol
}
- //println(clazz+"/"+qual1.tpe.typeSymbol+"/"+qual1)
-
def findMixinSuper(site: Type): Type = {
var ps = site.parents filter (_.typeSymbol.name == mix)
if (ps.isEmpty)
@@ -4749,11 +4616,6 @@ trait Typers extends Modes with Adaptations with Tags {
if (ps.isEmpty) {
debuglog("Fatal: couldn't find site " + site + " in " + site.parents.map(_.typeSymbol.name))
if (phase.erasedTypes && context.enclClass.owner.isImplClass) {
- // println(qual1)
- // println(clazz)
- // println(site)
- // println(site.parents)
- // println(mix)
// the reference to super class got lost during erasure
restrictionError(tree.pos, unit, "traits may not select fields or methods from super[C] where C is a class")
ErrorType
@@ -4771,7 +4633,7 @@ trait Typers extends Modes with Adaptations with Tags {
val owntype = (
if (!mix.isEmpty) findMixinSuper(clazz.tpe)
- else if ((mode & SUPERCONSTRmode) != 0) clazz.info.firstParent
+ else if (mode.inAll(SUPERCONSTRmode)) clazz.info.firstParent
else intersectionType(clazz.info.parents)
)
treeCopy.Super(tree, qual1, mix) setType SuperType(clazz.thisType, owntype)
@@ -4785,14 +4647,28 @@ trait Typers extends Modes with Adaptations with Tags {
if (isStableContext(tree, mode, pt)) tree setType clazz.thisType else tree
}
- /** Attribute a selection where <code>tree</code> is <code>qual.name</code>.
- * <code>qual</code> is already attributed.
- *
- * @param qual ...
- * @param name ...
- * @return ...
+ /** Attribute a selection where `tree` is `qual.name`.
+ * `qual` is already attributed.
*/
def typedSelect(tree: Tree, qual: Tree, name: Name): Tree = {
+ val t = typedSelectInternal(tree, qual, name)
+ // Checking for OverloadedTypes being handed out after overloading
+ // resolution has already happened.
+ if (isPastTyper) t.tpe match {
+ case OverloadedType(pre, alts) =>
+ if (alts forall (s => (s.owner == ObjectClass) || (s.owner == AnyClass) || isPrimitiveValueClass(s.owner))) ()
+ else if (settings.debug.value) printCaller(
+ s"""|Select received overloaded type during $phase, but typer is over.
+ |If this type reaches the backend, we are likely doomed to crash.
+ |$t has these overloads:
+ |${alts map (s => " " + s.defStringSeenAs(pre memberType s)) mkString "\n"}
+ |""".stripMargin
+ )("")
+ case _ =>
+ }
+ t
+ }
+ def typedSelectInternal(tree: Tree, qual: Tree, name: Name): Tree = {
def asDynamicCall = dyna.mkInvoke(context.tree, tree, qual, name) map { t =>
dyna.wrapErrors(t, (_.typed1(t, mode, pt)))
}
@@ -4801,55 +4677,49 @@ trait Typers extends Modes with Adaptations with Tags {
// symbol not found? --> try to convert implicitly to a type that does have the required
// member. Added `| PATTERNmode` to allow enrichment in patterns (so we can add e.g., an
// xml member to StringContext, which in turn has an unapply[Seq] method)
- if (name != nme.CONSTRUCTOR && inExprModeOr(mode, PATTERNmode)) {
- val qual1 = adaptToMemberWithArgs(tree, qual, name, mode, true, true)
+ if (name != nme.CONSTRUCTOR && mode.inExprModeOr(PATTERNmode)) {
+ val qual1 = adaptToMemberWithArgs(tree, qual, name, mode, reportAmbiguous = true, saveErrors = true)
if ((qual1 ne qual) && !qual1.isErrorTyped)
return typed(treeCopy.Select(tree, qual1, name), mode, pt)
}
NoSymbol
}
if (phase.erasedTypes && qual.isInstanceOf[Super] && tree.symbol != NoSymbol)
- qual.tpe = tree.symbol.owner.tpe
+ qual setType tree.symbol.owner.tpe
if (!reallyExists(sym)) {
def handleMissing: Tree = {
- if (context.owner.enclosingTopLevelClass.isJavaDefined && name.isTypeName) {
- val tree1 = atPos(tree.pos) { gen.convertToSelectFromType(qual, name) }
- if (tree1 != EmptyTree) return typed1(tree1, mode, pt)
- }
-
- // try to expand according to Dynamic rules.
- asDynamicCall foreach (x => return x)
-
- debuglog(
- "qual = " + qual + ":" + qual.tpe +
- "\nSymbol=" + qual.tpe.termSymbol + "\nsymbol-info = " + qual.tpe.termSymbol.info +
- "\nscope-id = " + qual.tpe.termSymbol.info.decls.hashCode() + "\nmembers = " + qual.tpe.members +
- "\nname = " + name + "\nfound = " + sym + "\nowner = " + context.enclClass.owner)
-
- def makeInteractiveErrorTree = {
- val tree1 = tree match {
- case Select(_, _) => treeCopy.Select(tree, qual, name)
- case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
- }
- setError(tree1)
+ def errorTree = missingSelectErrorTree(tree, qual, name)
+ def asTypeSelection = (
+ if (context.owner.enclosingTopLevelClass.isJavaDefined && name.isTypeName) {
+ atPos(tree.pos)(gen.convertToSelectFromType(qual, name)) match {
+ case EmptyTree => None
+ case tree1 => Some(typed1(tree1, mode, pt))
}
-
- if (name == nme.ERROR && forInteractive)
- return makeInteractiveErrorTree
-
- if (!qual.tpe.widen.isErroneous) {
- if ((mode & QUALmode) != 0) {
- val lastTry = rootMirror.missingHook(qual.tpe.typeSymbol, name)
- if (lastTry != NoSymbol) return typed1(tree setSymbol lastTry, mode, pt)
}
- NotAMemberError(tree, qual, name)
- }
-
- if (forInteractive) makeInteractiveErrorTree else setError(tree)
+ else None
+ )
+ debuglog(s"""
+ |qual=$qual:${qual.tpe}
+ |symbol=${qual.tpe.termSymbol.defString}
+ |scope-id=${qual.tpe.termSymbol.info.decls.hashCode}
+ |members=${qual.tpe.members mkString ", "}
+ |name=$name
+ |found=$sym
+ |owner=${context.enclClass.owner}
+ """.stripMargin)
+
+ // 1) Try converting a term selection on a java class into a type selection.
+ // 2) Try expanding according to Dynamic rules.
+ // 3) Try looking up the name in the qualifier.
+ asTypeSelection orElse asDynamicCall getOrElse (lookupInQualifier(qual, name) match {
+ case NoSymbol => setError(errorTree)
+ case found => typed1(tree setSymbol found, mode, pt)
+ })
}
handleMissing
- } else {
+ }
+ else {
val tree1 = tree match {
case Select(_, _) => treeCopy.Select(tree, qual, name)
case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
@@ -4889,7 +4759,7 @@ trait Typers extends Modes with Adaptations with Tags {
case _ if accessibleError.isDefined =>
// don't adapt constructor, SI-6074
val qual1 = if (name == nme.CONSTRUCTOR) qual
- else adaptToMemberWithArgs(tree, qual, name, mode, false, false)
+ else adaptToMemberWithArgs(tree, qual, name, mode, reportAmbiguous = false, saveErrors = false)
if (!qual1.isErrorTyped && (qual1 ne qual))
typed(Select(qual1, name) setPos tree.pos, mode, pt)
else
@@ -4920,10 +4790,7 @@ trait Typers extends Modes with Adaptations with Tags {
val tree1 = // temporarily use `filter` and an alternative for `withFilter`
if (name == nme.withFilter)
- silent(_ => typedSelect(tree, qual1, name)) match {
- case SilentResultValue(result) =>
- result
- case _ =>
+ silent(_ => typedSelect(tree, qual1, name)) orElse { _ =>
silent(_ => typed1(Select(qual1, nme.filter) setPos tree.pos, mode, pt)) match {
case SilentResultValue(result2) =>
unit.deprecationWarning(
@@ -4947,6 +4814,18 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
+ /** A symbol qualifies if:
+ * - it exists
+ * - it is not stale (stale symbols are made to disappear here)
+ * - if we are in a pattern constructor, method definitions do not qualify
+ * unless they are stable. Otherwise, 'case x :: xs' would find the :: method.
+ */
+ def qualifies(sym: Symbol) = (
+ sym.hasRawInfo
+ && reallyExists(sym)
+ && !(inPatternConstructor && sym.isMethod && !sym.isStable)
+ )
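+      // Example (illustration only): in a pattern such as
+      //   xs match { case y :: ys => y }
+      // the constructor position must resolve `::` to the stable case class
+      // scala.collection.immutable.::, not to the List#:: method, which is
+      // exactly what the inPatternConstructor guard above rules out.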
+
/** Attribute an identifier consisting of a simple name or an outer reference.
*
* @param tree The tree representing the identifier.
@@ -4955,251 +4834,56 @@ trait Typers extends Modes with Adaptations with Tags {
* (2) Change imported symbols to selections
*/
def typedIdent(tree: Tree, name: Name): Tree = {
- var errorContainer: AbsTypeError = null
- def ambiguousError(msg: String) = {
- assert(errorContainer == null, "Cannot set ambiguous error twice for identifier")
- errorContainer = AmbiguousIdentError(tree, name, msg)
- }
- def identError(tree: AbsTypeError) = {
- assert(errorContainer == null, "Cannot set ambiguous error twice for identifier")
- errorContainer = tree
- }
+ // setting to enable unqualified idents in empty package (used by the repl)
+ def inEmptyPackage = if (settings.exposeEmptyPackage.value) lookupInEmpty(name) else NoSymbol
+
+ def issue(err: AbsTypeError) = {
+ // Avoiding some spurious error messages: see SI-2388.
+ val suppress = reporter.hasErrors && (name startsWith tpnme.ANON_CLASS_NAME)
+ if (!suppress)
+ ErrorUtils.issueTypeError(err)
- var defSym: Symbol = tree.symbol // the directly found symbol
- var pre: Type = NoPrefix // the prefix type of defSym, if a class member
- var qual: Tree = EmptyTree // the qualifier tree if transformed tree is a select
- var inaccessibleSym: Symbol = NoSymbol // the first symbol that was found but that was discarded
- // for being inaccessible; used for error reporting
- var inaccessibleExplanation: String = ""
-
- // If a special setting is given, the empty package will be checked as a
- // last ditch effort before failing. This method sets defSym and returns
- // true if a member of the given name exists.
- def checkEmptyPackage(): Boolean = {
- defSym = rootMirror.EmptyPackageClass.tpe.nonPrivateMember(name)
- defSym != NoSymbol
+ setError(tree)
}
- def startingIdentContext = (
// ignore current variable scope in patterns to enforce linearity
- if ((mode & (PATTERNmode | TYPEPATmode)) == 0) context
- else context.outer
- )
- // A symbol qualifies if it exists and is not stale. Stale symbols
- // are made to disappear here. In addition,
- // if we are in a constructor of a pattern, we ignore all definitions
- // which are methods (note: if we don't do that
- // case x :: xs in class List would return the :: method)
- // unless they are stable or are accessors (the latter exception is for better error messages).
- def qualifies(sym: Symbol): Boolean = {
- sym.hasRawInfo && // this condition avoids crashing on self-referential pattern variables
- reallyExists(sym) &&
- ((mode & PATTERNmode | FUNmode) != (PATTERNmode | FUNmode) || !sym.isSourceMethod || sym.hasFlag(ACCESSOR))
- }
-
- if (defSym == NoSymbol) {
- var defEntry: ScopeEntry = null // the scope entry of defSym, if defined in a local scope
-
- var cx = startingIdentContext
- while (defSym == NoSymbol && cx != NoContext && (cx.scope ne null)) { // cx.scope eq null arises during FixInvalidSyms in Duplicators
- pre = cx.enclClass.prefix
- defEntry = cx.scope.lookupEntry(name)
- if ((defEntry ne null) && qualifies(defEntry.sym)) {
- // Right here is where SI-1987, overloading in package objects, can be
- // seen to go wrong. There is an overloaded symbol, but when referring
- // to the unqualified identifier from elsewhere in the package, only
- // the last definition is visible. So overloading mis-resolves and is
- // definition-order dependent, bad things. See run/t1987.scala.
- //
- // I assume the actual problem involves how/where these symbols are entered
- // into the scope. But since I didn't figure out how to fix it that way, I
- // catch it here by looking up package-object-defined symbols in the prefix.
- if (isInPackageObject(defEntry.sym, pre.typeSymbol)) {
- defSym = pre.member(defEntry.sym.name)
- if (defSym ne defEntry.sym) {
- qual = gen.mkAttributedQualifier(pre)
- log(sm"""
- | !!! Overloaded package object member resolved incorrectly.
- | prefix: $pre
- | Discarded: ${defEntry.sym.defString}
- | Using: ${defSym.defString}
- """)
- }
- }
- else
- defSym = defEntry.sym
- }
- else {
- cx = cx.enclClass
- val foundSym = pre.member(name) filter qualifies
- defSym = foundSym filter (context.isAccessible(_, pre, false))
- if (defSym == NoSymbol) {
- if ((foundSym ne NoSymbol) && (inaccessibleSym eq NoSymbol)) {
- inaccessibleSym = foundSym
- inaccessibleExplanation = analyzer.lastAccessCheckDetails
- }
- cx = cx.outer
- }
- }
- }
-
- val symDepth = if (defEntry eq null) cx.depth
- else cx.depth - (cx.scope.nestingLevel - defEntry.owner.nestingLevel)
- var impSym: Symbol = NoSymbol // the imported symbol
- var imports = context.imports // impSym != NoSymbol => it is imported from imports.head
- while (!reallyExists(impSym) && !imports.isEmpty && imports.head.depth > symDepth) {
- impSym = imports.head.importedSymbol(name)
- if (!impSym.exists) imports = imports.tail
- }
-
- // detect ambiguous definition/import,
- // update `defSym` to be the final resolved symbol,
- // update `pre` to be `sym`s prefix type in case it is an imported member,
- // and compute value of:
-
- if (defSym.exists && impSym.exists) {
- // imported symbols take precedence over package-owned symbols in different
- // compilation units. Defined symbols take precedence over erroneous imports.
- if (defSym.isDefinedInPackage &&
- (!currentRun.compiles(defSym) ||
- context.unit.exists && defSym.sourceFile != context.unit.source.file))
- defSym = NoSymbol
- else if (impSym.isError || impSym.name == nme.CONSTRUCTOR)
- impSym = NoSymbol
- }
- if (defSym.exists) {
- if (impSym.exists)
- ambiguousError(
- "it is both defined in "+defSym.owner +
- " and imported subsequently by \n"+imports.head)
- else if (!defSym.owner.isClass || defSym.owner.isPackageClass || defSym.isTypeParameterOrSkolem)
- pre = NoPrefix
- else
- qual = atPos(tree.pos.focusStart)(gen.mkAttributedQualifier(pre))
- } else {
- if (impSym.exists) {
- var impSym1: Symbol = NoSymbol
- var imports1 = imports.tail
-
- /** It's possible that seemingly conflicting identifiers are
- * identifiably the same after type normalization. In such cases,
- * allow compilation to proceed. A typical example is:
- * package object foo { type InputStream = java.io.InputStream }
- * import foo._, java.io._
- */
- def ambiguousImport() = {
- // The types of the qualifiers from which the ambiguous imports come.
- // If the ambiguous name is a value, these must be the same.
- def t1 = imports.head.qual.tpe
- def t2 = imports1.head.qual.tpe
- // The types of the ambiguous symbols, seen as members of their qualifiers.
- // If the ambiguous name is a monomorphic type, we can relax this far.
- def mt1 = t1 memberType impSym
- def mt2 = t2 memberType impSym1
- def characterize = List(
- s"types: $t1 =:= $t2 ${t1 =:= t2} members: ${mt1 =:= mt2}",
- s"member type 1: $mt1",
- s"member type 2: $mt2",
- s"$impSym == $impSym1 ${impSym == impSym1}",
- s"${impSym.debugLocationString} ${impSym.getClass}",
- s"${impSym1.debugLocationString} ${impSym1.getClass}"
- ).mkString("\n ")
-
- // The symbol names are checked rather than the symbols themselves because
- // each time an overloaded member is looked up it receives a new symbol.
- // So foo.member("x") != foo.member("x") if x is overloaded. This seems
- // likely to be the cause of other bugs too...
- if (t1 =:= t2 && impSym.name == impSym1.name)
- log(s"Suppressing ambiguous import: $t1 =:= $t2 && $impSym == $impSym1")
- // Monomorphism restriction on types is in part because type aliases could have the
- // same target type but attach different variance to the parameters. Maybe it can be
- // relaxed, but doesn't seem worth it at present.
- else if (mt1 =:= mt2 && name.isTypeName && impSym.isMonomorphicType && impSym1.isMonomorphicType)
- log(s"Suppressing ambiguous import: $mt1 =:= $mt2 && $impSym and $impSym1 are equivalent")
- else {
- log(s"Import is genuinely ambiguous:\n " + characterize)
- ambiguousError(s"it is imported twice in the same scope by\n${imports.head}\nand ${imports1.head}")
- }
- }
- while (errorContainer == null && !imports1.isEmpty &&
- (!imports.head.isExplicitImport(name) ||
- imports1.head.depth == imports.head.depth)) {
- impSym1 = imports1.head.importedSymbol(name)
- if (reallyExists(impSym1)) {
- if (imports1.head.isExplicitImport(name)) {
- if (imports.head.isExplicitImport(name) ||
- imports1.head.depth != imports.head.depth) ambiguousImport()
- impSym = impSym1
- imports = imports1
- } else if (!imports.head.isExplicitImport(name) &&
- imports1.head.depth == imports.head.depth) ambiguousImport()
+ val startContext = if (mode.inNone(PATTERNmode | TYPEPATmode)) context else context.outer
+ val nameLookup = tree.symbol match {
+ case NoSymbol => startContext.lookupSymbol(name, qualifies)
+ case sym => LookupSucceeded(EmptyTree, sym)
+ }
+ import InferErrorGen._
+ nameLookup match {
+ case LookupAmbiguous(msg) => issue(AmbiguousIdentError(tree, name, msg))
+ case LookupInaccessible(sym, msg) => issue(AccessError(tree, sym, context, msg))
+ case LookupNotFound =>
+ inEmptyPackage orElse lookupInRoot(name) match {
+ case NoSymbol => issue(SymbolNotFoundError(tree, name, context.owner, startContext))
+ case sym => typed1(tree setSymbol sym, mode, pt)
}
- imports1 = imports1.tail
- }
- defSym = impSym
- val qual0 = imports.head.qual
- if (!(shortenImports && qual0.symbol.isPackage)) // optimization: don't write out package prefixes
- qual = atPos(tree.pos.focusStart)(resetPos(qual0.duplicate))
- pre = qual.tpe
- }
- else if (settings.exposeEmptyPackage.value && checkEmptyPackage())
- log("Allowing empty package member " + name + " due to settings.")
- else {
- if ((mode & QUALmode) != 0) {
- val lastTry = rootMirror.missingHook(rootMirror.RootClass, name)
- if (lastTry != NoSymbol) return typed1(tree setSymbol lastTry, mode, pt)
- }
- if (settings.debug.value) {
- log(context.imports)//debug
- }
- if (inaccessibleSym eq NoSymbol) {
- // Avoiding some spurious error messages: see SI-2388.
- if (reporter.hasErrors && (name startsWith tpnme.ANON_CLASS_NAME)) ()
- else identError(SymbolNotFoundError(tree, name, context.owner, startingIdentContext))
- } else
- identError(InferErrorGen.AccessError(
- tree, inaccessibleSym, context.enclClass.owner.thisType, context.enclClass.owner,
- inaccessibleExplanation
- ))
- defSym = context.owner.newErrorSymbol(name)
- }
- }
- }
- if (errorContainer != null) {
- ErrorUtils.issueTypeError(errorContainer)
- setError(tree)
- } else {
- if (defSym.owner.isPackageClass)
- pre = defSym.owner.thisType
-
- // Inferring classOf type parameter from expected type.
- if (defSym.isThisSym) {
- typed1(This(defSym.owner) setPos tree.pos, mode, pt)
- }
+ case LookupSucceeded(qual, sym) =>
+ (// this -> Foo.this
+ if (sym.isThisSym)
+ typed1(This(sym.owner) setPos tree.pos, mode, pt)
// Inferring classOf type parameter from expected type. Otherwise an
// actual call to the stubbed classOf method is generated, returning null.
- else if (isPredefMemberNamed(defSym, nme.classOf) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty)
+ else if (isPredefMemberNamed(sym, nme.classOf) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty)
typedClassOf(tree, TypeTree(pt.typeArgs.head))
else {
- val tree1 = (
- if (qual == EmptyTree) tree
- // atPos necessary because qualifier might come from startContext
- else atPos(tree.pos)(Select(qual, name) setAttachments tree.attachments)
- )
- val (tree2, pre2) = makeAccessible(tree1, defSym, pre, qual)
- // assert(pre.typeArgs isEmpty) // no need to add #2416-style check here, right?
- val tree3 = stabilize(tree2, pre2, mode, pt)
+ val pre1 = if (sym.isTopLevel) sym.owner.thisType else if (qual == EmptyTree) NoPrefix else qual.tpe
+ val tree1 = if (qual == EmptyTree) tree else atPos(tree.pos)(Select(atPos(tree.pos.focusStart)(qual), name))
+ val (tree2, pre2) = makeAccessible(tree1, sym, pre1, qual)
// SI-5967 Important to replace param type A* with Seq[A] when seen from a reference, to avoid
// inference errors in pattern matching.
- tree3 setType dropRepeatedParamType(tree3.tpe)
+ stabilize(tree2, pre2, mode, pt) modifyType dropIllegalStarTypes
+ }) setAttachments tree.attachments
}
}
- }
def typedIdentOrWildcard(tree: Ident) = {
val name = tree.name
if (Statistics.canEnable) Statistics.incCounter(typedIdentCount)
- if ((name == nme.WILDCARD && (mode & (PATTERNmode | FUNmode)) == PATTERNmode) ||
- (name == tpnme.WILDCARD && (mode & TYPEmode) != 0))
+ if ((name == nme.WILDCARD && mode.inPatternNotFunMode) ||
+ (name == tpnme.WILDCARD && mode.inAll(TYPEmode)))
tree setType makeFullyDefined(pt)
else
typedIdent(tree, name)
@@ -5231,7 +4915,7 @@ trait Typers extends Modes with Adaptations with Tags {
val tpt1 = typed1(tpt, mode | FUNmode | TAPPmode, WildcardType)
if (tpt1.isErrorTyped) {
tpt1
- } else if (!tpt1.hasSymbol) {
+ } else if (!tpt1.hasSymbolField) {
AppliedTypeNoParametersError(tree, tpt1.tpe)
} else {
val tparams = tpt1.symbol.typeParams
@@ -5289,26 +4973,6 @@ trait Typers extends Modes with Adaptations with Tags {
treeCopy.PackageDef(tree, pid1, stats1) setType NoType
}
- def typedDocDef(docdef: DocDef) = {
- if (forScaladoc && (sym ne null) && (sym ne NoSymbol)) {
- val comment = docdef.comment
- fillDocComment(sym, comment)
- val typer1 = newTyper(context.makeNewScope(tree, context.owner))
- for (useCase <- comment.useCases) {
- typer1.silent(_.typedUseCase(useCase)) match {
- case SilentTypeError(err) =>
- unit.warning(useCase.pos, err.errMsg)
- case _ =>
- }
- for (useCaseSym <- useCase.defined) {
- if (sym.name != useCaseSym.name)
- unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode)
- }
- }
- }
- typed(docdef.definition, mode, pt)
- }
-
/**
* The typer with the correct context for a method definition. If the method is a default getter for
* a constructor default, the resulting typer has a constructor context (fixes SI-5543).
@@ -5325,7 +4989,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
def typedStar(tree: Star) = {
- if ((mode & STARmode) == 0 && !isPastTyper)
+ if (mode.inNone(STARmode) && !isPastTyper)
StarPatternWithVarargParametersError(tree)
treeCopy.Star(tree, typed(tree.elem, mode, pt)) setType makeFullyDefined(pt)
}
@@ -5378,16 +5042,16 @@ trait Typers extends Modes with Adaptations with Tags {
// that typecheck must not trigger macro expansions, so we explicitly prohibit them
// however we cannot do `context.withMacrosDisabled`
// because `expr` might contain nested macro calls (see SI-6673)
- val exprTyped = typed1(expr updateAttachment SuppressMacroExpansionAttachment, mode, pt)
+ val exprTyped = typed1(suppressMacroExpansion(expr), mode, pt)
exprTyped match {
- case macroDef if macroDef.symbol != null && macroDef.symbol.isTermMacro && !macroDef.symbol.isErroneous =>
+ case macroDef if treeInfo.isMacroApplication(macroDef) =>
MacroEtaError(exprTyped)
case _ =>
typedEta(checkDead(exprTyped))
}
case Ident(tpnme.WILDCARD_STAR) =>
- val exprTyped = typed(expr, onlyStickyModes(mode), WildcardType)
+ val exprTyped = typed(expr, mode.onlySticky, WildcardType)
def subArrayType(pt: Type) =
if (isPrimitiveValueClass(pt.typeSymbol) || !isFullyDefined(pt)) arrayType(pt)
else {
@@ -5396,8 +5060,8 @@ trait Typers extends Modes with Adaptations with Tags {
}
val (exprAdapted, baseClass) = exprTyped.tpe.typeSymbol match {
- case ArrayClass => (adapt(exprTyped, onlyStickyModes(mode), subArrayType(pt)), ArrayClass)
- case _ => (adapt(exprTyped, onlyStickyModes(mode), seqType(pt)), SeqClass)
+ case ArrayClass => (adapt(exprTyped, mode.onlySticky, subArrayType(pt)), ArrayClass)
+ case _ => (adapt(exprTyped, mode.onlySticky, seqType(pt)), SeqClass)
}
exprAdapted.tpe.baseType(baseClass) match {
case TypeRef(_, _, List(elemtp)) =>
@@ -5408,7 +5072,7 @@ trait Typers extends Modes with Adaptations with Tags {
case _ =>
val tptTyped = typedType(tpt, mode)
- val exprTyped = typed(expr, onlyStickyModes(mode), tptTyped.tpe.deconst)
+ val exprTyped = typed(expr, mode.onlySticky, tptTyped.tpe.deconst)
val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped)
if (isPatternMode) {
@@ -5440,7 +5104,7 @@ trait Typers extends Modes with Adaptations with Tags {
//val undets = context.undetparams
// @M: fun is typed in TAPPmode because it is being applied to its actual type parameters
- val fun1 = typed(fun, forFunMode(mode) | TAPPmode, WildcardType)
+ val fun1 = typed(fun, mode.forFunMode | TAPPmode, WildcardType)
val tparams = fun1.symbol.typeParams
//@M TODO: val undets_fun = context.undetparams ?
@@ -5572,7 +5236,7 @@ trait Typers extends Modes with Adaptations with Tags {
case tree: TypeDef => typedTypeDef(tree)
case tree: LabelDef => labelTyper(tree).typedLabelDef(tree)
case tree: PackageDef => typedPackageDef(tree)
- case tree: DocDef => typedDocDef(tree)
+ case tree: DocDef => typedDocDef(tree, mode, pt)
case tree: Annotated => typedAnnotated(tree)
case tree: SingletonTypeTree => typedSingletonTypeTree(tree)
case tree: SelectFromTypeTree => typedSelectFromTypeTree(tree)
@@ -5588,18 +5252,11 @@ trait Typers extends Modes with Adaptations with Tags {
case tree: ApplyDynamic => typedApplyDynamic(tree)
case tree: ReferenceToBoxed => typedReferenceToBoxed(tree)
case tree: TypeTreeWithDeferredRefCheck => tree // TODO: retype the wrapped tree? TTWDRC would have to change to hold the wrapped tree (not a closure)
- case tree: Import => assert(forInteractive, "!forInteractive") ; tree setType tree.symbol.tpe // should not happen in normal circumstances.
case _ => abort(s"unexpected tree: ${tree.getClass}\n$tree")
}
}
- /**
- * @param tree ...
- * @param mode ...
- * @param pt ...
- * @return ...
- */
- def typed(tree: Tree, mode: Int, pt: Type): Tree = {
+ def typed(tree: Tree, mode: Mode, pt: Type): Tree = {
lastTreeToTyper = tree
indentTyping()
@@ -5610,18 +5267,18 @@ trait Typers extends Modes with Adaptations with Tags {
try {
if (context.retyping &&
(tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< ptPlugins))) {
- tree.tpe = null
- if (tree.hasSymbol) tree.symbol = NoSymbol
+ tree.clearType()
+ if (tree.hasSymbolField) tree.symbol = NoSymbol
}
val alreadyTyped = tree.tpe ne null
- var tree1: Tree = if (alreadyTyped) tree else {
+ val tree1: Tree = if (alreadyTyped) tree else {
printTyping(
ptLine("typing %s: pt = %s".format(ptTree(tree), ptPlugins),
"undetparams" -> context.undetparams,
"implicitsEnabled" -> context.implicitsEnabled,
"enrichmentEnabled" -> context.enrichmentEnabled,
- "mode" -> modeString(mode),
+ "mode" -> mode,
"silent" -> context.bufferErrors,
"context.owner" -> context.owner
)
@@ -5640,8 +5297,13 @@ trait Typers extends Modes with Adaptations with Tags {
)
}
- tree1.tpe = pluginsTyped(tree1.tpe, this, tree1, mode, ptPlugins)
- val result = if (tree1.isEmpty) tree1 else adapt(tree1, mode, ptPlugins, tree)
+ tree1 modifyType (pluginsTyped(_, this, tree1, mode, ptPlugins))
+ val result =
+ if (tree1.isEmpty) tree1
+ else {
+ val result = adapt(tree1, mode, ptPlugins, tree)
+ if (hasPendingMacroExpansions) macroExpandAll(this, result) else result
+ }
if (!alreadyTyped) {
printTyping("adapted %s: %s to %s, %s".format(
@@ -5652,7 +5314,7 @@ trait Typers extends Modes with Adaptations with Tags {
result
} catch {
case ex: TypeError =>
- tree.tpe = null
+ tree.clearType()
// The only problematic case are (recoverable) cyclic reference errors which can pop up almost anywhere.
printTyping("caught %s: while typing %s".format(ex, tree)) //DEBUG
@@ -5677,41 +5339,34 @@ trait Typers extends Modes with Adaptations with Tags {
def atOwner(tree: Tree, owner: Symbol): Typer =
newTyper(context.make(tree, owner))
- /** Types expression or definition <code>tree</code>.
- *
- * @param tree ...
- * @return ...
+ /** Types expression or definition `tree`.
*/
def typed(tree: Tree): Tree = {
val ret = typed(tree, EXPRmode, WildcardType)
ret
}
- def typedPos(pos: Position, mode: Int, pt: Type)(tree: Tree) = typed(atPos(pos)(tree), mode, pt)
+ def typedPos(pos: Position, mode: Mode, pt: Type)(tree: Tree) = typed(atPos(pos)(tree), mode, pt)
def typedPos(pos: Position)(tree: Tree) = typed(atPos(pos)(tree))
// TODO: see if this formulation would impose any penalty, since
// it makes for a lot less casting.
// def typedPos[T <: Tree](pos: Position)(tree: T): T = typed(atPos(pos)(tree)).asInstanceOf[T]
- /** Types expression <code>tree</code> with given prototype <code>pt</code>.
- *
- * @param tree ...
- * @param pt ...
- * @return ...
+ /** Types expression `tree` with given prototype `pt`.
*/
def typed(tree: Tree, pt: Type): Tree =
typed(tree, EXPRmode, pt)
- /** Types qualifier <code>tree</code> of a select node.
- * E.g. is tree occurs in a context like <code>tree.m</code>.
+ /** Types qualifier `tree` of a select node.
+   *  E.g. if tree occurs in a context like `tree.m`.
*/
- def typedQualifier(tree: Tree, mode: Int, pt: Type): Tree =
+ def typedQualifier(tree: Tree, mode: Mode, pt: Type): Tree =
typed(tree, EXPRmode | QUALmode | POLYmode | mode & TYPEPATmode, pt) // TR: don't set BYVALmode, since qualifier might end up as by-name param to an implicit
- /** Types qualifier <code>tree</code> of a select node.
- * E.g. is tree occurs in a context like <code>tree.m</code>.
+ /** Types qualifier `tree` of a select node.
+   *  E.g. if tree occurs in a context like `tree.m`.
*/
- def typedQualifier(tree: Tree, mode: Int): Tree =
+ def typedQualifier(tree: Tree, mode: Mode): Tree =
typedQualifier(tree, mode, WildcardType)
def typedQualifier(tree: Tree): Tree = typedQualifier(tree, NOmode, WildcardType)
@@ -5720,7 +5375,7 @@ trait Typers extends Modes with Adaptations with Tags {
def typedOperator(tree: Tree): Tree =
typed(tree, EXPRmode | FUNmode | POLYmode | TAPPmode, WildcardType)
- /** Types a pattern with prototype <code>pt</code> */
+ /** Types a pattern with prototype `pt` */
def typedPattern(tree: Tree, pt: Type): Tree = {
// We disable implicits because otherwise some constructs will
// type check which should not. The pattern matcher does not
@@ -5744,25 +5399,23 @@ trait Typers extends Modes with Adaptations with Tags {
}
/** Types a (fully parameterized) type tree */
- def typedType(tree: Tree, mode: Int): Tree =
- typed(tree, forTypeMode(mode), WildcardType)
+ def typedType(tree: Tree, mode: Mode): Tree =
+ typed(tree, mode.forTypeMode, WildcardType)
/** Types a (fully parameterized) type tree */
def typedType(tree: Tree): Tree = typedType(tree, NOmode)
/** Types a higher-kinded type tree -- pt denotes the expected kind */
- def typedHigherKindedType(tree: Tree, mode: Int, pt: Type): Tree =
+ def typedHigherKindedType(tree: Tree, mode: Mode, pt: Type): Tree =
if (pt.typeParams.isEmpty) typedType(tree, mode) // kind is known and it's *
else typed(tree, HKmode, pt)
- def typedHigherKindedType(tree: Tree, mode: Int): Tree =
+ def typedHigherKindedType(tree: Tree, mode: Mode): Tree =
typed(tree, HKmode, WildcardType)
- def typedHigherKindedType(tree: Tree): Tree = typedHigherKindedType(tree, NOmode)
-
/** Types a type constructor tree used in a new or supertype */
- def typedTypeConstructor(tree: Tree, mode: Int): Tree = {
- val result = typed(tree, forTypeMode(mode) | FUNmode, WildcardType)
+ def typedTypeConstructor(tree: Tree, mode: Mode): Tree = {
+ val result = typed(tree, mode.forTypeMode | FUNmode, WildcardType)
// get rid of type aliases for the following check (#1241)
result.tpe.dealias match {
@@ -5783,7 +5436,7 @@ trait Typers extends Modes with Adaptations with Tags {
def computeType(tree: Tree, pt: Type): Type = {
// macros employ different logic of `computeType`
- assert(!context.owner.isTermMacro, context.owner)
+ assert(!context.owner.isMacro, context.owner)
val tree1 = typed(tree, pt)
transformed(tree) = tree1
val tpe = packedType(tree1, context.owner)
@@ -5792,8 +5445,8 @@ trait Typers extends Modes with Adaptations with Tags {
}
def computeMacroDefType(tree: Tree, pt: Type): Type = {
- assert(context.owner.isTermMacro, context.owner)
- assert(tree.symbol.isTermMacro, tree.symbol)
+ assert(context.owner.isMacro, context.owner)
+ assert(tree.symbol.isMacro, tree.symbol)
assert(tree.isInstanceOf[DefDef], tree.getClass)
val ddef = tree.asInstanceOf[DefDef]
@@ -5819,32 +5472,21 @@ trait Typers extends Modes with Adaptations with Tags {
case None => op
}
- def transformedOrTyped(tree: Tree, mode: Int, pt: Type): Tree = transformed.get(tree) match {
+ def transformedOrTyped(tree: Tree, mode: Mode, pt: Type): Tree = transformed.get(tree) match {
case Some(tree1) => transformed -= tree; tree1
case None => typed(tree, mode, pt)
}
-
-/*
- def convertToTypeTree(tree: Tree): Tree = tree match {
- case TypeTree() => tree
- case _ => TypeTree(tree.tpe)
}
-*/
}
-}
object TypersStats {
import scala.reflect.internal.TypesStats._
- import scala.reflect.internal.BaseTypeSeqsStats._
val typedIdentCount = Statistics.newCounter("#typechecked identifiers")
val typedSelectCount = Statistics.newCounter("#typechecked selections")
val typedApplyCount = Statistics.newCounter("#typechecked applications")
val rawTypeFailed = Statistics.newSubCounter (" of which in failed", rawTypeCount)
val subtypeFailed = Statistics.newSubCounter(" of which in failed", subtypeCount)
val findMemberFailed = Statistics.newSubCounter(" of which in failed", findMemberCount)
- val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount)
- val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount)
- val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount)
val failedSilentNanos = Statistics.newSubTimer("time spent in failed", typerNanos)
val failedApplyNanos = Statistics.newSubTimer(" failed apply", typerNanos)
val failedOpEqNanos = Statistics.newSubTimer(" failed op=", typerNanos)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index b51dc0ccd5..589e5ce6fd 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -23,7 +23,6 @@ trait Unapplies extends ast.TreeDSL
private val unapplyParamName = nme.x_0
-
// In the typeCompleter (templateSig) of a case class (resp. its module),
// synthetic `copy` (resp. `apply`, `unapply`) methods are added. To compute
// their signatures, the corresponding ClassDef is needed. During naming (in
@@ -46,17 +45,6 @@ trait Unapplies extends ast.TreeDSL
}
}
- /** returns type of the unapply method returning T_0...T_n
- * for n == 0, boolean
- * for n == 1, Some[T0]
- * else Some[Product[Ti]]
- */
- def unapplyReturnTypeExpected(argsLength: Int) = argsLength match {
- case 0 => BooleanClass.tpe
- case 1 => optionType(WildcardType)
- case n => optionType(productType((List fill n)(WildcardType)))
- }
-
/** returns unapply or unapplySeq if available */
def unapplyMember(tp: Type): Symbol = (tp member nme.unapply) match {
case NoSymbol => tp member nme.unapplySeq
@@ -148,7 +136,7 @@ trait Unapplies extends ast.TreeDSL
ModuleDef(
Modifiers(cdef.mods.flags & AccessFlags | SYNTHETIC, cdef.mods.privateWithin),
cdef.name.toTermName,
- Template(parents, emptyValDef, NoMods, Nil, ListOfNil, body, cdef.impl.pos.focus))
+ Template(parents, emptyValDef, NoMods, Nil, body, cdef.impl.pos.focus))
}
private val caseMods = Modifiers(SYNTHETIC | CASE)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Variances.scala b/src/compiler/scala/tools/nsc/typechecker/Variances.scala
deleted file mode 100644
index ea436a71fb..0000000000
--- a/src/compiler/scala/tools/nsc/typechecker/Variances.scala
+++ /dev/null
@@ -1,94 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package typechecker
-
-import symtab.Flags.{ VarianceFlags => VARIANCES, _ }
-
-/** Variances form a lattice, 0 <= COVARIANT <= Variances, 0 <= CONTRAVARIANT <= VARIANCES
- */
-trait Variances {
-
- val global: Global
- import global._
-
- /** Flip between covariant and contravariant */
- private def flip(v: Int): Int = {
- if (v == COVARIANT) CONTRAVARIANT
- else if (v == CONTRAVARIANT) COVARIANT
- else v
- }
-
- /** Map everything below VARIANCES to 0 */
- private def cut(v: Int): Int =
- if (v == VARIANCES) v else 0
-
- /** Compute variance of type parameter `tparam` in types of all symbols `sym`. */
- def varianceInSyms(syms: List[Symbol])(tparam: Symbol): Int =
- (VARIANCES /: syms) ((v, sym) => v & varianceInSym(sym)(tparam))
-
- /** Compute variance of type parameter `tparam` in type of symbol `sym`. */
- def varianceInSym(sym: Symbol)(tparam: Symbol): Int =
- if (sym.isAliasType) cut(varianceInType(sym.info)(tparam))
- else varianceInType(sym.info)(tparam)
-
- /** Compute variance of type parameter `tparam` in all types `tps`. */
- def varianceInTypes(tps: List[Type])(tparam: Symbol): Int =
- (VARIANCES /: tps) ((v, tp) => v & varianceInType(tp)(tparam))
-
- /** Compute variance of type parameter `tparam` in all type arguments
- * <code>tps</code> which correspond to formal type parameters `tparams1`.
- */
- def varianceInArgs(tps: List[Type], tparams1: List[Symbol])(tparam: Symbol): Int = {
- var v: Int = VARIANCES;
- for ((tp, tparam1) <- tps zip tparams1) {
- val v1 = varianceInType(tp)(tparam)
- v = v & (if (tparam1.isCovariant) v1
- else if (tparam1.isContravariant) flip(v1)
- else cut(v1))
- }
- v
- }
-
- /** Compute variance of type parameter `tparam` in all type annotations `annots`. */
- def varianceInAttribs(annots: List[AnnotationInfo])(tparam: Symbol): Int = {
- (VARIANCES /: annots) ((v, annot) => v & varianceInAttrib(annot)(tparam))
- }
-
- /** Compute variance of type parameter `tparam` in type annotation `annot`. */
- def varianceInAttrib(annot: AnnotationInfo)(tparam: Symbol): Int = {
- varianceInType(annot.atp)(tparam)
- }
-
- /** Compute variance of type parameter <code>tparam</code> in type <code>tp</code>. */
- def varianceInType(tp: Type)(tparam: Symbol): Int = tp match {
- case ErrorType | WildcardType | NoType | NoPrefix | ThisType(_) | ConstantType(_) =>
- VARIANCES
- case BoundedWildcardType(bounds) =>
- varianceInType(bounds)(tparam)
- case SingleType(pre, sym) =>
- varianceInType(pre)(tparam)
- case TypeRef(pre, sym, args) =>
- if (sym == tparam) COVARIANT
- // tparam cannot occur in tp's args if tp is a type constructor (those don't have args)
- else if (tp.isHigherKinded) varianceInType(pre)(tparam)
- else varianceInType(pre)(tparam) & varianceInArgs(args, sym.typeParams)(tparam)
- case TypeBounds(lo, hi) =>
- flip(varianceInType(lo)(tparam)) & varianceInType(hi)(tparam)
- case RefinedType(parents, defs) =>
- varianceInTypes(parents)(tparam) & varianceInSyms(defs.toList)(tparam)
- case MethodType(params, restpe) =>
- flip(varianceInSyms(params)(tparam)) & varianceInType(restpe)(tparam)
- case NullaryMethodType(restpe) =>
- varianceInType(restpe)(tparam)
- case PolyType(tparams, restpe) =>
- flip(varianceInSyms(tparams)(tparam)) & varianceInType(restpe)(tparam)
- case ExistentialType(tparams, restpe) =>
- varianceInSyms(tparams)(tparam) & varianceInType(restpe)(tparam)
- case AnnotatedType(annots, tp, _) =>
- varianceInAttribs(annots)(tparam) & varianceInType(tp)(tparam)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/compiler/scala/tools/nsc/util/AbstractFileClassLoader.scala
index 638bca8a72..e4f879560c 100644
--- a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
+++ b/src/compiler/scala/tools/nsc/util/AbstractFileClassLoader.scala
@@ -3,11 +3,13 @@
*/
package scala.tools.nsc
-package interpreter
+package util
-import scala.tools.nsc.io.{ File, AbstractFile }
+import scala.tools.nsc.io.AbstractFile
+import java.security.cert.Certificate
+import java.security.{ ProtectionDomain, CodeSource }
import util.ScalaClassLoader
-import java.net.URL
+import java.net.{ URL, URLConnection, URLStreamHandler }
import scala.collection.{ mutable, immutable }
/**
@@ -25,15 +27,15 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
protected def findAbstractFile(name: String): AbstractFile = {
var file: AbstractFile = root
- val pathParts = classNameToPath(name) split '/'
+ val pathParts = name split '/'
for (dirPart <- pathParts.init) {
- file = file.lookupName(dirPart, true)
+ file = file.lookupName(dirPart, directory = true)
if (file == null)
return null
}
- file.lookupName(pathParts.last, false) match {
+ file.lookupName(pathParts.last, directory = false) match {
case null => null
case file => file
}
@@ -47,28 +49,53 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
val pathParts = dirNameToPath(name) split '/'
for (dirPart <- pathParts) {
- file = file.lookupName(dirPart, true)
+ file = file.lookupName(dirPart, directory = true)
if (file == null)
return null
}
- return file
+ file
}
+ // parent delegation in JCL uses getResource; so either add parent.getResAsStream
+ // or implement findResource, which we do here as a study in scarlet (my complexion
+ // after looking at CLs and URLs)
+ override def findResource(name: String): URL = findAbstractFile(name) match {
+ case null => null
+ case file => new URL(null, "repldir:" + file.path, new URLStreamHandler {
+ override def openConnection(url: URL): URLConnection = new URLConnection(url) {
+ override def connect() { }
+ override def getInputStream = file.input
+ }
+ })
+ }
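+  // Usage sketch (illustration only): loader.getResource("foo/Bar.class")
+  // yields a URL with the synthetic "repldir:" scheme; opening a connection
+  // on it streams the in-memory AbstractFile via the handler above, e.g.
+  //   Option(loader.getResource("foo/Bar.class")).map(_.openStream())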
+
+ // this inverts delegation order: super.getResAsStr calls parent.getRes if we fail
override def getResourceAsStream(name: String) = findAbstractFile(name) match {
case null => super.getResourceAsStream(name)
case file => file.input
}
- override def classBytes(name: String): Array[Byte] = findAbstractFile(name) match {
+ // ScalaClassLoader.classBytes uses getResAsStream, so we'll try again before delegating
+ override def classBytes(name: String): Array[Byte] = findAbstractFile(classNameToPath(name)) match {
case null => super.classBytes(name)
case file => file.toByteArray
}
- override def findClass(name: String): JClass = {
+ override def findClass(name: String): Class[_] = {
val bytes = classBytes(name)
if (bytes.length == 0)
throw new ClassNotFoundException(name)
else
- defineClass(name, bytes, 0, bytes.length)
+ defineClass(name, bytes, 0, bytes.length, protectionDomain)
+ }
+
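+  // Protection domain for classes defined by this loader: locate the jar that
+  // hosts scala/runtime/package.class via the context class loader and use the
+  // jar's URL (the part before '!') as the CodeSource; fall back to null if it
+  // cannot be found.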
+ lazy val protectionDomain = {
+ val cl = Thread.currentThread().getContextClassLoader()
+ val resource = cl.getResource("scala/runtime/package.class")
+ if (resource == null) null else {
+ val s = resource.getPath
+ val path = s.substring(0, s.lastIndexOf('!'))
+ new ProtectionDomain(new CodeSource(new URL(path), null.asInstanceOf[Array[Certificate]]), null, this, null)
+ }
}
private val packages = mutable.Map[String, Package]()
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index 471e2653cf..5f13baa107 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -11,9 +11,9 @@ import java.net.URL
import scala.collection.{ mutable, immutable }
import io.{ File, Directory, Path, Jar, AbstractFile }
import scala.reflect.internal.util.StringOps.splitWhere
-import scala.reflect.ClassTag
import Jar.isJarOrZip
import File.pathSeparator
+import scala.collection.convert.WrapAsScala.enumerationAsScalaIterator
import java.net.MalformedURLException
import java.util.regex.PatternSyntaxException
@@ -33,10 +33,6 @@ object ClassPath {
def lsDir(dir: Directory, filt: String => Boolean = _ => true) =
dir.list filter (x => filt(x.name) && (x.isDirectory || isJarOrZip(x))) map (_.path) toList
- def basedir(s: String) =
- if (s contains File.separator) s.substring(0, s.lastIndexOf(File.separator))
- else "."
-
if (pattern == "*") lsDir(Directory("."))
else if (pattern endsWith wildSuffix) lsDir(Directory(pattern dropRight 2))
else if (pattern contains '*') {
@@ -58,22 +54,6 @@ object ClassPath {
/** Split the classpath, apply a transformation function, and reassemble it. */
def map(cp: String, f: String => String): String = join(split(cp) map f: _*)
- /** Split the classpath, filter according to predicate, and reassemble. */
- def filter(cp: String, p: String => Boolean): String = join(split(cp) filter p: _*)
-
- /** Split the classpath and map them into Paths */
- def toPaths(cp: String): List[Path] = split(cp) map (x => Path(x).toAbsolute)
-
- /** Make all classpath components absolute. */
- def makeAbsolute(cp: String): String = fromPaths(toPaths(cp): _*)
-
- /** Join the paths as a classpath */
- def fromPaths(paths: Path*): String = join(paths map (_.path): _*)
- def fromURLs(urls: URL*): String = fromPaths(urls map (x => Path(x.getPath)) : _*)
-
- /** Split the classpath and map them into URLs */
- def toURLs(cp: String): List[URL] = toPaths(cp) map (_.toURL)
-
/** Expand path and possibly expanding stars */
def expandPath(path: String, expandStar: Boolean = true): List[String] =
if (expandStar) split(path) flatMap expandS
@@ -126,31 +106,29 @@ object ClassPath {
/** Creators for sub classpaths which preserve this context.
*/
def sourcesInPath(path: String): List[ClassPath[T]] =
- for (file <- expandPath(path, false) ; dir <- Option(AbstractFile getDirectory file)) yield
+ for (file <- expandPath(path, expandStar = false) ; dir <- Option(AbstractFile getDirectory file)) yield
new SourcePath[T](dir, this)
def contentsOfDirsInPath(path: String): List[ClassPath[T]] =
- for (dir <- expandPath(path, false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield
+ for (dir <- expandPath(path, expandStar = false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield
newClassPath(entry)
- def classesAtAllURLS(path: String): List[ClassPath[T]] =
- (path split " ").toList flatMap classesAtURL
-
- def classesAtURL(spec: String) =
- for (url <- specToURL(spec).toList ; location <- Option(AbstractFile getURL url)) yield
- newClassPath(location)
-
def classesInExpandedPath(path: String): IndexedSeq[ClassPath[T]] =
- classesInPathImpl(path, true).toIndexedSeq
+ classesInPathImpl(path, expand = true).toIndexedSeq
- def classesInPath(path: String) = classesInPathImpl(path, false)
+ def classesInPath(path: String) = classesInPathImpl(path, expand = false)
// Internal
private def classesInPathImpl(path: String, expand: Boolean) =
for (file <- expandPath(path, expand) ; dir <- Option(AbstractFile getDirectory file)) yield
newClassPath(dir)
+
+ def classesInManifest(used: Boolean) =
+ if (used) for (url <- manifests) yield newClassPath(AbstractFile getResources url) else Nil
}
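+  // All MANIFEST.MF resources reachable from the context class loader that are
+  // served from jars (getProtocol == "jar"); used by classesInManifest above.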
+ def manifests = Thread.currentThread().getContextClassLoader().getResources("META-INF/MANIFEST.MF").filter(_.getProtocol() == "jar").toList
+
class JavaContext extends ClassPathContext[AbstractFile] {
def toBinaryName(rep: AbstractFile) = {
val name = rep.name
@@ -216,8 +194,7 @@ abstract class ClassPath[T] {
def sourcepaths: IndexedSeq[AbstractFile]
/**
- * Represents classes which can be loaded with a ClassfileLoader/MsilFileLoader
- * and / or a SourcefileLoader.
+ * Represents classes which can be loaded with a ClassfileLoader and/or SourcefileLoader.
*/
case class ClassRep(binary: Option[T], source: Option[AbstractFile]) {
def name: String = binary match {
@@ -239,7 +216,7 @@ abstract class ClassPath[T] {
* Does not support nested classes on .NET
*/
def findClass(name: String): Option[AnyClassRep] =
- splitWhere(name, _ == '.', true) match {
+ splitWhere(name, _ == '.', doDropIndex = true) match {
case Some((pkg, rest)) =>
val rep = packages find (_.name == pkg) flatMap (_ findClass rest)
rep map {
@@ -283,7 +260,7 @@ class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends
else if (f.isDirectory && validPackage(f.name))
packageBuf += new SourcePath[T](f, context)
}
- (packageBuf.result, classBuf.result)
+ (packageBuf.result(), classBuf.result())
}
lazy val (packages, classes) = traverse()
@@ -296,7 +273,7 @@ class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends
class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[AbstractFile]) extends ClassPath[AbstractFile] {
def name = dir.name
override def origin = dir.underlyingSource map (_.path)
- def asURLs = if (dir.file == null) Nil else List(dir.toURL)
+ def asURLs = if (dir.file == null) List(new URL(name)) else List(dir.toURL)
def asClasspathString = dir.path
val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq()
@@ -310,7 +287,7 @@ class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[Ab
else if (f.isDirectory && validPackage(f.name))
packageBuf += new DirectoryClassPath(f, context)
}
- (packageBuf.result, classBuf.result)
+ (packageBuf.result(), classBuf.result())
}
lazy val (packages, classes) = traverse()
@@ -408,15 +385,3 @@ class JavaClassPath(
containers: IndexedSeq[ClassPath[AbstractFile]],
context: JavaContext)
extends MergedClassPath[AbstractFile](containers, context) { }
-
-object JavaClassPath {
- def fromURLs(urls: Seq[URL], context: JavaContext): JavaClassPath = {
- val containers = {
- for (url <- urls ; f = AbstractFile getURL url ; if f != null) yield
- new DirectoryClassPath(f, context)
- }
- new JavaClassPath(containers.toIndexedSeq, context)
- }
- def fromURLs(urls: Seq[URL]): JavaClassPath =
- fromURLs(urls, ClassPath.DefaultJavaContext)
-}
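
The new `classesInManifest`/`manifests` pair above pulls extra classpath entries out of jar manifests found on the context class loader. A minimal standalone sketch of the same lookup (not part of the patch; helper name is illustrative), using only the JDK plus JavaConverters:

    import java.net.URL
    import scala.collection.JavaConverters._

    // collect the URLs of META-INF/MANIFEST.MF resources that live inside jars,
    // mirroring the `manifests` helper introduced above
    def manifestUrls(): List[URL] =
      Thread.currentThread().getContextClassLoader
        .getResources("META-INF/MANIFEST.MF").asScala
        .filter(_.getProtocol == "jar")
        .toList
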
diff --git a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala
index 9cf2c535df..e8f962a9e2 100644
--- a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala
+++ b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
package util
import scala.util.parsing.combinator._
-import scala.util.parsing.input.{ Reader }
import scala.util.parsing.input.CharArrayReader.EofCh
import scala.collection.mutable.ListBuffer
@@ -22,7 +21,6 @@ import scala.collection.mutable.ListBuffer
trait ParserUtil extends Parsers {
protected implicit class ParserPlus[+T](underlying: Parser[T]) {
def !~>[U](p: => Parser[U]): Parser[U] = (underlying ~! p) ^^ { case a~b => b }
- def <~![U](p: => Parser[U]): Parser[T] = (underlying ~! p) ^^ { case a~b => a }
}
}
@@ -38,7 +36,6 @@ case class CommandLine(
def withUnaryArgs(xs: List[String]) = copy(unaryArguments = xs)
def withBinaryArgs(xs: List[String]) = copy(binaryArguments = xs)
- def originalArgs = args
def assumeBinary = true
def enforceArity = true
def onlyKnownOptions = false
@@ -106,7 +103,6 @@ case class CommandLine(
def isSet(arg: String) = args contains arg
def get(arg: String) = argMap get arg
- def getOrElse(arg: String, orElse: => String) = if (isSet(arg)) apply(arg) else orElse
def apply(arg: String) = argMap(arg)
override def toString() = "CommandLine(\n%s)\n" format (args map (" " + _ + "\n") mkString)
@@ -116,7 +112,6 @@ object CommandLineParser extends RegexParsers with ParserUtil {
override def skipWhitespace = false
def elemExcept(xs: Elem*): Parser[Elem] = elem("elemExcept", x => x != EofCh && !(xs contains x))
- def elemOf(xs: Elem*): Parser[Elem] = elem("elemOf", xs contains _)
def escaped(ch: Char): Parser[String] = "\\" + ch
def mkQuoted(ch: Char): Parser[String] = (
elem(ch) !~> rep(escaped(ch) | elemExcept(ch)) <~ ch ^^ (_.mkString)
diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala
index dde53dc640..ba44126df2 100755
--- a/src/compiler/scala/tools/nsc/util/DocStrings.scala
+++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala
@@ -74,7 +74,7 @@ object DocStrings {
else idx :: findAll(str, idx)(p)
}
- /** Produces a string index, which is a list of ``sections'', i.e
+ /** Produces a string index, which is a list of `sections`, i.e
* pairs of start/end positions of all tagged sections in the string.
* Every section starts with an at sign and extends to the next at sign,
* or to the end of the comment string, but excluding the final two
diff --git a/src/compiler/scala/tools/nsc/util/Exceptional.scala b/src/compiler/scala/tools/nsc/util/Exceptional.scala
index 34344263e8..1608ffa425 100644
--- a/src/compiler/scala/tools/nsc/util/Exceptional.scala
+++ b/src/compiler/scala/tools/nsc/util/Exceptional.scala
@@ -3,8 +3,6 @@ package util
import java.util.concurrent.ExecutionException
import java.lang.reflect.{ InvocationTargetException, UndeclaredThrowableException }
-import scala.reflect.internal.util.StringOps._
-import scala.language.implicitConversions
object Exceptional {
def unwrap(x: Throwable): Throwable = x match {
diff --git a/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala b/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
index 5421843438..e877c990f0 100644
--- a/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
+++ b/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
@@ -14,11 +14,6 @@ trait FreshNameCreator {
*/
def newName(): String
def newName(prefix: String): String
-
- @deprecated("use newName(prefix)", "2.9.0")
- def newName(pos: scala.reflect.internal.util.Position, prefix: String): String = newName(prefix)
- @deprecated("use newName()", "2.9.0")
- def newName(pos: scala.reflect.internal.util.Position): String = newName()
}
object FreshNameCreator {
diff --git a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
index b7ed7903bc..26d19906c2 100644
--- a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
+++ b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
@@ -14,74 +14,32 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int,
def this(buf: IndexedSeq[Char], decodeUni: Boolean, error: String => Unit) =
this(buf, 0, /* 1, 1, */ decodeUni, error)
- /** produce a duplicate of this char array reader which starts reading
- * at current position, independent of what happens to original reader
- */
- def dup: JavaCharArrayReader = clone().asInstanceOf[JavaCharArrayReader]
-
- /** layout constant
- */
- val tabinc = 8
-
/** the line and column position of the current character
*/
var ch: Char = _
var bp = start
- var oldBp = -1
- var oldCh: Char = _
-
- //private var cline: Int = _
- //private var ccol: Int = _
def cpos = bp
var isUnicode: Boolean = _
- var lastLineStartPos: Int = 0
- var lineStartPos: Int = 0
- var lastBlankLinePos: Int = 0
-
- private var onlyBlankChars = false
- //private var nextline = startline
- //private var nextcol = startcol
-
- private def markNewLine() {
- lastLineStartPos = lineStartPos
- if (onlyBlankChars) lastBlankLinePos = lineStartPos
- lineStartPos = bp
- onlyBlankChars = true
- //nextline += 1
- //nextcol = 1
- }
-
- def hasNext: Boolean = if (bp < buf.length) true
- else {
- false
- }
- def last: Char = if (bp > start + 2) buf(bp - 2) else ' ' // XML literals
+ def hasNext = bp < buf.length
def next(): Char = {
- //cline = nextline
- //ccol = nextcol
val buf = this.buf.asInstanceOf[collection.mutable.WrappedArray[Char]].array
if(!hasNext) {
ch = SU
return SU // there is an endless stream of SU's at the end
}
- oldBp = bp
- oldCh = ch
ch = buf(bp)
isUnicode = false
bp = bp + 1
ch match {
case '\t' =>
- // nextcol = ((nextcol - 1) / tabinc * tabinc) + tabinc + 1;
case CR =>
- if (bp < buf.size && buf(bp) == LF) {
+ if (bp < buf.length && buf(bp) == LF) {
ch = LF
bp += 1
}
- markNewLine()
case LF | FF =>
- markNewLine()
case '\\' =>
def evenSlashPrefix: Boolean = {
var p = bp - 2
@@ -90,34 +48,23 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int,
}
def udigit: Int = {
val d = digit2int(buf(bp), 16)
- if (d >= 0) { bp += 1; /* nextcol = nextcol + 1 */ }
- else error("error in unicode escape");
+ if (d >= 0) bp += 1
+ else error("error in unicode escape")
d
}
- // nextcol += 1
if (buf(bp) == 'u' && decodeUni && evenSlashPrefix) {
do {
bp += 1 //; nextcol += 1
- } while (buf(bp) == 'u');
+ } while (buf(bp) == 'u')
val code = udigit << 12 | udigit << 8 | udigit << 4 | udigit
ch = code.asInstanceOf[Char]
isUnicode = true
}
case _ =>
- if (ch > ' ') onlyBlankChars = false
- // nextcol += 1
}
ch
}
- def rewind() {
- if (oldBp == -1) throw new IllegalArgumentException
- bp = oldBp
- ch = oldCh
- oldBp = -1
- oldCh = 'x'
- }
-
def copy: JavaCharArrayReader =
new JavaCharArrayReader(buf, bp, /* nextcol, nextline, */ decodeUni, error)
}
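
The surviving `next()` logic above folds a `\uXXXX` escape into one character by accumulating four hex digits. The same arithmetic in isolation (a hypothetical helper, not part of the reader; invalid digits are not handled here):

    // turn four hex digits into the character they encode, e.g. decodeUnicode("0041") == 'A'
    def decodeUnicode(hex: String): Char = {
      require(hex.length == 4, "expected exactly four hex digits")
      val code = hex.foldLeft(0)((acc, c) => (acc << 4) | Character.digit(c, 16))
      code.toChar
    }
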
diff --git a/src/compiler/scala/tools/nsc/util/MsilClassPath.scala b/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
deleted file mode 100644
index aa3b7c286d..0000000000
--- a/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
+++ /dev/null
@@ -1,169 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2006-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-// $Id$
-
-package scala.tools.nsc
-package util
-
-import java.io.File
-import java.net.URL
-import java.util.StringTokenizer
-import scala.util.Sorting
-import scala.collection.mutable
-import scala.tools.nsc.io.{ AbstractFile, MsilFile }
-import ch.epfl.lamp.compiler.msil.{ Type => MSILType, Assembly }
-import ClassPath.{ ClassPathContext, isTraitImplementation }
-
-/** Keeping the MSIL classpath code in its own file is important to make sure
- * we don't accidentally introduce a dependency on msil.jar in the jvm.
- */
-
-object MsilClassPath {
- def collectTypes(assemFile: AbstractFile) = {
- var res: Array[MSILType] = MSILType.EmptyTypes
- val assem = Assembly.LoadFrom(assemFile.path)
- if (assem != null) {
- // DeclaringType == null: true for non-inner classes
- res = assem.GetTypes() filter (_.DeclaringType == null)
- Sorting.stableSort(res, (t1: MSILType, t2: MSILType) => (t1.FullName compareTo t2.FullName) < 0)
- }
- res
- }
-
- /** On the java side this logic is in PathResolver, but as I'm not really
- * up to folding MSIL into that, I am encapsulating it here.
- */
- def fromSettings(settings: Settings): MsilClassPath = {
- val context =
- if (settings.inline.value) new MsilContext
- else new MsilContext { override def isValidName(name: String) = !isTraitImplementation(name) }
-
- import settings._
- new MsilClassPath(assemextdirs.value, assemrefs.value, sourcepath.value, context)
- }
-
- class MsilContext extends ClassPathContext[MsilFile] {
- def toBinaryName(rep: MsilFile) = rep.msilType.Name
- def newClassPath(assemFile: AbstractFile) = new AssemblyClassPath(MsilClassPath collectTypes assemFile, "", this)
- }
-
- private def assembleEntries(ext: String, user: String, source: String, context: MsilContext): List[ClassPath[MsilFile]] = {
- import ClassPath._
- val etr = new mutable.ListBuffer[ClassPath[MsilFile]]
- val names = new mutable.HashSet[String]
-
- // 1. Assemblies from -Xassem-extdirs
- for (dirName <- expandPath(ext, expandStar = false)) {
- val dir = AbstractFile.getDirectory(dirName)
- if (dir ne null) {
- for (file <- dir) {
- val name = file.name.toLowerCase
- if (name.endsWith(".dll") || name.endsWith(".exe")) {
- names += name
- etr += context.newClassPath(file)
- }
- }
- }
- }
-
- // 2. Assemblies from -Xassem-path
- for (fileName <- expandPath(user, expandStar = false)) {
- val file = AbstractFile.getFile(fileName)
- if (file ne null) {
- val name = file.name.toLowerCase
- if (name.endsWith(".dll") || name.endsWith(".exe")) {
- names += name
- etr += context.newClassPath(file)
- }
- }
- }
-
- def check(n: String) {
- if (!names.contains(n))
- throw new AssertionError("Cannot find assembly "+ n +
- ". Use -Xassem-extdirs or -Xassem-path to specify its location")
- }
- check("mscorlib.dll")
- check("scalaruntime.dll")
-
- // 3. Source path
- for (dirName <- expandPath(source, expandStar = false)) {
- val file = AbstractFile.getDirectory(dirName)
- if (file ne null) etr += new SourcePath[MsilFile](file, context)
- }
-
- etr.toList
- }
-}
-import MsilClassPath._
-
-/**
- * A assembly file (dll / exe) containing classes and namespaces
- */
-class AssemblyClassPath(types: Array[MSILType], namespace: String, val context: MsilContext) extends ClassPath[MsilFile] {
- def name = {
- val i = namespace.lastIndexOf('.')
- if (i < 0) namespace
- else namespace drop (i + 1)
- }
- def asURLs = List(new java.net.URL(name))
- def asClasspathString = sys.error("Unknown") // I don't know what if anything makes sense here?
-
- private lazy val first: Int = {
- var m = 0
- var n = types.length - 1
- while (m < n) {
- val l = (m + n) / 2
- val res = types(l).FullName.compareTo(namespace)
- if (res < 0) m = l + 1
- else n = l
- }
- if (types(m).FullName.startsWith(namespace)) m else types.length
- }
-
- lazy val classes = {
- val cls = new mutable.ListBuffer[ClassRep]
- var i = first
- while (i < types.length && types(i).Namespace.startsWith(namespace)) {
- // CLRTypes used to exclude java.lang.Object and java.lang.String (no idea why..)
- if (types(i).Namespace == namespace)
- cls += ClassRep(Some(new MsilFile(types(i))), None)
- i += 1
- }
- cls.toIndexedSeq
- }
-
- lazy val packages = {
- val nsSet = new mutable.HashSet[String]
- var i = first
- while (i < types.length && types(i).Namespace.startsWith(namespace)) {
- val subns = types(i).Namespace
- if (subns.length > namespace.length) {
- // example: namespace = "System", subns = "System.Reflection.Emit"
- // => find second "." and "System.Reflection" to nsSet.
- val end = subns.indexOf('.', namespace.length + 1)
- nsSet += (if (end < 0) subns
- else subns.substring(0, end))
- }
- i += 1
- }
- val xs = for (ns <- nsSet.toList)
- yield new AssemblyClassPath(types, ns, context)
-
- xs.toIndexedSeq
- }
-
- val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq()
-
- override def toString() = "assembly classpath "+ namespace
-}
-
-/**
- * The classpath when compiling with target:msil. Binary files are represented as
- * MSILType values.
- */
-class MsilClassPath(ext: String, user: String, source: String, context: MsilContext)
-extends MergedClassPath[MsilFile](MsilClassPath.assembleEntries(ext, user, source, context), context) { }
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
index 1f6fa68f57..3899ef24c7 100644
--- a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
+++ b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
@@ -34,9 +34,9 @@ trait ScalaClassLoader extends JClassLoader {
def setAsContext() { setContext(this) }
/** Load and link a class with this classloader */
- def tryToLoadClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, false)
+ def tryToLoadClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, initialize = false)
/** Load, link and initialize a class with this classloader */
- def tryToInitializeClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, true)
+ def tryToInitializeClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, initialize = true)
private def tryClass[T <: AnyRef](path: String, initialize: Boolean): Option[Class[T]] =
catching(classOf[ClassNotFoundException], classOf[SecurityException]) opt
@@ -46,9 +46,6 @@ trait ScalaClassLoader extends JClassLoader {
def create(path: String): AnyRef =
tryToInitializeClass[AnyRef](path) map (_.newInstance()) orNull
- def constructorsOf[T <: AnyRef : ClassTag]: List[Constructor[T]] =
- classTag[T].runtimeClass.getConstructors.toList map (_.asInstanceOf[Constructor[T]])
-
/** The actual bytes for a class file, or an empty array if it can't be found. */
def classBytes(className: String): Array[Byte] = classAsStream(className) match {
case null => Array()
@@ -71,14 +68,6 @@ trait ScalaClassLoader extends JClassLoader {
try asContext(method.invoke(null, Array(arguments.toArray: AnyRef): _*)) // !!! : AnyRef shouldn't be necessary
catch unwrapHandler({ case ex => throw ex })
}
-
- /** A list comprised of this classloader followed by all its
- * (non-null) parent classloaders, if any.
- */
- def loaderChain: List[ScalaClassLoader] = this :: (getParent match {
- case null => Nil
- case p => p.loaderChain
- })
}
/** Methods for obtaining various classloaders.
@@ -99,35 +88,6 @@ object ScalaClassLoader {
}
def contextLoader = apply(Thread.currentThread.getContextClassLoader)
def appLoader = apply(JClassLoader.getSystemClassLoader)
- def extLoader = apply(appLoader.getParent)
- def bootLoader = apply(null)
- def contextChain = loaderChain(contextLoader)
-
- def pathToErasure[T: ClassTag] = pathToClass(classTag[T].runtimeClass)
- def pathToClass(clazz: Class[_]) = clazz.getName.replace('.', JFile.separatorChar) + ".class"
- def locate[T: ClassTag] = contextLoader getResource pathToErasure[T]
-
- /** Tries to guess the classpath by type matching the context classloader
- * and its parents, looking for any classloaders which will reveal their
- * classpath elements as urls. It it can't find any, creates a classpath
- * from the supplied string.
- */
- def guessClassPathString(default: String = ""): String = {
- val classpathURLs = contextChain flatMap {
- case x: HasClassPath => x.classPathURLs
- case x: JURLClassLoader => x.getURLs.toSeq
- case _ => Nil
- }
- if (classpathURLs.isEmpty) default
- else JavaClassPath.fromURLs(classpathURLs).asClasspathString
- }
-
- def loaderChain(head: JClassLoader) = {
- def loop(cl: JClassLoader): List[JClassLoader] =
- if (cl == null) Nil else cl :: loop(cl.getParent)
-
- loop(head)
- }
def setContext(cl: JClassLoader) =
Thread.currentThread.setContextClassLoader(cl)
def savingContextLoader[T](body: => T): T = {
@@ -142,16 +102,13 @@ object ScalaClassLoader {
with HasClassPath {
private var classloaderURLs: Seq[URL] = urls
- private def classpathString = ClassPath.fromURLs(urls: _*)
def classPathURLs: Seq[URL] = classloaderURLs
- def classPath: ClassPath[_] = JavaClassPath fromURLs classPathURLs
/** Override to widen to public */
override def addURL(url: URL) = {
classloaderURLs :+= url
super.addURL(url)
}
- def toLongString = urls.mkString("URLClassLoader(\n ", "\n ", "\n)\n")
}
def fromURLs(urls: Seq[URL], parent: ClassLoader = null): URLClassLoader =
@@ -162,7 +119,6 @@ object ScalaClassLoader {
fromURLs(urls) tryToLoadClass name isDefined
/** Finding what jar a clazz or instance came from */
- def origin(x: Any): Option[URL] = originOfClass(x.getClass)
def originOfClass(x: Class[_]): Option[URL] =
Option(x.getProtectionDomain.getCodeSource) flatMap (x => Option(x.getLocation))
}
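
`tryToLoadClass` and `tryToInitializeClass` above differ only in whether static initializers run. The distinction, reduced to plain JDK reflection (a sketch, not the trait's actual implementation; method name is illustrative):

    // Option-returning wrapper around Class.forName: initialize = false links the class
    // without running its static initializers, initialize = true also runs them
    def tryClass(path: String, initialize: Boolean, loader: ClassLoader): Option[Class[_]] =
      try Some(Class.forName(path, initialize, loader))
      catch { case _: ClassNotFoundException | _: SecurityException => None }
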
diff --git a/src/compiler/scala/tools/nsc/util/ShowPickled.scala b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
index 2b87280c24..f91e94471a 100644
--- a/src/compiler/scala/tools/nsc/util/ShowPickled.scala
+++ b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
@@ -7,13 +7,12 @@ package scala.tools
package nsc
package util
-import java.io.{File, FileInputStream, PrintStream}
+import java.io.PrintStream
import java.lang.Long.toHexString
import java.lang.Float.intBitsToFloat
import java.lang.Double.longBitsToDouble
import scala.reflect.internal.{Flags, Names}
import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
-import interpreter.ByteCode.scalaSigBytesForPath
object ShowPickled extends Names {
import PickleFormat._
@@ -94,7 +93,6 @@ object ShowPickled extends Names {
case ANNOTATEDtpe => "ANNOTATEDtpe"
case ANNOTINFO => "ANNOTINFO"
case ANNOTARGARRAY => "ANNOTARGARRAY"
- // case DEBRUIJNINDEXtpe => "DEBRUIJNINDEXtpe"
case EXISTENTIALtpe => "EXISTENTIALtpe"
case TREE => "TREE"
case MODIFIERS => "MODIFIERS"
@@ -251,7 +249,7 @@ object ShowPickled extends Names {
case SYMANNOT =>
printSymbolRef(); printTypeRef(); buf.until(end, printAnnotArgRef)
case ANNOTATEDtpe =>
- printTypeRef(); buf.until(end, printAnnotInfoRef);
+ printTypeRef(); buf.until(end, printAnnotInfoRef)
case ANNOTINFO =>
printTypeRef(); buf.until(end, printAnnotArgRef)
case ANNOTARGARRAY =>
@@ -272,8 +270,7 @@ object ShowPickled extends Names {
for (i <- 0 until index.length) printEntry(i)
}
- def fromFile(path: String) = fromBytes(io.File(path).toByteArray)
- def fromName(name: String) = fromBytes(scalaSigBytesForPath(name) getOrElse Array())
+ def fromFile(path: String) = fromBytes(io.File(path).toByteArray())
def fromBytes(data: => Array[Byte]): Option[PickleBuffer] =
try Some(new PickleBuffer(data, 0, data.length))
catch { case _: Exception => None }
@@ -288,7 +285,7 @@ object ShowPickled extends Names {
def main(args: Array[String]) {
args foreach { arg =>
- (fromFile(arg) orElse fromName(arg)) match {
+ fromFile(arg) match {
case Some(pb) => show(arg + ":", pb)
case _ => Console.println("Cannot read " + arg)
}
diff --git a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala
index b103ae9cb0..6997dbd402 100644
--- a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala
+++ b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala
@@ -6,7 +6,7 @@ package util
import java.io.PrintStream
/** A simple tracer
- * @param out: The print stream where trace info shoul be sent
+ * @param out: The print stream where trace info should be sent
* @param enabled: A condition that must be true for trace info to be produced.
*/
class SimpleTracer(out: PrintStream, enabled: Boolean = true) {
@@ -14,6 +14,5 @@ class SimpleTracer(out: PrintStream, enabled: Boolean = true) {
if (enabled) out.println(msg+value)
value
}
- def withOutput(out: PrintStream) = new SimpleTracer(out, enabled)
def when(enabled: Boolean): SimpleTracer = new SimpleTracer(out, enabled)
}
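
With `withOutput` gone, usage of the tracer stays the same: `apply(msg)(value)` prints and returns the value, which is how the ToolBox code later in this patch uses it. A small illustration (assuming the curried `apply[T](msg)(value)` signature):

    import scala.tools.nsc.util.SimpleTracer

    val tracer = new SimpleTracer(System.out)
    val n = tracer("computed: ")(2 + 2)   // prints "computed: 4" and returns 4
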
diff --git a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
index b1f4696d3e..4f7a9ff878 100644
--- a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
+++ b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
@@ -7,9 +7,9 @@ class WorkScheduler {
type Action = () => Unit
- private var todo = new mutable.Queue[Action]
- private var throwables = new mutable.Queue[Throwable]
- private var interruptReqs = new mutable.Queue[InterruptReq]
+ private val todo = new mutable.Queue[Action]
+ private val throwables = new mutable.Queue[Throwable]
+ private val interruptReqs = new mutable.Queue[InterruptReq]
/** Called from server: block until one of todo list, throwables or interruptReqs is nonempty */
def waitForMoreWork() = synchronized {
diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala
index d34d4ee092..039fec8605 100644
--- a/src/compiler/scala/tools/nsc/util/package.scala
+++ b/src/compiler/scala/tools/nsc/util/package.scala
@@ -18,16 +18,9 @@ package object util {
type HashSet[T >: Null <: AnyRef] = scala.reflect.internal.util.HashSet[T]
val HashSet = scala.reflect.internal.util.HashSet
- def onull[T](value: T, orElse: => T): T = if (value == null) orElse else value
-
/** Apply a function and return the passed value */
def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
- /** Frequency counter */
- def freq[T](xs: Traversable[T]): Map[T, Int] = xs groupBy identity mapValues (_.size)
-
- def freqrank[T](xs: Traversable[(T, Int)]): List[(Int, T)] = xs.toList map (_.swap) sortBy (-_._1)
-
/** Execute code and then wait for all non-daemon Threads
* created and begun during its execution to complete.
*/
@@ -54,18 +47,6 @@ package object util {
(result, ts2 filterNot (ts1 contains _))
}
- /** Given a function and a block of code, evaluates code block,
- * calls function with milliseconds elapsed, and returns block result.
- */
- def millisElapsedTo[T](f: Long => Unit)(body: => T): T = {
- val start = System.currentTimeMillis
- val result = body
- val end = System.currentTimeMillis
-
- f(end - start)
- result
- }
-
/** Generate a string using a routine that wants to write on a stream. */
def stringFromWriter(writer: PrintWriter => Unit): String = {
val stringWriter = new StringWriter()
@@ -83,8 +64,19 @@ package object util {
}
def stackTraceString(ex: Throwable): String = stringFromWriter(ex printStackTrace _)
+ /** A one line string which contains the class of the exception, the
+ * message if any, and the first non-Predef location in the stack trace
+ * (to exclude assert, require, etc.)
+ */
+ def stackTraceHeadString(ex: Throwable): String = {
+ val frame = ex.getStackTrace.dropWhile(_.getClassName contains "Predef") take 1 mkString ""
+ val msg = ex.getMessage match { case null | "" => "" ; case s => s"""("$s")""" }
+ val clazz = ex.getClass.getName.split('.').last
+
+ s"$clazz$msg @ $frame"
+ }
+
lazy val trace = new SimpleTracer(System.out)
- lazy val errtrace = new SimpleTracer(System.err)
@deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0")
val StringOps = scala.reflect.internal.util.StringOps
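
A quick illustration of the new `stackTraceHeadString` helper; the exact frame text depends on the call site, so the output shown is only approximate:

    import scala.tools.nsc.util.stackTraceHeadString

    try require(2 + 2 == 5, "arithmetic is broken")
    catch {
      case ex: Throwable =>
        // skips the Predef.require frames and reports the first caller frame, roughly:
        // IllegalArgumentException("requirement failed: arithmetic is broken") @ Example$.main(Example.scala:7)
        println(stackTraceHeadString(ex))
    }
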
diff --git a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala
index d35ac43424..ac50324fa9 100644
--- a/src/compiler/scala/tools/reflect/FastTrack.scala
+++ b/src/compiler/scala/tools/reflect/FastTrack.scala
@@ -2,7 +2,9 @@ package scala.tools
package reflect
import scala.reflect.reify.Taggers
-import scala.tools.nsc.typechecker.{Analyzer, Macros}
+import scala.tools.nsc.typechecker.{ Analyzer, Macros }
+import scala.reflect.runtime.Macros.currentMirror
+import scala.reflect.api.Universe
/** Optimizes system macro expansions by hardwiring them directly to their implementations
* bypassing standard reflective load and invoke to avoid the overhead of Java/Scala reflection.
@@ -12,30 +14,32 @@ trait FastTrack {
import global._
import definitions._
-
import scala.language.implicitConversions
- private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } = new { val c: c0.type = c0 } with Taggers
- private implicit def context2macroimplementations(c0: MacroContext): MacroImplementations { val c: c0.type } = new { val c: c0.type = c0 } with MacroImplementations
+ import treeInfo.Applied
+
+ private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } =
+ new { val c: c0.type = c0 } with Taggers
+ private implicit def context2macroimplementations(c0: MacroContext): MacroImplementations { val c: c0.type } =
+ new { val c: c0.type = c0 } with MacroImplementations
+ private def make(sym: Symbol)(pf: PartialFunction[Applied, MacroContext => Tree]) =
+ sym -> new FastTrackEntry(pf)
- implicit def fastTrackEntry2MacroRuntime(entry: FastTrackEntry): MacroRuntime = args => entry.run(args.c)
- type FastTrackExpander = PartialFunction[(MacroContext, Tree), Tree]
- case class FastTrackEntry(sym: Symbol, expander: FastTrackExpander) {
- def validate(c: MacroContext): Boolean = expander.isDefinedAt((c, c.expandee))
- def run(c: MacroContext): Any = {
- val result = expander((c, c.expandee))
- c.Expr[Nothing](result)(c.WeakTypeTag.Nothing)
+ final class FastTrackEntry(pf: PartialFunction[Applied, MacroContext => Tree]) extends (MacroArgs => Any) {
+ def validate(tree: Tree) = pf isDefinedAt Applied(tree)
+ def apply(margs: MacroArgs) = {
+ val MacroArgs(c, args) = margs
+ // Macros validated that the pf is defined here - and there's not much we could do if it weren't.
+ c.Expr[Nothing](pf(Applied(c.expandee))(c))(c.WeakTypeTag.Nothing)
}
}
- lazy val fastTrack: Map[Symbol, FastTrackEntry] = {
- var registry = Map[Symbol, FastTrackEntry]()
- implicit class BindTo(sym: Symbol) { def bindTo(expander: FastTrackExpander): Unit = if (sym != NoSymbol) registry += sym -> FastTrackEntry(sym, expander) }
- materializeClassTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List())) => c.materializeClassTag(tt.tpe) }
- materializeWeakTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = false) }
- materializeTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = true) }
- ApiUniverseReify bindTo { case (c, Apply(TypeApply(_, List(tt)), List(expr))) => c.materializeExpr(c.prefix.tree, EmptyTree, expr) }
- ReflectRuntimeCurrentMirror bindTo { case (c, _) => scala.reflect.runtime.Macros.currentMirror(c).tree }
- StringContext_f bindTo { case (c, app@Apply(Select(Apply(_, parts), _), args)) => c.macro_StringInterpolation_f(parts, args, app.pos) }
- registry
- }
+ /** A map from a set of pre-established macro symbols to their implementations. */
+ lazy val fastTrack = Map[Symbol, FastTrackEntry](
+ make( materializeClassTag) { case Applied(_, ttag :: Nil, _) => _.materializeClassTag(ttag.tpe) },
+ make( materializeWeakTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _) => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = false) },
+ make( materializeTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _) => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = true) },
+ make( ApiUniverseReify) { case Applied(_, ttag :: Nil, (expr :: _) :: _) => c => c.materializeExpr(c.prefix.tree, EmptyTree, expr) },
+ make( StringContext_f) { case Applied(Select(Apply(_, ps), _), _, args) => c => c.macro_StringInterpolation_f(ps, args.flatten, c.expandee.pos) },
+ make(ReflectRuntimeCurrentMirror) { case _ => c => currentMirror(c).tree }
+ )
}
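
The new `fastTrack` table above is a map built from `symbol -> entry` pairs, where each entry wraps a partial function that both validates applicability and performs the expansion. The registration pattern, stripped of compiler types (all names here are illustrative, not the compiler's API):

    // an entry both tests applicability (validate) and runs the expansion (apply)
    final class Entry[A, R](pf: PartialFunction[A, R]) extends (A => R) {
      def validate(a: A): Boolean = pf isDefinedAt a
      def apply(a: A): R = pf(a)
    }
    // pin the argument/result types, as FastTrack does with Applied and MacroContext => Tree
    def make(key: String)(pf: PartialFunction[Int, Int]): (String, Entry[Int, Int]) = key -> new Entry(pf)

    val table = Map(
      make("double") { case n if n % 2 == 0 => n * 2 },
      make("negate") { case n               => -n }
    )
    // table("negate")(21) == -21; table("double").validate(3) == false
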
diff --git a/src/compiler/scala/tools/reflect/FrontEnd.scala b/src/compiler/scala/tools/reflect/FrontEnd.scala
index f0d3d5973d..e3341a451f 100644
--- a/src/compiler/scala/tools/reflect/FrontEnd.scala
+++ b/src/compiler/scala/tools/reflect/FrontEnd.scala
@@ -21,7 +21,7 @@ trait FrontEnd {
def hasErrors = ERROR.count > 0
def hasWarnings = WARNING.count > 0
- case class Info(val pos: Position, val msg: String, val severity: Severity)
+ case class Info(pos: Position, msg: String, severity: Severity)
val infos = new scala.collection.mutable.LinkedHashSet[Info]
/** Handles incoming info */
diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala
index 86cd845c54..002a3fce82 100644
--- a/src/compiler/scala/tools/reflect/MacroImplementations.scala
+++ b/src/compiler/scala/tools/reflect/MacroImplementations.scala
@@ -1,6 +1,5 @@
package scala.tools.reflect
-import scala.reflect.macros.{ReificationException, UnexpectedReificationException}
import scala.reflect.macros.runtime.Context
import scala.collection.mutable.ListBuffer
import scala.collection.mutable.Stack
@@ -27,7 +26,7 @@ abstract class MacroImplementations {
"too many arguments for interpolated string")
}
val stringParts = parts map {
- case Literal(Constant(s: String)) => s;
+ case Literal(Constant(s: String)) => s
case _ => throw new IllegalArgumentException("argument parts must be a list of string literals")
}
@@ -38,7 +37,7 @@ abstract class MacroImplementations {
val argsStack = Stack(args : _*)
def defval(value: Tree, tpe: Type): Unit = {
- val freshName = newTermName(c.fresh("arg$"))
+ val freshName = newTermName(c.freshName("arg$"))
evals += ValDef(Modifiers(), freshName, TypeTree(tpe) setPos value.pos.focus, value) setPos value.pos
ids += Ident(freshName)
}
@@ -89,7 +88,7 @@ abstract class MacroImplementations {
var idx = 0
if (!first) {
- val arg = argsStack.pop
+ val arg = argsStack.pop()
if (strIsEmpty || (str charAt 0) != '%') {
bldr append "%s"
defval(arg, AnyTpe)
@@ -142,9 +141,9 @@ abstract class MacroImplementations {
Literal(Constant(fstring)),
newTermName("format")),
List(ids: _* )
- );
+ )
Block(evals.toList, atPos(origApplyPos.focus)(expr)) setPos origApplyPos.makeTransparent
}
-}
\ No newline at end of file
+}
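
`macro_StringInterpolation_f` above is the expansion behind the standard `f` interpolator, which checks the format directives against the interpolated arguments at compile time:

    val name   = "James"
    val height = 1.9
    println(f"$name%s is $height%2.2f meters tall")   // James is 1.90 meters tall
    // f"$name%d" is rejected at compile time because %d does not accept a String
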
diff --git a/src/compiler/scala/tools/reflect/ReflectMain.scala b/src/compiler/scala/tools/reflect/ReflectMain.scala
index 116ae24cdd..3ae21b6b98 100644
--- a/src/compiler/scala/tools/reflect/ReflectMain.scala
+++ b/src/compiler/scala/tools/reflect/ReflectMain.scala
@@ -4,7 +4,6 @@ package reflect
import scala.tools.nsc.Driver
import scala.tools.nsc.Global
import scala.tools.nsc.Settings
-import scala.tools.nsc.util.ClassPath.DefaultJavaContext
import scala.tools.nsc.util.ScalaClassLoader
import scala.tools.util.PathResolver
@@ -16,4 +15,4 @@ object ReflectMain extends Driver {
}
override def newCompiler(): Global = new ReflectGlobal(settings, reporter, classloaderFromSettings(settings))
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/reflect/StdTags.scala b/src/compiler/scala/tools/reflect/StdTags.scala
index a3bc9b9bd1..6c1821f8aa 100644
--- a/src/compiler/scala/tools/reflect/StdTags.scala
+++ b/src/compiler/scala/tools/reflect/StdTags.scala
@@ -1,7 +1,6 @@
package scala.tools
package reflect
-import java.lang.{Class => jClass}
import scala.reflect.{ClassTag, classTag}
import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse}
@@ -24,7 +23,7 @@ trait StdTags {
}
})
- private def tagOfStaticClass[T: ClassTag]: u.TypeTag[T] =
+ protected def tagOfStaticClass[T: ClassTag]: u.TypeTag[T] =
u.TypeTag[T](
m,
new TypeCreator {
@@ -35,8 +34,6 @@ trait StdTags {
lazy val tagOfString = tagOfStaticClass[String]
lazy val tagOfFile = tagOfStaticClass[scala.tools.nsc.io.File]
lazy val tagOfDirectory = tagOfStaticClass[scala.tools.nsc.io.Directory]
- lazy val tagOfStdReplVals = tagOfStaticClass[scala.tools.nsc.interpreter.StdReplVals]
- lazy val tagOfIMain = tagOfStaticClass[scala.tools.nsc.interpreter.IMain]
lazy val tagOfThrowable = tagOfStaticClass[java.lang.Throwable]
lazy val tagOfClassLoader = tagOfStaticClass[java.lang.ClassLoader]
lazy val tagOfBigInt = tagOfStaticClass[BigInt]
diff --git a/src/compiler/scala/tools/reflect/ToolBox.scala b/src/compiler/scala/tools/reflect/ToolBox.scala
index ab814b617d..be22003114 100644
--- a/src/compiler/scala/tools/reflect/ToolBox.scala
+++ b/src/compiler/scala/tools/reflect/ToolBox.scala
@@ -101,4 +101,4 @@ trait ToolBox[U <: scala.reflect.api.Universe] {
/** Represents an error during toolboxing
*/
-case class ToolBoxError(val message: String, val cause: Throwable = null) extends Throwable(message, cause)
+case class ToolBoxError(message: String, cause: Throwable = null) extends Throwable(message, cause)
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index c05c59d5ff..e6bbe1dbed 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -1,13 +1,11 @@
package scala.tools
package reflect
+import scala.tools.nsc.EXPRmode
import scala.tools.nsc.reporters._
import scala.tools.nsc.CompilerCommand
-import scala.tools.nsc.Global
-import scala.tools.nsc.typechecker.Modes
import scala.tools.nsc.io.VirtualDirectory
-import scala.tools.nsc.interpreter.AbstractFileClassLoader
-import scala.tools.nsc.util.FreshNameCreator
+import scala.tools.nsc.util.AbstractFileClassLoader
import scala.reflect.internal.Flags._
import scala.reflect.internal.util.{BatchSourceFile, NoSourceFile, NoFile}
import java.lang.{Class => jClass}
@@ -29,8 +27,8 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
lazy val classLoader = new AbstractFileClassLoader(virtualDirectory, factorySelf.mirror.classLoader)
lazy val mirror: u.Mirror = u.runtimeMirror(classLoader)
- class ToolBoxGlobal(settings: scala.tools.nsc.Settings, reporter: Reporter)
- extends ReflectGlobal(settings, reporter, toolBoxSelf.classLoader) {
+ class ToolBoxGlobal(settings: scala.tools.nsc.Settings, reporter0: Reporter)
+ extends ReflectGlobal(settings, reporter0, toolBoxSelf.classLoader) {
import definitions._
private val trace = scala.tools.nsc.util.trace when settings.debug.value
@@ -73,13 +71,14 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val typed = expr filter (t => t.tpe != null && t.tpe != NoType && !t.isInstanceOf[TypeTree])
if (!typed.isEmpty) throw ToolBoxError("reflective toolbox has failed: cannot operate on trees that are already typed")
- val freeTypes = expr.freeTypes
- if (freeTypes.length > 0) {
- var msg = "reflective toolbox has failed:" + EOL
- msg += "unresolved free type variables (namely: " + (freeTypes map (ft => "%s %s".format(ft.name, ft.origin)) mkString ", ") + "). "
- msg += "have you forgot to use TypeTag annotations for type parameters external to a reifee? "
- msg += "if you have troubles tracking free type variables, consider using -Xlog-free-types"
- throw ToolBoxError(msg)
+ if (expr.freeTypes.nonEmpty) {
+ val ft_s = expr.freeTypes map (ft => s" ${ft.name} ${ft.origin}") mkString "\n "
+ throw ToolBoxError(s"""
+ |reflective toolbox failed due to unresolved free type variables:
+ |$ft_s
+ |have you forgotten to use TypeTag annotations for type parameters external to a reifee?
+ |if you have troubles tracking free type variables, consider using -Xlog-free-types
+ """.stripMargin.trim)
}
}
@@ -100,9 +99,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
if (namesakes.length > 0) name += ("$" + (namesakes.length + 1))
freeTermNames += (ft -> newTermName(name + nme.REIFY_FREE_VALUE_SUFFIX))
})
- var expr = new Transformer {
+ val expr = new Transformer {
override def transform(tree: Tree): Tree =
- if (tree.hasSymbol && tree.symbol.isFreeTerm) {
+ if (tree.hasSymbolField && tree.symbol.isFreeTerm) {
tree match {
case Ident(_) =>
val freeTermRef = Ident(freeTermNames(tree.symbol.asFreeTerm))
@@ -132,7 +131,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val ownerClass = rootMirror.EmptyPackageClass.newClassSymbol(newTypeName("<expression-owner>"))
build.setTypeSignature(ownerClass, ClassInfoType(List(ObjectClass.tpe), newScope, ownerClass))
val owner = ownerClass.newLocalDummy(expr.pos)
- var currentTyper = analyzer.newTyper(analyzer.rootContext(NoCompilationUnit, EmptyTree).make(expr, owner))
+ val currentTyper = analyzer.newTyper(analyzer.rootContext(NoCompilationUnit, EmptyTree).make(expr, owner))
val wrapper1 = if (!withImplicitViewsDisabled) (currentTyper.context.withImplicitsEnabled[Tree] _) else (currentTyper.context.withImplicitsDisabled[Tree] _)
val wrapper2 = if (!withMacrosDisabled) (currentTyper.context.withMacrosEnabled[Tree] _) else (currentTyper.context.withMacrosDisabled[Tree] _)
def wrapper (tree: => Tree) = wrapper1(wrapper2(tree))
@@ -146,18 +145,18 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
case Block(dummies, unwrapped) => (dummies, unwrapped)
case unwrapped => (Nil, unwrapped)
}
- var invertedIndex = freeTerms map (_.swap)
+ val invertedIndex = freeTerms map (_.swap)
// todo. also fixup singleton types
unwrapped = new Transformer {
override def transform(tree: Tree): Tree =
tree match {
- case Ident(name) if invertedIndex contains name =>
+ case Ident(name: TermName) if invertedIndex contains name =>
Ident(invertedIndex(name)) setType tree.tpe
case _ =>
super.transform(tree)
}
}.transform(unwrapped)
- new TreeTypeSubstituter(dummies1 map (_.symbol), dummies1 map (dummy => SingleType(NoPrefix, invertedIndex(dummy.symbol.name)))).traverse(unwrapped)
+ new TreeTypeSubstituter(dummies1 map (_.symbol), dummies1 map (dummy => SingleType(NoPrefix, invertedIndex(dummy.symbol.name.toTermName)))).traverse(unwrapped)
unwrapped = if (expr0.isTerm) unwrapped else unwrapFromTerm(unwrapped)
unwrapped
}
@@ -166,7 +165,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
transformDuringTyper(expr, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)(
(currentTyper, expr) => {
trace("typing (implicit views = %s, macros = %s): ".format(!withImplicitViewsDisabled, !withMacrosDisabled))(showAttributed(expr, true, true, settings.Yshowsymkinds.value))
- currentTyper.silent(_.typed(expr, analyzer.EXPRmode, pt)) match {
+ currentTyper.silent(_.typed(expr, EXPRmode, pt)) match {
case analyzer.SilentResultValue(result) =>
trace("success: ")(showAttributed(result, true, true, settings.Yshowsymkinds.value))
result
@@ -202,7 +201,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
def wrap(expr0: Tree): ModuleDef = {
val (expr, freeTerms) = extractFreeTerms(expr0, wrapFreeTermRefs = true)
- val (obj, mclazz) = rootMirror.EmptyPackageClass.newModuleAndClassSymbol(
+ val (obj, _) = rootMirror.EmptyPackageClass.newModuleAndClassSymbol(
nextWrapperModuleName())
val minfo = ClassInfoType(List(ObjectClass.tpe), newScope, obj.moduleClass)
@@ -231,12 +230,11 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
emptyValDef,
NoMods,
List(),
- List(List()),
List(methdef),
NoPosition))
trace("wrapped: ")(showAttributed(moduledef, true, true, settings.Yshowsymkinds.value))
- var cleanedUp = resetLocalAttrs(moduledef)
+ val cleanedUp = resetLocalAttrs(moduledef)
trace("cleaned up: ")(showAttributed(cleanedUp, true, true, settings.Yshowsymkinds.value))
cleanedUp.asInstanceOf[ModuleDef]
}
@@ -285,7 +283,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val file = new BatchSourceFile("<toolbox>", wrappedCode)
val unit = new CompilationUnit(file)
phase = run.parserPhase
- val parser = new syntaxAnalyzer.UnitParser(unit)
+ val parser = newUnitParser(unit)
val wrappedTree = parser.parse()
throwIfErrors()
val PackageDef(_, List(ModuleDef(_, _, Template(_, _, _ :: parsed)))) = wrappedTree
@@ -313,11 +311,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
// reporter doesn't accumulate errors, but the front-end does
def throwIfErrors() = {
- if (frontEnd.hasErrors) {
- var msg = "reflective compilation has failed: " + EOL + EOL
- msg += frontEnd.infos map (_.msg) mkString EOL
- throw ToolBoxError(msg)
- }
+ if (frontEnd.hasErrors) throw ToolBoxError(
+ "reflective compilation has failed: " + EOL + EOL + (frontEnd.infos map (_.msg) mkString EOL)
+ )
}
}
@@ -337,15 +333,15 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
command.settings.outputDirs setSingleOutput virtualDirectory
val instance = new ToolBoxGlobal(command.settings, frontEndToReporter(frontEnd, command.settings))
if (frontEnd.hasErrors) {
- var msg = "reflective compilation has failed: cannot initialize the compiler: " + EOL + EOL
- msg += frontEnd.infos map (_.msg) mkString EOL
- throw ToolBoxError(msg)
+ throw ToolBoxError(
+ "reflective compilation has failed: cannot initialize the compiler: " + EOL + EOL +
+ (frontEnd.infos map (_.msg) mkString EOL)
+ )
}
instance
} catch {
case ex: Throwable =>
- var msg = "reflective compilation has failed: cannot initialize the compiler due to %s".format(ex.toString)
- throw ToolBoxError(msg, ex)
+ throw ToolBoxError(s"reflective compilation has failed: cannot initialize the compiler due to $ex", ex)
}
}
@@ -354,8 +350,8 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
def typeCheck(tree: u.Tree, expectedType: u.Type, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): u.Tree = compiler.withCleanupCaches {
if (compiler.settings.verbose.value) println("importing "+tree+", expectedType = "+expectedType)
- var ctree: compiler.Tree = importer.importTree(tree)
- var cexpectedType: compiler.Type = importer.importType(expectedType)
+ val ctree: compiler.Tree = importer.importTree(tree)
+ val cexpectedType: compiler.Type = importer.importType(expectedType)
if (compiler.settings.verbose.value) println("typing "+ctree+", expectedType = "+expectedType)
val ttree: compiler.Tree = compiler.typeCheck(ctree, cexpectedType, silent = silent, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)
@@ -374,9 +370,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
private def inferImplicit(tree: u.Tree, pt: u.Type, isView: Boolean, silent: Boolean, withMacrosDisabled: Boolean, pos: u.Position): u.Tree = compiler.withCleanupCaches {
if (compiler.settings.verbose.value) println("importing "+pt, ", tree = "+tree+", pos = "+pos)
- var ctree: compiler.Tree = importer.importTree(tree)
- var cpt: compiler.Type = importer.importType(pt)
- var cpos: compiler.Position = importer.importPosition(pos)
+ val ctree: compiler.Tree = importer.importTree(tree)
+ val cpt: compiler.Type = importer.importType(pt)
+ val cpos: compiler.Position = importer.importPosition(pos)
if (compiler.settings.verbose.value) println("inferring implicit %s of type %s, macros = %s".format(if (isView) "view" else "value", pt, !withMacrosDisabled))
val itree: compiler.Tree = compiler.inferImplicit(ctree, cpt, isView = isView, silent = silent, withMacrosDisabled = withMacrosDisabled, pos = cpos)
@@ -398,9 +394,6 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
uttree
}
- def showAttributed(tree: u.Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String =
- compiler.showAttributed(importer.importTree(tree), printTypes, printIds, printKinds)
-
def parse(code: String): u.Tree = {
if (compiler.settings.verbose.value) println("parsing "+code)
val ctree: compiler.Tree = compiler.parse(code)
@@ -410,7 +403,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
def compile(tree: u.Tree): () => Any = {
if (compiler.settings.verbose.value) println("importing "+tree)
- var ctree: compiler.Tree = importer.importTree(tree)
+ val ctree: compiler.Tree = importer.importTree(tree)
if (compiler.settings.verbose.value) println("compiling "+ctree)
compiler.compile(ctree)
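
The surface being refactored above is normally reached through a mirror-backed toolbox. A minimal round trip through `parse` and `compile` (method names as in the 2.10 API; `compile` returns the `() => Any` thunk seen above):

    import scala.reflect.runtime.{ currentMirror => cm }
    import scala.tools.reflect.ToolBox

    val tb    = cm.mkToolBox()
    val tree  = tb.parse("List(1, 2, 3).map(_ * 2)")
    val thunk = tb.compile(tree)
    println(thunk())                  // List(2, 4, 6)
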
diff --git a/src/compiler/scala/tools/reflect/package.scala b/src/compiler/scala/tools/reflect/package.scala
index 3f880bf7f8..1055894121 100644
--- a/src/compiler/scala/tools/reflect/package.scala
+++ b/src/compiler/scala/tools/reflect/package.scala
@@ -32,7 +32,7 @@ package object reflect {
/** Creates a reporter that prints messages to the console according to the settings.
*
- * ``minSeverity'' determines minimum severity of the messages to be printed.
+ * `minSeverity` determines minimum severity of the messages to be printed.
* 0 stands for INFO, 1 stands for WARNING and 2 stands for ERROR.
*/
// todo. untangle warningsAsErrors from Reporters. I don't feel like moving this flag here!
@@ -52,7 +52,7 @@ package object reflect {
override def hasWarnings = reporter.hasWarnings
def display(info: Info): Unit = info.severity match {
- case API_INFO => reporter.info(info.pos, info.msg, false)
+ case API_INFO => reporter.info(info.pos, info.msg, force = false)
case API_WARNING => reporter.warning(info.pos, info.msg)
case API_ERROR => reporter.error(info.pos, info.msg)
}
@@ -76,7 +76,6 @@ package object reflect {
private[reflect] def frontEndToReporter(frontEnd: FrontEnd, settings0: Settings): Reporter = new AbstractReporter {
val settings = settings0
- import frontEnd.{Severity => ApiSeverity}
val API_INFO = frontEnd.INFO
val API_WARNING = frontEnd.WARNING
val API_ERROR = frontEnd.ERROR
diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala
index c3264d0787..3cfc1eb2a1 100644
--- a/src/compiler/scala/tools/util/Javap.scala
+++ b/src/compiler/scala/tools/util/Javap.scala
@@ -6,13 +6,14 @@
package scala.tools
package util
-import java.lang.reflect.{ GenericSignatureFormatError, Method, Constructor }
-import java.lang.{ ClassLoader => JavaClassLoader }
import scala.tools.nsc.util.ScalaClassLoader
-import java.io.{ InputStream, PrintWriter, ByteArrayInputStream, FileNotFoundException }
-import scala.tools.nsc.io.File
-import Javap._
-import scala.language.reflectiveCalls
+import java.io.PrintWriter
+
+trait JpResult {
+ def isError: Boolean
+ def value: Any
+ def show(): Unit
+}
trait Javap {
def loader: ScalaClassLoader
@@ -29,147 +30,3 @@ object NoJavap extends Javap {
def tryFile(path: String): Option[Array[Byte]] = None
def tryClass(path: String): Array[Byte] = Array()
}
-
-class JavapClass(
- val loader: ScalaClassLoader = ScalaClassLoader.appLoader,
- val printWriter: PrintWriter = new PrintWriter(System.out, true)
-) extends Javap {
-
- lazy val parser = new JpOptions
-
- val EnvClass = loader.tryToInitializeClass[FakeEnvironment](Env).orNull
- val PrinterClass = loader.tryToInitializeClass[FakePrinter](Printer).orNull
- private def failed = (EnvClass eq null) || (PrinterClass eq null)
-
- val PrinterCtr = (
- if (failed) null
- else PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass)
- )
-
- def findBytes(path: String): Array[Byte] =
- tryFile(path) getOrElse tryClass(path)
-
- def apply(args: Seq[String]): List[JpResult] = {
- if (failed) Nil
- else args.toList filterNot (_ startsWith "-") map { path =>
- val bytes = findBytes(path)
- if (bytes.isEmpty) new JpError("Could not find class bytes for '%s'".format(path))
- else new JpSuccess(newPrinter(new ByteArrayInputStream(bytes), newEnv(args)))
- }
- }
-
- def newPrinter(in: InputStream, env: FakeEnvironment): FakePrinter =
- if (failed) null
- else PrinterCtr.newInstance(in, printWriter, env)
-
- def newEnv(opts: Seq[String]): FakeEnvironment = {
- lazy val env: FakeEnvironment = EnvClass.newInstance()
-
- if (failed) null
- else parser(opts) foreach { case (name, value) =>
- val field = EnvClass getDeclaredField name
- field setAccessible true
- field.set(env, value.asInstanceOf[AnyRef])
- }
-
- env
- }
-
- /** Assume the string is a path and try to find the classfile
- * it represents.
- */
- def tryFile(path: String): Option[Array[Byte]] = {
- val file = File(
- if (path.endsWith(".class")) path
- else path.replace('.', '/') + ".class"
- )
- if (!file.exists) None
- else try Some(file.toByteArray) catch { case x: Exception => None }
- }
- /** Assume the string is a fully qualified class name and try to
- * find the class object it represents.
- */
- def tryClass(path: String): Array[Byte] = {
- val extName = (
- if (path endsWith ".class") (path dropRight 6).replace('/', '.')
- else path
- )
- loader.classBytes(extName)
- }
-}
-
-object Javap {
- val Env = "sun.tools.javap.JavapEnvironment"
- val Printer = "sun.tools.javap.JavapPrinter"
-
- def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) =
- cl.tryToInitializeClass[AnyRef](Env).isDefined
-
- // "documentation"
- type FakeEnvironment = AnyRef
- type FakePrinter = AnyRef
-
- def apply(path: String): Unit = apply(Seq(path))
- def apply(args: Seq[String]): Unit = new JavapClass() apply args foreach (_.show())
-
- sealed trait JpResult {
- type ResultType
- def isError: Boolean
- def value: ResultType
- def show(): Unit
- // todo
- // def header(): String
- // def fields(): List[String]
- // def methods(): List[String]
- // def signatures(): List[String]
- }
- class JpError(msg: String) extends JpResult {
- type ResultType = String
- def isError = true
- def value = msg
- def show() = println(msg)
- }
- class JpSuccess(val value: AnyRef) extends JpResult {
- type ResultType = AnyRef
- def isError = false
- def show() = value.asInstanceOf[{ def print(): Unit }].print()
- }
-
- class JpOptions {
- private object Access {
- final val PRIVATE = 0
- final val PROTECTED = 1
- final val PACKAGE = 2
- final val PUBLIC = 3
- }
- private val envActionMap: Map[String, (String, Any)] = {
- val map = Map(
- "-l" -> (("showLineAndLocal", true)),
- "-c" -> (("showDisassembled", true)),
- "-s" -> (("showInternalSigs", true)),
- "-verbose" -> (("showVerbose", true)),
- "-private" -> (("showAccess", Access.PRIVATE)),
- "-package" -> (("showAccess", Access.PACKAGE)),
- "-protected" -> (("showAccess", Access.PROTECTED)),
- "-public" -> (("showAccess", Access.PUBLIC)),
- "-all" -> (("showallAttr", true))
- )
- map ++ List(
- "-v" -> map("-verbose"),
- "-p" -> map("-private")
- )
- }
- def apply(opts: Seq[String]): Seq[(String, Any)] = {
- opts flatMap { opt =>
- envActionMap get opt match {
- case Some(pair) => List(pair)
- case _ =>
- val charOpts = opt.tail.toSeq map ("-" + _)
- if (charOpts forall (envActionMap contains _))
- charOpts map envActionMap
- else Nil
- }
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala
index 0af1011bda..d8e545e6b1 100644
--- a/src/compiler/scala/tools/util/PathResolver.scala
+++ b/src/compiler/scala/tools/util/PathResolver.scala
@@ -6,7 +6,6 @@
package scala.tools
package util
-import java.net.{ URL, MalformedURLException }
import scala.tools.reflect.WrappedProperties.AccessControl
import nsc.{ Settings, GenericRunnerSettings }
import nsc.util.{ ClassPath, JavaClassPath, ScalaClassLoader }
@@ -19,16 +18,9 @@ import scala.language.postfixOps
// https://wiki.scala-lang.org/display/SW/Classpath
object PathResolver {
- // Imports property/environment functions which suppress
- // security exceptions.
+ // Imports property/environment functions which suppress security exceptions.
import AccessControl._
- def firstNonEmpty(xs: String*) = xs find (_ != "") getOrElse ""
-
- /** Map all classpath elements to absolute paths and reconstruct the classpath.
- */
- def makeAbsolute(cp: String) = ClassPath.map(cp, x => Path(x).toAbsolute.path)
-
/** pretty print class path */
def ppcp(s: String) = split(s) match {
case Nil => ""
@@ -46,7 +38,6 @@ object PathResolver {
/** Environment variables which java pays attention to so it
* seems we do as well.
*/
- def classPathEnv = envOrElse("CLASSPATH", "")
def sourcePathEnv = envOrElse("SOURCEPATH", "")
def javaBootClassPath = propOrElse("sun.boot.class.path", searchForBootClasspath)
@@ -86,7 +77,6 @@ object PathResolver {
def scalaHome = Environment.scalaHome
def scalaHomeDir = Directory(scalaHome)
- def scalaHomeExists = scalaHomeDir.isDirectory
def scalaLibDir = Directory(scalaHomeDir / "lib")
def scalaClassesDir = Directory(scalaHomeDir / "classes")
@@ -109,15 +99,7 @@ object PathResolver {
// classpath as set up by the runner (or regular classpath under -nobootcp)
// and then again here.
def scalaBootClassPath = ""
- // scalaLibDirFound match {
- // case Some(dir) if scalaHomeExists =>
- // val paths = ClassPath expandDir dir.path
- // join(paths: _*)
- // case _ => ""
- // }
-
def scalaExtDirs = Environment.scalaExtDirs
-
def scalaPluginPath = (scalaHomeDir / "misc" / "scala-devel" / "plugins").path
override def toString = """
@@ -136,7 +118,7 @@ object PathResolver {
)
}
- def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = {
+ def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = { // called from scalap
val s = new Settings()
s.classpath.value = path
new PathResolver(s, context) result
@@ -153,15 +135,15 @@ object PathResolver {
}
else {
val settings = new Settings()
- val rest = settings.processArguments(args.toList, false)._2
+ val rest = settings.processArguments(args.toList, processAll = false)._2
val pr = new PathResolver(settings)
println(" COMMAND: 'scala %s'".format(args.mkString(" ")))
println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" ")))
- pr.result.show
+ pr.result.show()
}
}
}
-import PathResolver.{ Defaults, Environment, firstNonEmpty, ppcp }
+import PathResolver.{ Defaults, Environment, ppcp }
class PathResolver(settings: Settings, context: JavaContext) {
def this(settings: Settings) = this(settings, if (settings.inline.value) new JavaContext else DefaultJavaContext)
@@ -188,6 +170,7 @@ class PathResolver(settings: Settings, context: JavaContext) {
object Calculated {
def scalaHome = Defaults.scalaHome
def useJavaClassPath = settings.usejavacp.value || Defaults.useJavaClassPath
+ def useManifestClassPath= settings.usemanifestcp.value
def javaBootClassPath = cmdLineOrElse("javabootclasspath", Defaults.javaBootClassPath)
def javaExtDirs = cmdLineOrElse("javaextdirs", Defaults.javaExtDirs)
def javaUserClassPath = if (useJavaClassPath) Defaults.javaUserClassPath else ""
@@ -227,6 +210,7 @@ class PathResolver(settings: Settings, context: JavaContext) {
classesInPath(scalaBootClassPath), // 4. The Scala boot class path.
contentsOfDirsInPath(scalaExtDirs), // 5. The Scala extension class path.
classesInExpandedPath(userClassPath), // 6. The Scala application class path.
+ classesInManifest(useManifestClassPath), // 8. The Manifest class path.
sourcesInPath(sourcePath) // 7. The Scala source path.
)
diff --git a/src/compiler/scala/tools/util/SocketServer.scala b/src/compiler/scala/tools/util/SocketServer.scala
index 1b06ce2ff2..7da9479dab 100644
--- a/src/compiler/scala/tools/util/SocketServer.scala
+++ b/src/compiler/scala/tools/util/SocketServer.scala
@@ -16,8 +16,8 @@ trait CompileOutputCommon {
def verbose: Boolean
def info(msg: String) = if (verbose) echo(msg)
- def echo(msg: String) = {Console println msg; Console.flush}
- def warn(msg: String) = {Console.err println msg; Console.flush}
+ def echo(msg: String) = {Console println msg; Console.flush()}
+ def warn(msg: String) = {Console.err println msg; Console.flush()}
def fatal(msg: String) = { warn(msg) ; sys.exit(1) }
}
diff --git a/src/continuations/library/scala/util/continuations/ControlContext.scala b/src/continuations/library/scala/util/continuations/ControlContext.scala
index 44a5b537b6..c196809da9 100644
--- a/src/continuations/library/scala/util/continuations/ControlContext.scala
+++ b/src/continuations/library/scala/util/continuations/ControlContext.scala
@@ -183,7 +183,7 @@ final class ControlContext[+A,-B,+C](val fun: (A => B, Exception => B) => C, val
// need filter or other functions?
- final def flatMapCatch[A1>:A,B1<:B,C1>:C<:B1](pf: PartialFunction[Exception, ControlContext[A1,B1,C1]]): ControlContext[A1,B1,C1] = {
+ final def flatMapCatch[A1>:A,B1<:B,C1>:C<:B1](pf: PartialFunction[Exception, ControlContext[A1,B1,C1]]): ControlContext[A1,B1,C1] = { // called by codegen from SelectiveCPSTransform
if (fun eq null)
this
else {
@@ -209,7 +209,7 @@ final class ControlContext[+A,-B,+C](val fun: (A => B, Exception => B) => C, val
}
}
- final def mapFinally(f: () => Unit): ControlContext[A,B,C] = {
+ final def mapFinally(f: () => Unit): ControlContext[A,B,C] = { // called in code generated by SelectiveCPSTransform
if (fun eq null) {
try {
f()
diff --git a/src/continuations/library/scala/util/continuations/package.scala b/src/continuations/library/scala/util/continuations/package.scala
index 90bab56805..573fae85e7 100644
--- a/src/continuations/library/scala/util/continuations/package.scala
+++ b/src/continuations/library/scala/util/continuations/package.scala
@@ -166,8 +166,8 @@ package object continuations {
throw new NoSuchMethodException("this code has to be compiled with the Scala continuations plugin enabled")
}
- def shiftUnitR[A,B](x: A): ControlContext[A,B,B] = {
- new ControlContext(null, x)
+ def shiftUnitR[A,B](x: A): ControlContext[A,B,B] = { // called in code generated by SelectiveCPSTransform
+ new ControlContext[A, B, B](null, x)
}
/**
@@ -176,11 +176,11 @@ package object continuations {
* a final result.
* @see shift
*/
- def shiftR[A,B,C](fun: (A => B) => C): ControlContext[A,B,C] = {
+ def shiftR[A,B,C](fun: (A => B) => C): ControlContext[A,B,C] = { // called in code generated by SelectiveCPSTransform
new ControlContext((f:A=>B,g:Exception=>B) => fun(f), null.asInstanceOf[A])
}
- def reifyR[A,B,C](ctx: => ControlContext[A,B,C]): ControlContext[A,B,C] = {
+ def reifyR[A,B,C](ctx: => ControlContext[A,B,C]): ControlContext[A,B,C] = { // called in code generated by SelectiveCPSTransform
ctx
}
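
Note: shiftUnitR now spells out its type arguments. With a null first argument there is nothing for inference to work from except the expected type, so writing ControlContext[A, B, B] makes the intent explicit. A minimal sketch with a hypothetical Cell class (not the continuations API):

    // Hypothetical Cell, illustrating explicit type arguments next to a null argument.
    final class Cell[A, B](val fun: A => B, val x: A)

    object CellDemo {
      def unitCell[A, B](x: A): Cell[A, B] =
        new Cell[A, B](null, x)   // explicit [A, B]; B cannot be inferred from the arguments alone

      def main(args: Array[String]): Unit =
        println(unitCell[Int, String](42).x)   // prints 42
    }
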
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
index 00c72cf423..beab271a3b 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -2,11 +2,10 @@
package scala.tools.selectivecps
-import scala.tools.nsc.Global
-import scala.tools.nsc.typechecker.Modes
+import scala.tools.nsc.{ Global, Mode }
import scala.tools.nsc.MissingRequirementError
-abstract class CPSAnnotationChecker extends CPSUtils with Modes {
+abstract class CPSAnnotationChecker extends CPSUtils {
val global: Global
import global._
import analyzer.{AnalyzerPlugin, Typer}
@@ -97,7 +96,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
if (!cpsEnabled) return bounds
val anyAtCPS = newCpsParamsMarker(NothingClass.tpe, AnyClass.tpe)
- if (isFunctionType(tparams.head.owner.tpe) || isPartialFunctionType(tparams.head.owner.tpe)) {
+ if (isFunctionType(tparams.head.owner.tpe_*) || isPartialFunctionType(tparams.head.owner.tpe_*)) {
vprintln("function bound: " + tparams.head.owner.tpe + "/"+bounds+"/"+targs)
if (hasCpsParamTypes(targs.last))
bounds.reverse match {
@@ -123,14 +122,14 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
import checker._
- override def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
+ override def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Boolean = {
if (!cpsEnabled) return false
- vprintln("can adapt annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
+ vprintln("can adapt annotations? " + tree + " / " + tree.tpe + " / " + mode + " / " + pt)
val annots1 = cpsParamAnnotation(tree.tpe)
val annots2 = cpsParamAnnotation(pt)
- if ((mode & global.analyzer.PATTERNmode) != 0) {
+ if (mode.inPatternMode) {
//println("can adapt pattern annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
if (!annots1.isEmpty) {
return true
@@ -139,7 +138,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
/*
// not precise enough -- still relying on addAnnotations to remove things from ValDef symbols
- if ((mode & global.analyzer.TYPEmode) != 0 && (mode & global.analyzer.BYVALmode) != 0) {
+ if ((mode & TYPEmode) != 0 && (mode & BYVALmode) != 0) {
if (!annots1.isEmpty) {
return true
}
@@ -148,16 +147,16 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
/*
this interferes with overloading resolution
- if ((mode & global.analyzer.BYVALmode) != 0 && tree.tpe <:< pt) {
+ if ((mode & BYVALmode) != 0 && tree.tpe <:< pt) {
vprintln("already compatible, can't adapt further")
return false
}
*/
- if ((mode & global.analyzer.EXPRmode) != 0) {
+ if (mode.inExprMode) {
if ((annots1 corresponds annots2)(_.atp <:< _.atp)) {
vprintln("already same, can't adapt further")
false
- } else if (annots1.isEmpty && !annots2.isEmpty && ((mode & global.analyzer.BYVALmode) == 0)) {
+ } else if (annots1.isEmpty && !annots2.isEmpty && !mode.inByValMode) {
//println("can adapt annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
if (!hasPlusMarker(tree.tpe)) {
// val base = tree.tpe <:< removeAllCPSAnnotations(pt)
@@ -170,10 +169,10 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
true
//}
} else false
- } else if (!hasPlusMarker(tree.tpe) && annots1.isEmpty && !annots2.isEmpty && ((mode & global.analyzer.RETmode) != 0)) {
+ } else if (!hasPlusMarker(tree.tpe) && annots1.isEmpty && !annots2.isEmpty && mode.inRetMode) {
vprintln("checking enclosing method's result type without annotations")
tree.tpe <:< pt.withoutAnnotations
- } else if (!hasMinusMarker(tree.tpe) && !annots1.isEmpty && ((mode & global.analyzer.BYVALmode) != 0)) {
+ } else if (!hasMinusMarker(tree.tpe) && !annots1.isEmpty && mode.inByValMode) {
val optCpsTypes: Option[(Type, Type)] = cpsParamTypes(tree.tpe)
val optExpectedCpsTypes: Option[(Type, Type)] = cpsParamTypes(pt)
if (optCpsTypes.isEmpty || optExpectedCpsTypes.isEmpty) {
@@ -189,21 +188,21 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
} else false
}
- override def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = {
+ override def adaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Tree = {
if (!cpsEnabled) return tree
- vprintln("adapt annotations " + tree + " / " + tree.tpe + " / " + modeString(mode) + " / " + pt)
+ vprintln("adapt annotations " + tree + " / " + tree.tpe + " / " + mode + " / " + pt)
- val patMode = (mode & global.analyzer.PATTERNmode) != 0
- val exprMode = (mode & global.analyzer.EXPRmode) != 0
- val byValMode = (mode & global.analyzer.BYVALmode) != 0
- val retMode = (mode & global.analyzer.RETmode) != 0
+ val patMode = mode.inPatternMode
+ val exprMode = mode.inExprMode
+ val byValMode = mode.inByValMode
+ val retMode = mode.inRetMode
val annotsTree = cpsParamAnnotation(tree.tpe)
val annotsExpected = cpsParamAnnotation(pt)
// not sure I rephrased this comment correctly:
- // replacing `patMode` in the condition below by `patMode || ((mode & global.analyzer.TYPEmode) != 0 && (mode & global.analyzer.BYVALmode))`
+ // replacing `patMode` in the condition below by `patMode || ((mode & TYPEmode) != 0 && (mode & BYVALmode))`
// doesn't work correctly -- still relying on addAnnotations to remove things from ValDef symbols
if (patMode && !annotsTree.isEmpty) tree modifyType removeAllCPSAnnotations
else if (exprMode && !byValMode && !hasPlusMarker(tree.tpe) && annotsTree.isEmpty && annotsExpected.nonEmpty) { // shiftUnit
@@ -227,7 +226,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
} else if (retMode && !hasPlusMarker(tree.tpe) && annotsTree.isEmpty && annotsExpected.nonEmpty) {
// add a marker annotation that will make tree.tpe behave as pt, subtyping wise
// tree will look like having any possible annotation
-
+
// note 1: we are only adding a plus marker if the method's result type is a cps type
// (annotsExpected.nonEmpty == cpsParamAnnotation(pt).nonEmpty)
// note 2: we are not adding the expected cps annotations, since they will be added
@@ -240,7 +239,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
/** Returns an adapted type for a return expression if the method's result type (pt) is a CPS type.
* Otherwise, it returns the `default` type (`typedReturn` passes `NothingClass.tpe`).
- *
+ *
* A return expression in a method that has a CPS result type is an error unless the return
* is in tail position. Therefore, we are making sure that only the types of return expressions
* are adapted which will either be removed, or lead to an error.
@@ -363,7 +362,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
global.globalError("not a single cps annotation: " + xs)
xs(0)
}
-
+
def emptyOrSingleList(xs: List[AnnotationInfo]) = if (xs.isEmpty) Nil else List(single(xs))
def transChildrenInOrder(tree: Tree, tpe: Type, childTrees: List[Tree], byName: List[Tree]) = {
@@ -400,11 +399,13 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
/** Modify the type that has thus far been inferred
* for a tree. All this should do is add annotations. */
- override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
+ override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type = {
import scala.util.control._
if (!cpsEnabled) {
- if (Exception.failAsValue(classOf[MissingRequirementError])(false)(hasCpsParamTypes(tpe)))
+ val report = try hasCpsParamTypes(tpe) catch { case _: MissingRequirementError => false }
+ if (report)
global.reporter.error(tree.pos, "this code must be compiled with the Scala continuations plugin enabled")
+
return tpe
}
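
Note: the checker now receives a scala.tools.nsc.Mode instead of a raw Int, so the bit-mask tests such as (mode & PATTERNmode) != 0 become named predicates such as mode.inPatternMode, and the Exception.failAsValue combinator gives way to a plain try/catch on MissingRequirementError. The sketch below models such a mode as a value class over an Int bit set; the bit values and the class itself are illustrative, not the real Mode:

    object ModeDemo {
      final val EXPRmode    = 0x0001
      final val PATTERNmode = 0x0002
      final val BYVALmode   = 0x2000
      final val RETmode     = 0x4000

      // Value class: no allocation, but the bit tests read as named predicates.
      final class Mode(val bits: Int) extends AnyVal {
        def inExprMode    = (bits & EXPRmode) != 0
        def inPatternMode = (bits & PATTERNmode) != 0
        def inByValMode   = (bits & BYVALmode) != 0
        def inRetMode     = (bits & RETmode) != 0
      }

      def main(args: Array[String]): Unit = {
        val mode = new Mode(EXPRmode | BYVALmode)
        println(mode.inExprMode && mode.inByValMode)  // true
        println(mode.inPatternMode)                   // false
      }
    }
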
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
index 46c644bcd6..29480576ea 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
@@ -7,7 +7,6 @@ import scala.tools.nsc.Global
trait CPSUtils {
val global: Global
import global._
- import definitions._
var cpsEnabled = false
val verbose: Boolean = System.getProperty("cpsVerbose", "false") == "true"
@@ -36,7 +35,7 @@ trait CPSUtils {
lazy val MarkerCPSAdaptMinus = rootMirror.getRequiredClass("scala.util.continuations.cpsMinus")
lazy val Context = rootMirror.getRequiredClass("scala.util.continuations.ControlContext")
- lazy val ModCPS = rootMirror.getRequiredPackage("scala.util.continuations")
+ lazy val ModCPS = rootMirror.getPackage("scala.util.continuations")
lazy val MethShiftUnit = definitions.getMember(ModCPS, cpsNames.shiftUnit)
lazy val MethShiftUnit0 = definitions.getMember(ModCPS, cpsNames.shiftUnit0)
@@ -57,12 +56,12 @@ trait CPSUtils {
protected def newMarker(sym: Symbol): AnnotationInfo = AnnotationInfo marker sym.tpe
protected def newCpsParamsMarker(tp1: Type, tp2: Type) =
- newMarker(appliedType(MarkerCPSTypes.tpe, List(tp1, tp2)))
+ newMarker(appliedType(MarkerCPSTypes, tp1, tp2))
// annotation checker
protected def annTypes(ann: AnnotationInfo): (Type, Type) = {
- val tp0 :: tp1 :: Nil = ann.atp.normalize.typeArgs
+ val tp0 :: tp1 :: Nil = ann.atp.dealiasWiden.typeArgs
((tp0, tp1))
}
protected def hasMinusMarker(tpe: Type) = tpe hasAnnotation MarkerCPSAdaptMinus
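
Note: annTypes now dealiases and widens the annotation type before destructuring its two type arguments, and the type application uses the appliedType overload that takes the class symbol plus varargs arguments. A small sketch of the dealias-then-destructure step against the public runtime-reflection API (assumes Scala 2.11+ with scala-reflect on the classpath; CpsPair is a stand-in alias):

    object DealiasDemo {
      import scala.reflect.runtime.universe._

      type CpsPair = (Int, String)   // stand-in for an annotation type carrying two type arguments

      def main(args: Array[String]): Unit = {
        val tp0 :: tp1 :: Nil = typeOf[CpsPair].dealias.typeArgs
        println(s"$tp0 / $tp1")      // Int / String
      }
    }
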
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
index 8b39bf3961..323e894b51 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
@@ -2,13 +2,10 @@
package scala.tools.selectivecps
-import scala.tools.nsc._
import scala.tools.nsc.transform._
import scala.tools.nsc.symtab._
import scala.tools.nsc.plugins._
-import scala.tools.nsc.ast._
-
/**
* In methods marked @cps, explicitly name results of calls to other @cps methods
*/
@@ -20,13 +17,14 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
import definitions._ // standard classes and methods
import typer.atOwner // methods to type trees
+ override def description = "ANF pre-transform for @cps"
+
/** the following two members override abstract members in Transform */
val phaseName: String = "selectiveanf"
protected def newTransformer(unit: CompilationUnit): Transformer =
new ANFTransformer(unit)
-
class ANFTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
implicit val _unit = unit // allow code in CPSUtils.scala to report errors
@@ -131,7 +129,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
def transformPureMatch(tree: Tree, selector: Tree, cases: List[CaseDef]) = {
val caseVals = cases map { case cd @ CaseDef(pat, guard, body) =>
- // if (!hasPlusMarker(body.tpe)) body.tpe = body.tpe withAnnotation newPlusMarker() // TODO: to avoid warning
+ // if (!hasPlusMarker(body.tpe)) body modifyType (_ withAnnotation newPlusMarker()) // TODO: to avoid warning
val bodyVal = transExpr(body, None, ext) // ??? triggers "cps-transformed unexpectedly" warning in transTailValue
treeCopy.CaseDef(cd, transform(pat), transform(guard), bodyVal)
}
@@ -172,7 +170,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
debuglog("transforming valdef " + vd.symbol)
if (getExternalAnswerTypeAnn(tpt.tpe).isEmpty) {
-
+
atOwner(vd.symbol) {
val rhs1 = transExpr(rhs, None, None)
@@ -471,7 +469,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
val sym: Symbol = (
currentOwner.newValue(newTermName(unit.fresh.newName("tmp")), tree.pos, Flags.SYNTHETIC)
setInfo valueTpe
- setAnnotations List(AnnotationInfo(MarkerCPSSym.tpe, Nil, Nil))
+ setAnnotations List(AnnotationInfo(MarkerCPSSym.tpe_*, Nil, Nil))
)
expr.changeOwner(currentOwner -> sym)
@@ -503,9 +501,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
// TODO: better yet: do without annotations on symbols
val spcVal = getAnswerTypeAnn(anfRhs.tpe)
- if (spcVal.isDefined) {
- tree.symbol.setAnnotations(List(AnnotationInfo(MarkerCPSSym.tpe, Nil, Nil)))
- }
+ spcVal foreach (_ => tree.symbol setAnnotations List(AnnotationInfo(MarkerCPSSym.tpe_*, Nil, Nil)))
(stms:::List(treeCopy.ValDef(tree, mods, name, tpt, anfRhs)), linearize(spc, spcVal)(unit, tree.pos))
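
Note: the if (spcVal.isDefined) { ... } block collapses into Option.foreach, which runs the side effect only when a value is present. Minimal script-style sketch:

    val spcVal: Option[String] = Some("answer-type")

    // before: guard on isDefined
    if (spcVal.isDefined) println("annotate symbol")

    // after: foreach runs the effect only for a non-empty Option
    spcVal foreach (_ => println("annotate symbol"))
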
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
index 237159795a..c16cce2f2c 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
@@ -3,15 +3,11 @@
package scala.tools.selectivecps
import scala.tools.nsc
-import scala.tools.nsc.typechecker._
import nsc.Global
-import nsc.Phase
import nsc.plugins.Plugin
import nsc.plugins.PluginComponent
class SelectiveCPSPlugin(val global: Global) extends Plugin {
- import global._
-
val name = "continuations"
val description = "applies selective cps conversion"
@@ -26,7 +22,6 @@ class SelectiveCPSPlugin(val global: Global) extends Plugin {
override val runsBefore = List("uncurry")
}
-
val components = List[PluginComponent](anfPhase, cpsPhase)
val checker = new CPSAnnotationChecker {
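
Note: aside from dropping unused imports, this file keeps the standard two-ingredient plugin shape: a Plugin naming its components, each a PluginComponent contributing a phase. A generic skeleton of that shape, with hypothetical names (EchoPlugin, "echo"), following the usual scalac plugin pattern rather than the continuations plugin itself:

    import scala.tools.nsc.{Global, Phase}
    import scala.tools.nsc.plugins.{Plugin, PluginComponent}

    class EchoPlugin(val global: Global) extends Plugin {
      val name = "echo"
      val description = "prints the name of every compilation unit"
      val components = List[PluginComponent](component)

      private object component extends PluginComponent {
        val global: EchoPlugin.this.global.type = EchoPlugin.this.global
        val phaseName = "echo"
        val runsAfter = List("parser")
        def newPhase(prev: Phase): Phase = new StdPhase(prev) {
          def apply(unit: global.CompilationUnit): Unit =
            println("echo: " + unit.source.file.name)
        }
      }
    }
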
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
index 4482bf2b7c..846ce01953 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
@@ -2,13 +2,8 @@
package scala.tools.selectivecps
-import scala.collection._
-
-import scala.tools.nsc._
import scala.tools.nsc.transform._
import scala.tools.nsc.plugins._
-
-import scala.tools.nsc.ast.TreeBrowsers
import scala.tools.nsc.ast._
/**
@@ -22,6 +17,8 @@ abstract class SelectiveCPSTransform extends PluginComponent with
import definitions._ // standard classes and methods
import typer.atOwner // methods to type trees
+ override def description = "@cps-driven transform of selectiveanf assignments"
+
/** the following two members override abstract members in Transform */
val phaseName: String = "selectivecps"
@@ -56,7 +53,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
case _ =>
getExternalAnswerTypeAnn(tp) match {
case Some((res, outer)) =>
- appliedType(Context.tpe, List(removeAllCPSAnnotations(tp), res, outer))
+ appliedType(Context.tpeHK, List(removeAllCPSAnnotations(tp), res, outer))
case _ =>
removeAllCPSAnnotations(tp)
}
@@ -90,7 +87,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
//gen.mkAttributedSelect(gen.mkAttributedSelect(gen.mkAttributedSelect(gen.mkAttributedIdent(ScalaPackage),
//ScalaPackage.tpe.member("util")), ScalaPackage.tpe.member("util").tpe.member("continuations")), MethShiftR)
//gen.mkAttributedRef(ModCPS.tpe, MethShiftR) // TODO: correct?
- debuglog("funR.tpe = " + funR.tpe)
+ debuglog("funR.tpe: " + funR.tpe)
Apply(
TypeApply(funR, targs).setType(appliedType(funR.tpe, targs.map((t:Tree) => t.tpe))),
args.map(transform(_))
@@ -102,12 +99,12 @@ abstract class SelectiveCPSTransform extends PluginComponent with
debuglog("found shiftUnit: " + tree)
atPos(tree.pos) {
val funR = gen.mkAttributedRef(MethShiftUnitR) // TODO: correct?
- debuglog("funR.tpe = " + funR.tpe)
+ debuglog("funR.tpe: " + funR.tpe)
Apply(
TypeApply(funR, List(targs(0), targs(1))).setType(appliedType(funR.tpe,
List(targs(0).tpe, targs(1).tpe))),
args.map(transform(_))
- ).setType(appliedType(Context.tpe, List(targs(0).tpe,targs(1).tpe,targs(1).tpe)))
+ ).setType(appliedType(Context.tpeHK, List(targs(0).tpe,targs(1).tpe,targs(1).tpe)))
}
case Apply(TypeApply(fun, targs), args)
@@ -115,7 +112,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
log("found reify: " + tree)
atPos(tree.pos) {
val funR = gen.mkAttributedRef(MethReifyR) // TODO: correct?
- debuglog("funR.tpe = " + funR.tpe)
+ debuglog("funR.tpe: " + funR.tpe)
Apply(
TypeApply(funR, targs).setType(appliedType(funR.tpe, targs.map((t:Tree) => t.tpe))),
args.map(transform(_))
@@ -192,7 +189,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
val targettp = transformCPSType(tree.tpe)
val pos = catches.head.pos
- val funSym = currentOwner.newValueParameter(cpsNames.catches, pos).setInfo(appliedType(PartialFunctionClass.tpe, List(ThrowableClass.tpe, targettp)))
+ val funSym = currentOwner.newValueParameter(cpsNames.catches, pos).setInfo(appliedType(PartialFunctionClass, ThrowableClass.tpe, targettp))
val funDef = localTyper.typedPos(pos) {
ValDef(funSym, Match(EmptyTree, catches1))
}
@@ -350,7 +347,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
val ctxSym = currentOwner.newValue(newTermName("" + vd.symbol.name + cpsNames.shiftSuffix)).setInfo(rhs1.tpe)
val ctxDef = localTyper.typed(ValDef(ctxSym, rhs1))
def ctxRef = localTyper.typed(Ident(ctxSym))
- val argSym = currentOwner.newValue(vd.symbol.name).setInfo(tpe)
+ val argSym = currentOwner.newValue(vd.symbol.name.toTermName).setInfo(tpe)
val argDef = localTyper.typed(ValDef(argSym, Select(ctxRef, ctxRef.tpe.member(cpsNames.getTrivialValue))))
val switchExpr = localTyper.typedPos(vd.symbol.pos) {
val body2 = mkBlock(bodyStms, bodyExpr).duplicate // dup before typing!
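
Note: the type applications in this file now start from the type constructor (Context.tpeHK) or the class symbol (PartialFunctionClass) rather than from an already-applied .tpe, which is the intended starting point for appliedType. A sketch of the same step with the public reflection API (assumes Scala 2.11+; the example types are arbitrary):

    object AppliedTypeDemo {
      import scala.reflect.runtime.universe._

      def main(args: Array[String]): Unit = {
        // take the type constructor (the analogue of tpeHK), then apply it to arguments
        val pfCon   = typeOf[PartialFunction[Any, Any]].typeConstructor
        val applied = appliedType(pfCon, List(typeOf[Throwable], typeOf[String]))
        println(applied)   // PartialFunction[Throwable,String]
      }
    }
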
diff --git a/src/detach/library/scala/remoting/Channel.scala b/src/detach/library/scala/remoting/Channel.scala
deleted file mode 100644
index e60d16c0d5..0000000000
--- a/src/detach/library/scala/remoting/Channel.scala
+++ /dev/null
@@ -1,190 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: Channel.scala 18365 2009-07-21 11:00:42Z michelou $
-
-package scala.remoting
-
-import java.io._
-import java.net._
-import java.rmi.server.RMIClassLoader
-
-/** <p>
- * The class <code>Channel</code> implements (basic) typed channels
- * which use <a href="http://java.sun.com/docs/books/tutorial/networking/sockets/"
- * target="_top"/>Java socket</a> communication and Scala type manifests to
- * provide type-safe send/receive operations between a localhost and another
- * remote machine by specifying some <code>host</code> and <code>port</code>.
- * </p>
- *
- * @author Stephane Micheloud
- * @version 1.1
- */
-class Channel protected (socket: Socket) {
-
- // Create a socket without a timeout
- def this(host: String, port: Int) = this(new Socket(host, port))
-
- // // Create a socket with a timeout
- // val sockaddr: SocketAddress = new InetSocketAddress(addr, port)
- // val socket = new Socket()
- // // If the timeout occurs, SocketTimeoutException is thrown.
- // socket.connect(sockaddr, 2000) // 2 seconds
-
- /** Returns the local address of this channel. */
- val host = socket.getInetAddress.getHostAddress
-
- /** Returns the port on which this channel is listening. */
- val port = socket.getLocalPort
-
- private var cl: ClassLoader =
- try {
- // requires permission in Java policy file
- val codebase = System.getProperty("java.rmi.server.codebase")
- if (codebase != null) info("codebase="+codebase)
- RMIClassLoader.getClassLoader(codebase)
- }
- catch {
- case e: Exception =>
- sys.error("Class loader undefined: " + e.getMessage)
- null
- }
- def classLoader: ClassLoader = cl
- def classLoader_=(x: ClassLoader) { cl = x }
-
- info(""+this)
-
- private class CustomObjectInputStream(in: InputStream)
- extends ObjectInputStream(in) {
- override def resolveClass(desc: ObjectStreamClass): Class[_] =
- if (cl eq null)
- super.resolveClass(desc)
- else
- try {
- info("resolve class "+desc.getName)
- cl loadClass desc.getName
- }
- catch {
- case e: ClassNotFoundException =>
- super.resolveClass(desc)
- }
- }
-
- // lazy modifier is required!
- private lazy val in =
- try {
- new CustomObjectInputStream(socket.getInputStream)
- }
- catch {
- case e: IOException =>
- sys.error("Input stream undefined: "+e.getMessage+" ("+this+")")
- null
- }
- private lazy val out =
- try {
- new ObjectOutputStream(socket.getOutputStream)
- }
- catch {
- case e: IOException =>
- sys.error("Output stream undefined: "+e.getMessage+" ("+this+")")
- null
- }
-
- /** <code>receive&lt;primtype&gt;</code> methods may throw an
- * <code>IOException</code>.
- */
- def receiveUnit = receive[Unit]
- def receiveBoolean = receive[Boolean]
- def receiveByte = receive[Byte]
- def receiveChar = receive[Char]
- def receiveShort = receive[Short]
- def receiveInt = receive[Int]
- def receiveLong = receive[Long]
- def receiveFloat = receive[Float]
- def receiveDouble = receive[Double]
- def receiveString = receive[String]
-
- /** <code>receive</code> method may throw either an
- * <code>ClassNotFoundException</code> or an <code>IOException</code>.
- *
- * @throw <code>ChannelException</code> if received value has not
- * the expected type.
- */
- @throws(classOf[ChannelException])
- def receive[T](implicit expected: scala.reflect.ClassTag[T]): T = {
- val found = in.readObject().asInstanceOf[reflect.ClassTag[_]]
- info("receive: found="+found+", expected="+expected)
- import scala.reflect.ClassTag
- val x = found match {
- case ClassTag.Unit => ()
- case ClassTag.Boolean => in.readBoolean()
- case ClassTag.Byte => in.readByte()
- case ClassTag.Char => in.readChar()
- case ClassTag.Short => in.readShort()
- case ClassTag.Int => in.readInt()
- case ClassTag.Long => in.readLong()
- case ClassTag.Float => in.readFloat()
- case ClassTag.Double => in.readDouble()
- case _ => in.readObject()
- }
- val res = if (found <:< expected)
- x.asInstanceOf[T]
- else
- throw new ChannelException(
- "\n\tfound \""+found+"\"\n\texpected \""+expected+"\"")
- info("received "+res+" (available="+in.available+")")
- res
- }
-
- /** <code>?</code> method may throw either an
- * <code>ClassNotFoundException</code> or an <code>IOException</code>.
- */
- def ?[T](implicit t: scala.reflect.ClassTag[T]): T = receive[T](t)
-
- /** <code>send</code> method may throw an <code>IOException</code>.
- */
- def send[T](x: T)(implicit t: scala.reflect.ClassTag[T]) {
- out writeObject t
- x match {
- case x: Unit => // nop
- case x: Boolean => out writeBoolean x
- case x: Byte => out writeByte x
- case x: Char => out writeChar x
- case x: Short => out writeShort x
- case x: Int => out writeInt x
- case x: Long => out writeLong x
- case x: Float => out writeFloat x
- case x: Double => out writeDouble x
- case x => out writeObject x
- }
- out.flush()
- info("sent "+x)
- }
-
- /** <code>!</code> method may throw an <code>IOException</code>.
- */
- def ![T](x: T)(implicit m: scala.reflect.ClassTag[T]) { send(x)(m) }
-
- def close() {
- try { socket.close() }
- catch { case e: IOException => }
- info(this+" closed")
- }
-
- override def toString: String = socket.toString
-
- private def info(msg: String) {
- runtime.remoting.Debug.info("[Channel] "+msg)
- }
-}
-
-/** <code>ChannelException</code> may be thrown by the operation
- * <code>receive</code> when the received data has not the expected type.
- */
-case class ChannelException(msg: String) extends IOException(msg)
-
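
Note on the deletion: the removed Channel wrote a ClassTag ahead of every value so the receiving side could check the expected type before casting. For reference, a minimal sketch of that tag-checking handshake over plain object streams; the names (TypedPipe, send, receive) are hypothetical and this is not the removed API:

    import java.io.{ObjectInputStream, ObjectOutputStream}
    import scala.reflect.{ClassTag, classTag}

    object TypedPipe {
      def send[T: ClassTag](out: ObjectOutputStream, x: T): Unit = {
        out.writeObject(classTag[T])   // ship the expected tag first
        out.writeObject(x)
        out.flush()
      }

      def receive[T: ClassTag](in: ObjectInputStream): T = {
        val found = in.readObject().asInstanceOf[ClassTag[_]]
        require(found == classTag[T], s"found $found, expected ${classTag[T]}")
        in.readObject().asInstanceOf[T]
      }
    }
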
diff --git a/src/detach/library/scala/remoting/Debug.scala b/src/detach/library/scala/remoting/Debug.scala
deleted file mode 100644
index 79f2bcedde..0000000000
--- a/src/detach/library/scala/remoting/Debug.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: Debug.scala 17412 2009-03-31 10:08:25Z michelou $
-
-package scala.remoting
-
-/**
- * @author Stephane Micheloud
- * @version 1.0
- */
-object Debug extends runtime.remoting.Debug {
- private val f = new java.text.SimpleDateFormat("HH:mm:ss")
- private val c = new java.util.GregorianCalendar
-
- def getTime: String = f format c.getTime
-
- def getLocation(obj: AnyRef): String = {
- val s = obj.getClass().getClassLoader().toString()
- s substring s.indexOf('[')
- }
-}
diff --git a/src/detach/library/scala/remoting/ServerChannel.scala b/src/detach/library/scala/remoting/ServerChannel.scala
deleted file mode 100644
index 7828f85a1d..0000000000
--- a/src/detach/library/scala/remoting/ServerChannel.scala
+++ /dev/null
@@ -1,68 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: ServerChannel.scala 18365 2009-07-21 11:00:42Z michelou $
-
-package scala.remoting
-
-import java.net.{ServerSocket, Socket}
-
-/** <p>
- * Creates a server channel and binds its associated socket to the
- * specified port number.<br/>
- * Example:
- * </p><pre>
- * <b>class</b> ComputeChannel(s: Socket) <b>extends</b> Channel(s) {
- * <b>def</b> receiveFunc = receive[Int => Int]
- * }
- * <b>class</b> ComputeServer(p: Int)
- * <b>extends</b> AbstractServerChannel[ComputeChannel](p) {
- * <b>def</b> newChannel(s: Socket) = <b>new</b> ComputeChannel(s)
- * }</pre>
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class ServerChannel(p: Int) extends AbstractServerChannel[Channel](p) {
- def newChannel(s: Socket) = new Channel(s)
-}
-
-abstract class AbstractServerChannel[T <: Channel](_port: Int) {
-
- /** Creates an input channel and binds its associated socket to any
- * free port.
- */
- def this() = this(0)
-
- // The maximum queue length for incoming requests to connect is set to 50.
- private val serverSocket = new ServerSocket(_port)
-
- /** Returns the local address of this channel. */
- val host = serverSocket.getInetAddress.getHostAddress
-
- /** Returns the port on which this channel is listening. */
- val port = serverSocket.getLocalPort
- info("Listening on port "+port)
-
- protected def newChannel(socket: Socket): T
-
- def accept: T = {
- System.gc() // required!
- newChannel(serverSocket.accept)
- }
-
- def close() {
- try { serverSocket.close() }
- catch { case e: java.io.IOException => }
- info("Server socket "+host+":"+port+" closed")
- }
-
- protected def info(msg: String) {
- runtime.remoting.Debug.info("[ServerChannel] "+msg)
- }
-}
diff --git a/src/detach/library/scala/remoting/detach.scala b/src/detach/library/scala/remoting/detach.scala
deleted file mode 100644
index 51a3ac515d..0000000000
--- a/src/detach/library/scala/remoting/detach.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: detach.scala 16901 2009-01-13 15:37:05Z michelou $
-
-package scala.remoting
-
-
-/** The <code>detach</code> object is a <em>marker object</em> which informs
- * the Scala compiler that arguments whose type is a function type are
- * eligible for remote closure generation.
- *
- * @author Stephane Micheloud
- * @version 1.0, 13/07/2005
- */
-object detach {
-
- def apply[R](f: Function0[R]): Function0[R] = f
- def apply[T0, R](f: Function1[T0, R]): Function1[T0, R] = f
- def apply[T0, T1, R](f: Function2[T0, T1, R]): Function2[T0, T1, R] = f
- def apply[T0, T1, T2, R](f: Function3[T0, T1, T2, R]): Function3[T0, T1, T2, R] = f
- def apply[T0, T1, T2, T3, R](f: Function4[T0, T1, T2, T3, R]): Function4[T0, T1, T2, T3, R] = f
- def apply[T0, T1, T2, T3, T4, R](f: Function5[T0, T1, T2, T3, T4, R]): Function5[T0, T1, T2, T3, T4, R] = f
- def apply[T0, T1, T2, T3, T4, T5, R](f: Function6[T0, T1, T2, T3, T4, T5, R]): Function6[T0, T1, T2, T3, T4, T5, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, R](f: Function7[T0, T1, T2, T3, T4, T5, T6, R]): Function7[T0, T1, T2, T3, T4, T5, T6, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, R](f: Function8[T0, T1, T2, T3, T4, T5, T6, T7, R]): Function8[T0, T1, T2, T3, T4, T5, T6, T7, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, R](f: Function9[T0, T1, T2, T3, T4, T5, T6, T7, T8, R]): Function9[T0, T1, T2, T3, T4, T5, T6, T7, T8, R] = f
-
- // since 2.7.0
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, R](f: Function10[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, R]): Function10[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R](f: Function11[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R]): Function11[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R](f: Function12[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R]): Function12[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R](f: Function13[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R]): Function13[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R](f: Function14[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R]): Function14[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R](f: Function15[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R]): Function15[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R](f: Function16[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R]): Function16[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R](f: Function17[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R]): Function17[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R](f: Function18[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R]): Function18[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R](f: Function19[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R]): Function19[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R](f: Function20[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R]): Function20[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R](f: Function21[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R]): Function21[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R](f: Function22[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R]): Function22[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R] = f
-}
-
diff --git a/src/detach/library/scala/runtime/RemoteRef.scala b/src/detach/library/scala/runtime/RemoteRef.scala
deleted file mode 100644
index e65b22cb71..0000000000
--- a/src/detach/library/scala/runtime/RemoteRef.scala
+++ /dev/null
@@ -1,182 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteRef.scala 18365 2009-07-21 11:00:42Z michelou $
-
-package scala.runtime
-
-import java.net.{InetAddress, MalformedURLException}
-import java.rmi.{NoSuchObjectException, NotBoundException, Remote}
-import java.rmi.registry.{LocateRegistry, Registry}
-import java.rmi.server.{ExportException, RemoteObject, UnicastRemoteObject}
-
-import scala.runtime.remoting.{Debug, RemoteGC}
-
-/**
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-object RemoteRef { /*extends Thread {
- start()
-
- private class QuitException extends Exception
- private var isTerminated = false
-
- // keeps track of live remote objects
- val remoteGC = new RemoteGC
-
- override def run() {
- info("started thread")
- try {
- while (!isTerminated) {
- this.synchronized {
- try {
- wait(200)
- } catch {
- case _: InterruptedException =>
- if (isTerminated) throw new QuitException
- }
- remoteGC.gc()
- if (remoteGC.allClosed)
- throw new QuitException
- } // synchronized
-
- }
- } catch {
- case _: QuitException =>
- // allow thread to exit
- }
- }
-*/
- try {
- val prop = System.getProperty("sun.rmi.dgc.server.gcInterval")
- if (prop eq null)
- System.setProperty("sun.rmi.dgc.server.gcInterval", "10000")
- }
- catch {
- case e =>
- error(e.getMessage)
- }
-
- private val host =
- try {
- val prop = System.getProperty("java.rmi.server.hostname")
- if (prop ne null) prop else InetAddress.getLocalHost.getHostAddress
- }
- catch {
- case e =>
- warning(e.getMessage)
- InetAddress.getLocalHost.getHostAddress
- }
-
- private val port =
- try {
- val prop = System.getProperty("scala.remoting.port")
- if (prop ne null) prop.toInt else Registry.REGISTRY_PORT
- }
- catch {
- case e =>
- warning(e.getMessage)
- Registry.REGISTRY_PORT // default port
- }
-
- private val registry =
- try {
- LocateRegistry.createRegistry(port)
- }
- catch {
- case e =>
- warning(e.getMessage)
- LocateRegistry.getRegistry(host, port)
- }
-
- private val prefix = "//"+host+":"+port+"/"
- printDebugInfos
-
- // Variant 1: rebind/unbind
- def bind(name: String, x: Remote): Remote =
- try {
- registry.rebind(prefix+name, x)
- info("\""+prefix+name+"\" bound")
- val stub = RemoteObject.toStub(x)
- //remoteGC.newRef(stub)
- stub
- } catch {
- case e: MalformedURLException =>
- error(e.getMessage); null
- case e: ExportException =>
- info(""+e); null
- case e: Exception => // AlreadyBoundException, etc..
- throw e
- }
-
- def unbind(name: String) =
- try {
- registry.unbind(prefix+name)
- info("\""+name+"\" unbound")
- } catch {
- case e: java.io.EOFException =>
- warning(e.getMessage)
- case e: NotBoundException =>
- warning(e.getMessage+" already unbound")
- case e: MalformedURLException =>
- error(e.getMessage)
- case e: Exception =>
- throw e
- }
-/*
- // Variant 2: un-/exportObject
- def bind(name: String, x: Remote): Remote =
- try {
- val ex = UnicastRemoteObject.exportObject(x)
- registry.rebind(prefix+name, ex)
- info("\""+prefix+name+"\" bound")
- //val stub = RemoteObject.toStub(ex)
- //remoteGC.newRef(ex)
- ex //stub
- } catch {
- case e: MalformedURLException =>
- error(e.getMessage); null
- case e: ExportException =>
- info(""+e); null
- case e: Exception => // AlreadyBoundException, etc..
- throw e
- }
-
- def unbind(x: Remote) {
- try {
- UnicastRemoteObject.unexportObject(x, false)
- info("\""+x+"\" unbound")
- } catch {
- case e: java.io.EOFException =>
- warning(e.getMessage)
- case e: NotBoundException =>
- warning(e.getMessage+" already unbound")
- case e: MalformedURLException =>
- error(e.getMessage)
- case e: Exception =>
- throw e
- }
- }
-*/
- private def info(msg: String) { Debug.info("[RemoteRef] "+msg) }
- private def warning(msg: String) { Debug.warning("[RemoteRef] "+msg) }
- private def error(msg: String) { Debug.error("[RemoteRef] "+msg) }
-
- private def printDebugInfos() {
- def property(name: String): String =
- name+"="+(
- try { System.getProperty(name, "") }
- catch { case e => warning(e.getMessage); "?" })
- info(property("java.rmi.server.hostname"))
- info(property("sun.rmi.dgc.server.gcInterval"))
- info("registry="+registry)
- info("prefix="+prefix)
- }
-}
diff --git a/src/detach/library/scala/runtime/remoting/Debug.scala b/src/detach/library/scala/runtime/remoting/Debug.scala
deleted file mode 100644
index 06cdc67997..0000000000
--- a/src/detach/library/scala/runtime/remoting/Debug.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: Debug.scala 17777 2009-05-19 18:16:25Z michelou $
-
-package scala.runtime.remoting
-
-/**
- * @author Stephane Micheloud
- * @version 1.0
- */
-object Debug extends Debug {
- override def info (msg: String) { if (lib) super.info(msg) }
- override def verbose(msg: String) { if (lib) super.verbose(msg) }
- override def warning(msg: String) { if (lib) super.warning(msg) }
- override def error (msg: String) { if (lib) super.error(msg) }
-}
-
-/**
- * @author Stephane Micheloud
- * @version 1.0
- */
-class Debug(tag: String) {
-
- def this() = this("")
-
- object Level extends Enumeration {
- type Level = Value
- val SILENT, ERROR, WARNING, VERBOSE, INFO = Value
- }
-
- private val level0 =
- try {
- val prop = System.getProperty("scala.remoting.logLevel")
- if (prop ne null) prop.toLowerCase else ""
- }
- catch {
- case e =>
- Console.err.println(e.getMessage)
- ""
- }
-
- import Level._
- protected var (lev, lib) = {
- val p = java.util.regex.Pattern.compile("(error|warning|verbose|info)(\\,lib)?(.*)")
- val m = p matcher level0
- val (s, b) =
- if (m.matches) (m.group(1), m.group(2) ne null)
- else ("", false)
- s match {
- case "error" => (ERROR , b)
- case "warning" => (WARNING, b)
- case "verbose" => (VERBOSE, b)
- case "info" => (INFO , b)
- case _ => (SILENT , false)
- }
- }
-
- def level = lev
- def level_= (lev: Level) = { this.lev = lev }
-
- private val tag0: String =
- if (tag != null & tag.length > 0) tag+" " else ""
-
- def info(msg: String) {
- if (lev >= INFO) Console.println(tag0 + "(info): " + msg)
- }
-
- def verbose(msg: String) {
- if (lev >= VERBOSE) Console.println(tag0 + "(verb): " + msg)
- }
-
- def warning(msg: String) {
- if (lev >= WARNING) Console.err.println(tag0 + "(warn): " + msg)
- }
-
- def error(msg: String) {
- if (lev >= ERROR) Console.err.println(tag0 + "(erro): " + msg)
- }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RegistryDelegate.scala b/src/detach/library/scala/runtime/remoting/RegistryDelegate.scala
deleted file mode 100644
index 1105832ef7..0000000000
--- a/src/detach/library/scala/runtime/remoting/RegistryDelegate.scala
+++ /dev/null
@@ -1,192 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RegistryDelegate.scala 18234 2009-07-07 13:21:57Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.{RMISecurityManager, Remote, RemoteException}
-import java.rmi.registry.{LocateRegistry, Registry}
-import java.rmi.server.UnicastRemoteObject
-
-/**
- * <p>
- * This class implements the registry delegate concept
- * (see http://www.genady.net/rmi/v20/docs/delegate/RegistryDelegate.html)
- * </p>
- * <p>
- * In order to enforce some level of security, the standard RMI registry
- * implementation (e.g. <code>rmiregistry.exe</code>) only allows processes
- * on the same host to register objects in the registry (think of a bank
- * running a registry on one of its servers, and doesn't want anybody
- * modifying it). So, by design, if a process tries to
- * <code>bind(String, Remote)</code> an object to a remote registry,
- * an exception will be thrown.
- * </p>
- * <p>
- * However, the design of a distributed system may require remote clients to
- * register themselves in a central registry. If such system is deployed in a
- * controlled and trusted environment (e.g., a firewalled intranet with tight
- * access control), the security risk may be acceptable.
- * </p>
- * <p>
- * The simplest technical solution to the remote registration problem is to
- * have a registry delegate. A registry delegate is an object that serves as
- * a proxy for the real registry. The delegate itself usually appears in the
- * registry under a well known name. It implements the Registry interface and
- * simply delegates all method calls to the appropriate methods of the real
- * registry. The delegate is allowed to perform bind and unbind operations
- * because it is running on the same host as the registry.
- * </p>
- * <p>
- * The common scenario for starting a registry and creating the delegate is
- * starting a class with the following <code>main(Array[String])</code> method:
- * </p>
- * <pre>
- * @throws(classOf[AccessException], classOf[RemoteException], classOf[AlreadyBoundException])
- * <b>object</b> namingService {
- * <b>def</b> main(args: Array[String]) {
- * <b>if</b> (System.getSecurityManager() == <b>null</b>)
- * System.setSecurityManager(<b>new</b> RMISecurityManager())
- *
- * <b>val</b> registry = LocateRegistry.createRegistry(REGISTRY_PORT)
- * registry.bind(DELEGATE_NAME, <b>new</b> RegistryDelegate());
- *
- * do {
- * <b>try</b> {
- * Thread.sleep(Long.MAX_VALUE)
- * } <b>catch</b> {
- * <b>case</b> e: InterruptedException => // do nothing
- * <b>case</b> e: Throwable => e.printStackTrace(); sys.exit(1)
- * }
- * } while (<b>true</b>)
- * }
- * }</pre>
- * <p>
- * The common usage scenario looks something like:
- * </p><pre>
- * Registry remoteRegistry = LocateRegistry.getRegistry("remotehost.mycompany.com");
- * Registry delegate = (Registry) remoteRegistry.lookup(DELEGATE_NAME);
- * delegate.bind("someName", <b>new</b> SomeRemoteObject());</pre>
- * <p>
- * The <code>getRegistryDelegate(String)</code> method is a helper method
- * that fetches the registry delegate for you.
- * </p>
- * <p>
- * The <code>main(Array[String])</code> method of this class will create a
- * local registry on the default port, create a registry delegate and bind
- * it under the well known name that you chose in the wizard
- * (<code>DELEGATE_NAME</code>).
- * </p>
- *
- * @author Genady Beryozkin, rmi-info@genady.net
- */
-
-object RMIDelegate {
- /** The name under which the delegate appears in the registry. */
- val DELEGATE_NAME = "foo"
-
- /** This method retrieves the registry delegate from a registry that is
- * running on a remote host.
- */
- @throws(classOf[RemoteException])
- def getRegistryDelegate(remoteHost: String): Registry =
- getRegistryDelegate(remoteHost, Registry.REGISTRY_PORT)
-
- /** This method retrieves the registry delegate from a registry that is
- * running on a remote host.
- */
- @throws(classOf[RemoteException])
- def getRegistryDelegate(remoteHost: String, remotePort: Int): Registry = {
- val registry = LocateRegistry.getRegistry(remoteHost, remotePort)
- (registry lookup DELEGATE_NAME).asInstanceOf[Registry]
- }
-
- /** A simple way to run a registry and bind a registry delegate. */
- @throws(classOf[RemoteException])
- def main(args: Array[String]) {
- var port = Registry.REGISTRY_PORT
-
- if (args.length > 0) {
- if (args(0) equals "-help") {
- println("Usage: rmidelegate <options> <port>")
- sys.exit(0)
- }
- try {
- port = args(0).toInt
- } catch {
- case e: NumberFormatException =>
- println("Usage: rmidelegate <options> <port>")
- sys.exit(1)
- }
- val opts = args filter (_ startsWith "-J-D")
- for (opt <- opts) {
- val x = opt.substring(4) split "="
- if (x.length == 2) System.setProperty(x(0), x(1))
- else System.setProperty(x(0), "")
- }
- }
-
- if (System.getSecurityManager() == null)
- System.setSecurityManager(new RMISecurityManager() {
- override def checkPermission(p: java.security.Permission) {}
- })
-
-
- val registry = LocateRegistry.createRegistry(port)
- registry.bind(DELEGATE_NAME, new RegistryDelegate())
-
- do {
- try {
- Thread.sleep(Long.MaxValue)
- } catch {
- case e: InterruptedException =>
- // do nothing
- case e: Throwable =>
- e.printStackTrace()
- sys.exit(1)
- }
- } while (true)
- }
-
-}
-
-/** Create a delegate for a user provided registry instance. The registry is
- * assumed to be a local registry, as there is no point in creating a delegate
- * for a remote registry.
- */
-class RegistryDelegate(reg: Registry) extends UnicastRemoteObject with Registry {
- /** The local registry */
- private val localRegistry: Registry = reg
-
- /** Create a delegate for a local registry that is bound to the default
- * local port (1099).
- */
- def this() = this(LocateRegistry.getRegistry())
-
- /** Create a delegate for a local registry that is bound to a user
- * specified port.
- */
- def this(port: Int) = this(LocateRegistry.getRegistry(port))
-
- @throws(classOf[RemoteException])
- def bind(name: String, obj: Remote) { localRegistry.bind(name, obj) }
-
- @throws(classOf[RemoteException])
- def list(): Array[String] = localRegistry.list()
-
- @throws(classOf[RemoteException])
- def lookup(name: String): Remote = localRegistry.lookup(name)
-
- @throws(classOf[RemoteException])
- def rebind(name: String, obj: Remote) { localRegistry.rebind(name, obj) }
-
- @throws(classOf[RemoteException])
- def unbind(name: String) { localRegistry.unbind(name) }
-
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteBooleanRef.scala b/src/detach/library/scala/runtime/remoting/RemoteBooleanRef.scala
deleted file mode 100644
index ff6c8f6b6c..0000000000
--- a/src/detach/library/scala/runtime/remoting/RemoteBooleanRef.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteBooleanRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{BooleanRef, RemoteRef}
-
-/**
- * The trait Remote<code>RemoteBooleanRef</code> provides a remote interface
- * for manipulating boolean references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteBooleanRef {
- def elem_=(value: Boolean)
- def elem: Boolean
-}
-
-/**
- * The class <code>RemoteBooleanRefImpl</code> implements a remote (global)
- * boolean reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.BooleanRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the no more referenced binding from the registry.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteBooleanRefImpl(name: String, x: BooleanRef)
-extends UnicastRemoteObject with RemoteBooleanRef with Unreferenced {
- def elem_=(value: Boolean) { x.elem = value }
- def elem: Boolean = x.elem
- override def toString() = x.elem.toString
- def unreferenced() {
- Debug.info("[RemoteBooleanRefImpl] unreferenced: "+this)
- RemoteRef.unbind(name)
- }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteByteRef.scala b/src/detach/library/scala/runtime/remoting/RemoteByteRef.scala
deleted file mode 100644
index 335f0d9019..0000000000
--- a/src/detach/library/scala/runtime/remoting/RemoteByteRef.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteByteRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{ByteRef, RemoteRef}
-
-/**
- * The trait Remote<code>RemoteByteRef</code> provides a remote interface
- * for manipulating byte references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteByteRef {
- def elem_=(value: Byte)
- def elem: Byte
-}
-
-/**
- * The class <code>RemoteByteRefImpl</code> implements a remote (global)
- * byte reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.ByteRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the no more referenced binding from the registry.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteByteRefImpl(name: String, x: ByteRef)
-extends UnicastRemoteObject with RemoteByteRef with Unreferenced {
- def elem_=(value: Byte) { x.elem = value }
- def elem: Byte = x.elem
- override def toString() = x.elem.toString
- def unreferenced() {
- Debug.info("[RemoteByteRefImpl] unreferenced: "+this)
- RemoteRef.unbind(name)
- }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteCharRef.scala b/src/detach/library/scala/runtime/remoting/RemoteCharRef.scala
deleted file mode 100644
index e0f48eb970..0000000000
--- a/src/detach/library/scala/runtime/remoting/RemoteCharRef.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteCharRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{CharRef, RemoteRef}
-
-/**
- * The trait Remote<code>RemoteCharRef</code> provides a remote interface
- * for manipulating character references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteCharRef {
- def elem_=(value: Char)
- def elem: Char
-}
-
-/**
- * The class <code>RemoteCharRefImpl</code> implements a remote (global)
- * character reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.CharRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the no more referenced binding from the registry.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteCharRefImpl(name: String, x: CharRef)
-extends UnicastRemoteObject with RemoteCharRef with Unreferenced {
- def elem_=(value: Char) { x.elem = value }
- def elem: Char = x.elem
- override def toString() = x.elem.toString
- def unreferenced() {
- Debug.info("[RemoteCharRefImpl] unreferenced: "+this)
- RemoteRef.unbind(name)
- }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteDoubleRef.scala b/src/detach/library/scala/runtime/remoting/RemoteDoubleRef.scala
deleted file mode 100644
index 2e1319595a..0000000000
--- a/src/detach/library/scala/runtime/remoting/RemoteDoubleRef.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteDoubleRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{DoubleRef, RemoteRef}
-
-/**
- * The trait Remote<code>RemoteDoubleRef</code> provides..
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteDoubleRef {
- def elem_=(value: Double)
- def elem: Double
-}
-
-/**
- * The class <code>RemoteDoubleRefImpl</code> implements a remote (global)
- * double reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.DoubleRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the no more referenced binding from the registry.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteDoubleRefImpl(name: String, x: DoubleRef)
-extends UnicastRemoteObject with RemoteDoubleRef with Unreferenced {
- def elem_=(value: Double) { x.elem = value }
- def elem: Double = x.elem
- override def toString() = x.elem.toString
- def unreferenced() {
- Debug.info("[RemoteDoubleRefImpl] unreferenced: "+this)
- RemoteRef.unbind(name)
- }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteFloatRef.scala b/src/detach/library/scala/runtime/remoting/RemoteFloatRef.scala
deleted file mode 100644
index f4e61ea6da..0000000000
--- a/src/detach/library/scala/runtime/remoting/RemoteFloatRef.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteFloatRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{FloatRef, RemoteRef}
-
-/**
- * The trait Remote<code>RemoteFloatRef</code> provides a remote interface
- * for manipulating float references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteFloatRef {
- def elem_=(value: Float)
- def elem: Float
-}
-
-/**
- * The class <code>RemoteFloatRefImpl</code> implements a remote (global)
- * float reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.FloatRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteFloatRefImpl(name: String, x: FloatRef)
-extends UnicastRemoteObject with RemoteFloatRef with Unreferenced {
- def elem_=(value: Float) { x.elem = value }
- def elem: Float = x.elem
- override def toString() = x.elem.toString
- def unreferenced() {
- Debug.info("[RemoteIntFloatImpl] unreferenced: "+this)
- RemoteRef.unbind(name)
- }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteGC.scala b/src/detach/library/scala/runtime/remoting/RemoteGC.scala
deleted file mode 100644
index 393c031bfc..0000000000
--- a/src/detach/library/scala/runtime/remoting/RemoteGC.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteGC.scala 17547 2009-04-21 13:56:28Z michelou $
-
-package scala.runtime.remoting
-
-import java.lang.ref.{Reference, WeakReference, ReferenceQueue}
-import java.rmi.{NoSuchObjectException, Remote}
-import java.rmi.server.UnicastRemoteObject
-import scala.collection.mutable
-
-/**
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-// Adapted from scala.actors.ActorGC
-private [runtime] class RemoteGC {
-
- private val refQueue = new ReferenceQueue[Remote]
- private val refSet = new mutable.HashSet[Reference[T] forSome { type T <: Remote }]
-
- private var liveRefs = 0
-
- def newRef(a: Remote) = synchronized {
- refSet += new WeakReference(a, refQueue)
- liveRefs += 1
- info("added object reference \""+a+"\" ("+liveRefs+")")
- }
-
- def gc() = synchronized {
- info("GC called ("+liveRefs+")")
- // check for unreachable object references
- def drain() {
- val wr = refQueue.poll
- if (wr != null) {
- val msg = try {
- UnicastRemoteObject.unexportObject(wr.get, true/*force*/)
- "removed object reference"
- }
- catch {
- case e: NoSuchObjectException =>
- "object already unbound"
- }
- info(msg+" ("+liveRefs+")")
- liveRefs -= 1
- refSet -= wr
- // continue draining
- drain()
- }
- }
- drain()
- }
-
- def allClosed: Boolean = synchronized {
- liveRefs <= 0
- }
-
- private def info(msg: String) { Debug.info("[RemoteGC] "+msg) }
-}
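A brief sketch of how RemoteGC is meant to be driven, based only on the API above; the surrounding export helper is hypothetical, and since the class is private[runtime] this only compiles from runtime-internal code:

    import java.rmi.Remote
    import scala.runtime.remoting.RemoteGC

    // Register each exported object; a periodic call to gc() then unexports
    // whatever the remote side has stopped referencing.
    val remoteGC = new RemoteGC
    def track(obj: Remote): Remote = { remoteGC.newRef(obj); obj }

    remoteGC.gc()                        // drains the reference queue
    if (remoteGC.allClosed)
      println("no live remote references left")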
diff --git a/src/detach/library/scala/runtime/remoting/RemoteIntRef.scala b/src/detach/library/scala/runtime/remoting/RemoteIntRef.scala
deleted file mode 100644
index b14403f6ca..0000000000
--- a/src/detach/library/scala/runtime/remoting/RemoteIntRef.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteIntRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{IntRef, RemoteRef}
-
-/**
- * The trait <code>RemoteIntRef</code> provides a remote interface
- * for manipulating integer references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteIntRef {
- def elem_=(value: Int)
- def elem: Int
-}
-
-/**
- * The class <code>RemoteIntRefImpl</code> implements a remote (global)
- * integer reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.IntRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the binding from the registry once it is no longer referenced.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteIntRefImpl(name: String, x: IntRef)
-extends UnicastRemoteObject with RemoteIntRef with Unreferenced {
- def elem_=(value: Int) { x.elem = value }
- def elem: Int = x.elem
- override def toString() = x.elem.toString
- def unreferenced() {
- Debug.info("[RemoteIntRefImpl] unreferenced: "+this)
- RemoteRef.unbind(name)
- }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteLongRef.scala b/src/detach/library/scala/runtime/remoting/RemoteLongRef.scala
deleted file mode 100644
index da83491489..0000000000
--- a/src/detach/library/scala/runtime/remoting/RemoteLongRef.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteLongRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{LongRef, RemoteRef}
-
-/**
- * The trait <code>RemoteLongRef</code> provides a remote interface
- * for manipulating long integer references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteLongRef {
- def elem_=(value: Long)
- def elem: Long
-}
-
-/**
- * The class <code>RemoteLongRefImpl</code> implements a remote (global)
- * long integer reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.LongRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the binding from the registry once it is no longer referenced.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteLongRefImpl(name: String, x: LongRef)
-extends UnicastRemoteObject with RemoteLongRef with Unreferenced {
- def elem_=(value: Long) { x.elem = value }
- def elem: Long = x.elem
- override def toString() = x.elem.toString
- def unreferenced() {
- Debug.info("[RemoteLongRefImpl] unreferenced: "+this)
- RemoteRef.unbind(name)
- }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteObjectRef.scala b/src/detach/library/scala/runtime/remoting/RemoteObjectRef.scala
deleted file mode 100644
index 9f27b26114..0000000000
--- a/src/detach/library/scala/runtime/remoting/RemoteObjectRef.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteObjectRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{ObjectRef, RemoteRef}
-
-/**
- * The trait <code>RemoteObjectRef</code> provides a remote interface
- * for manipulating object references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteObjectRef {
- def elem_=(value: AnyRef)
- def elem: AnyRef
-}
-
-/**
- * The class <code>RemoteObjectRefImpl</code> implements a remote (global)
- * object reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.ObjectRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the binding from the registry once it is no longer referenced.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteObjectRefImpl(name: String, x: ObjectRef)
-extends UnicastRemoteObject with RemoteObjectRef with Unreferenced {
- def elem_=(value: AnyRef) { x.elem = value }
- def elem: AnyRef = x.elem
- override def toString() = x.elem.toString
- def unreferenced() {
- Debug.info("[RemoteObjectRefImpl] unreferenced: "+this)
- RemoteRef.unbind(name)
- }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteShortRef.scala b/src/detach/library/scala/runtime/remoting/RemoteShortRef.scala
deleted file mode 100644
index 2ced9dbc83..0000000000
--- a/src/detach/library/scala/runtime/remoting/RemoteShortRef.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteShortRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{ShortRef, RemoteRef}
-
-/**
- * The trait <code>RemoteShortRef</code> provides a remote interface
- * for manipulating short integer references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteShortRef {
- def elem_=(value: Short)
- def elem: Short
-}
-
-/**
- * The class <code>RemoteShortRefImpl</code> implements a remote (global)
- * short integer reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.ShortRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteShortRefImpl(name: String, x: ShortRef)
-extends UnicastRemoteObject with RemoteShortRef with Unreferenced {
- def elem_=(value: Short) { x.elem = value }
- def elem: Short = x.elem
- override def toString() = x.elem.toString
- def unreferenced() {
- Debug.info("[RemoteShortRefImpl] unreferenced: "+this)
- RemoteRef.unbind(name)
- }
-}
diff --git a/src/detach/plugin/scala/tools/detach/Detach.scala b/src/detach/plugin/scala/tools/detach/Detach.scala
deleted file mode 100644
index f9a3d80da4..0000000000
--- a/src/detach/plugin/scala/tools/detach/Detach.scala
+++ /dev/null
@@ -1,1190 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Stephane Micheloud
- */
-
-package scala.tools.detach
-
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
-import scala.tools.nsc._
-import scala.tools.nsc.plugins.PluginComponent
-import scala.tools.nsc.symtab.Flags._
-import scala.tools.nsc.transform._
-
-abstract class Detach extends PluginComponent
- with Transform with TypingTransformers {
- import global._
- import definitions._
-
- /** the following two members override abstract members in Transform */
- val phaseName: String = "detach"
-
- protected def newTransformer(unit: CompilationUnit): Transformer =
- new DetachTransformer(unit)
-
- // set with the `-P:detach:enable` plugin option (see DetachPlugin)
- protected[detach] var isEnabled = false
-
- private class DetachTransformer(unit: CompilationUnit)
- extends TypingTransformer(unit) {
- private val DEBUG = settings.debug.value
- private val PROXY_PREFIX = "proxy$" // local proxy objects
- private val PROXY_SUFFIX = "$proxy" // top-level proxy classes
- private val DETACH_SUFFIX = "$detach" // detached closures
- private val IMPL_SUFFIX = "Impl" // follows Java convention
-
- private val nme_bind = newTermName("bind")
- private val nme_unbind = newTermName("unbind")
- private val nme_unreferenced = newTermName("unreferenced")
-
- private val Functions = FunctionClass.toList // see method isFuncType
-
- private val RemoteClass =
- definitions.getClass("java.rmi.Remote")
-
- private val UIDClass =
- definitions.getClass("java.rmi.server.UID")
-
- private val UnicastRemoteObjectClass =
- definitions.getClass("java.rmi.server.UnicastRemoteObject")
-
- private val UnreferencedClass =
- definitions.getClass("java.rmi.server.Unreferenced")
-
- private val DetachModule =
- definitions.getModule("scala.remoting.detach")
-
- private val DebugModule =
- definitions.getModule("scala.remoting.Debug")
-
- private val RemoteRefModule =
- definitions.getModule("scala.runtime.RemoteRef")
-
- private val ThreadModule =
- definitions.getModule("java.lang.Thread")
-
- private val UnicastRemoteObjectModule =
- definitions.getModule("java.rmi.server.UnicastRemoteObject")
-
- private val remoteAnnotationInfo = {
- val RemoteAttr: Symbol = definitions.getClass("scala.remote")
- AnnotationInfo(RemoteAttr.tpe, List(), List())
- }
-
- private val serializableAnnotationInfo =
- AnnotationInfo(SerializableAttr.tpe, List(), List())
-/*
- private val throwsAnnotationInfo = {
- val RemoteExceptionClass = definitions.getClass("java.rmi.RemoteException")
- val ThrowsAttr = definitions.getClass("scala.throws")
- AnnotationInfo(
- ThrowsAttr.tpe,
- List(Literal(Constant(RemoteExceptionClass.tpe))),
- List()
- )
- }
-*/
- // todo: see generation of Java version UID
- private def serialVersionUIDAnnotationInfo(clazz: Symbol) = {
- def genHash(sym: Symbol): Long = {
- val sym1 = if (sym.isConstructor) sym.owner else sym
- val ts = sym.tpe match {
- case MethodType(params, rt) => (params map (_.tpe)) ::: List(rt)
- case t => List(t)
- }
- val hashes = sym1.nameString.hashCode ::
- (ts map (_.typeSymbol.nameString.hashCode))
- (0L /: hashes)((acc, h) => acc ^ h)
- }
- val hashes = for (sym <- clazz.info.decls.toList) yield genHash(sym)
- val uid: Long = (0L /: hashes) ((acc, h) => acc * 41 + h)
- val serialVersionUIDAttr = definitions.getClass("scala.SerialVersionUID")
- AnnotationInfo(
- serialVersionUIDAttr.tpe,
- List(Literal(Constant(uid))),
- List()
- )
- }
-
- private def elems(suffix: String): List[(Symbol, Symbol)] =
- for (clazz <- ObjectRefClass :: refClass.valuesIterator.toList) yield {
- val name = "scala.runtime.remoting.Remote" + clazz.name + suffix
- (clazz, definitions.getClass(name))
- }
- private val remoteRefClass = immutable.HashMap(elems(""): _*)
- private val remoteRefImpl = immutable.HashMap(elems("Impl"): _*)
-
- private val proxyInterfaceDefs = new mutable.HashMap[Symbol/*owner*/, ListBuffer[Tree]]
- private val detachedClosureApply = new mutable.HashMap[Tree, Apply]
-
- private type SymSet = mutable.HashSet[Symbol]
- private val capturedObjects = new mutable.HashMap[Symbol/*clazz*/, SymSet]
- private val capturedFuncs = new mutable.HashMap[Symbol/*clazz*/, SymSet]
- private val capturedCallers = new mutable.HashMap[Symbol/*clazz*/, SymSet]
- private val capturedThisClass = new mutable.HashMap[Symbol, Symbol]
-
- private val proxies = new mutable.HashMap[
- Symbol, //clazz
- (Symbol, Symbol, mutable.HashMap[Symbol, Symbol]) //iface, impl, accessor map
- ]
- def toInterface(clazz: Symbol) = proxies(clazz)._1
- private val classdefs = new mutable.HashMap[Symbol/*clazz*/, ClassDef]
- // detachedClosure gathers class definitions containing a "detach" apply
- private val detachedClosure = new mutable.HashMap[Symbol/*clazz*/, ClassDef]
-
- /** <p>
- * The method <code>freeObjTraverser.traverse</code> is invoked
- * in the method <code>DetachPlugin.transformUnit</code> in order to
- * gather information about objects that are referenced inside a detached
- * closure and will be accessed remotely through object proxies.
- * </p>
- * <p>
- * Object proxies are generated in method <code>mkClosureApply</code>
- * and their definitions are generated in method <code>genProxy</code>.
- * </p>
- */
- private val freeObjTraverser = new Traverser {
- def symSet(f: mutable.HashMap[Symbol, SymSet], sym: Symbol): SymSet = f.get(sym) match {
- case Some(ss) => ss
- case None => val ss = new mutable.HashSet[Symbol]; f(sym) = ss; ss
- }
- def getClosureApply(tree: Tree): Apply = tree match {
- case Block(_, expr) => getClosureApply(expr)
- case Typed(expr, _) => getClosureApply(expr)
- case apply @ Apply(Select(_, _), _) => apply // sel="<init>" or some "f$0"
- case Apply(fun, _) => getClosureApply(fun)
- case _ =>
- throw new Error("getClosureApply: unhandled case " + tree)
- }
- def isFuncType(tp: Type): Boolean = tp match {
- case TypeRef(pre, sym, args) =>
- Functions contains sym.tpe.typeSymbol
- case _ =>
- false
- }
- def isOuterMember(sym: Symbol): Boolean =
- sym.isOuterAccessor ||
- sym.name.endsWith(nme.OUTER/*, nme.OUTER.length*/)
- override def traverse(tree: Tree) {
- val sym = tree.symbol
- val owner =
- if (currentOwner.isModule) currentOwner
- else currentOwner.enclClass
- tree match {
- case cdef @ ClassDef(_, _, _, impl) =>
- classdefs(sym) = cdef
- super.traverse(impl)
- if (detachedClosure contains sym) {
- detachedClosure(sym) = cdef
- symSet(capturedObjects, sym) += capturedThisClass(sym)
- }
-
- case Apply(Select(qual, _), List(arg))
- if (qual.tpe <:< DetachModule.tpe) =>
- assert(isFuncType(arg.tpe))//debug
- val t = getClosureApply(arg)
- if (!t.fun.symbol.isConstructor)
- unit.error(t.pos, "detach inapplicable for " +t.fun.symbol)
- val sym = t.fun.symbol.owner
- capturedThisClass(sym) = owner
- symSet(capturedFuncs, sym)
- detachedClosureApply(tree) = t
- classdefs get sym match {
- case None =>
- detachedClosure(sym) = null // set later in case ClassDef
- case Some(cdef) =>
- detachedClosure(sym) = cdef
- symSet(capturedObjects, sym) += capturedThisClass(sym)
- }
- super.traverse(arg)
-
- case Select(qual @ This(_), name)
- if qual.symbol.isModuleClass && !qual.symbol.isPackageClass =>
- val qsym = qual.symbol
- symSet(capturedFuncs, owner) += sym
- symSet(capturedObjects, owner) += qsym
-
- case Select(qual, name)
- if (qual.hasSymbol &&
- (sym.owner != owner) &&
- !(sym.ownerChain contains ScalaPackageClass) &&
- !(sym.owner hasFlag JAVA)) =>
- val qsym = qual.symbol
- symSet(capturedFuncs, owner) += sym
- if (qsym.isStaticModule && !qsym.isPackage) {
- //println("*****1******* capturedObjects("+owner+") += "+qsym)
- symSet(capturedObjects, owner) += qsym
- }
- else if (!isOuterMember(qsym) && !(qsym isNestedIn owner)) {
- //println("*****3******* capturedCallers("+sym+") += "+qsym)
- symSet(capturedCallers, sym) += qsym
- }
-
- case _ =>
- super.traverse(tree)
- }
- }
- } //freeObjTraverser
-
- private val valueClass = immutable.HashMap(
- (for ((sym, ref) <- refClass.toList) yield (ref, sym)): _*
- ) + (ObjectRefClass -> ObjectClass)
-
- private def toValueClass(tp: Type): Type =
- if (isRefClass(tp)) valueClass(tp.typeSymbol).tpe
- else if (proxies contains tp.typeSymbol) toInterface(tp.typeSymbol).tpe
- else tp
-
- private def isRefClass(tp: Type): Boolean =
- (tp ne null) &&
- ((refClass.valuesIterator contains tp.typeSymbol) || (ObjectRefClass eq tp.typeSymbol))
-
- private def isRemoteRefClass(tp: Type): Boolean =
- (tp ne null) && (remoteRefClass.valuesIterator contains tp.typeSymbol)
-
- private def mkRemoteRefClass(tp: Type): Type = {
- assert(isRefClass(tp))
- val tp1 = remoteRefClass(tp.typeSymbol)
- typeRef(tp1.typeConstructor.prefix, tp1, Nil) // after erasure, no type anymore!
- }
-
- class TreeOuterSubstituter(from: List[Symbol], to: List[Symbol]) extends Traverser {
- if (DEBUG)
- println("\nTreeOuterSubstituter:"+
- "\n\tfrom="+from.mkString(",")+
- "\n\tto="+to.mkString(","))
- val substMap = new mutable.HashMap[Symbol, Symbol]
- override def traverse(tree: Tree) {
- def subst(from: List[Symbol], to: List[Symbol]) {
- if (!from.isEmpty)
- if (tree.symbol.tpe == from.head.tpe) {
- if (DEBUG)
- println("\nTreeOuterSubstituter\n\tsym="+tree.symbol+
- ", tpe="+tree.symbol.tpe+
- "\n\towner="+tree.symbol.owner)
- tree.symbol updateInfo to.head.tpe
- }
- else tree.symbol.tpe match {
- case MethodType(params, restp) =>
- for (p <- params if p.tpe == from.head.tpe) {
- p updateInfo to.head.tpe
- }
- if (restp == from.head.tpe) {
- if (DEBUG)
- println("\nTreeOuterSubstituter(2)\n\tsym="+tree.symbol+
- ", tpe="+tree.symbol.tpe+
- ", owner="+tree.symbol.owner)
- tree.symbol updateInfo MethodType(params, to.head.tpe)
- }
- case _ =>
- subst(from.tail, to.tail)
- }
- }
- def isOuter(sym: Symbol): Boolean =
- sym.isOuterAccessor ||
- sym.name.endsWith(nme.OUTER/*, nme.OUTER.length*/)
- if (tree.hasSymbol && isOuter(tree.symbol)) subst(from, to)
- super.traverse(tree)
- }
- }
-
- // based on class Trees.TreeTypeSubstituter
- private class TreeTypeRefSubstituter(clazz: Symbol) extends Traverser {
- override def traverse(tree: Tree) {
- val sym = tree.symbol
- if (tree.hasSymbol && isRefClass(sym.tpe) &&
- (sym.owner.enclClass == clazz) &&
- (sym.isValueParameter || sym.hasFlag(PARAMACCESSOR))) {
- sym setInfo mkRemoteRefClass(sym.tpe)
- tree.tpe = sym.tpe
- }
- if (isRefClass(tree.tpe))
- tree.tpe = mkRemoteRefClass(tree.tpe)
- super.traverse(tree)
- }
- override def apply[T <: Tree](tree: T): T = super.apply(tree)
- }
-
- private class TreeOwnerSubstituter(from: Symbol, to: Symbol) extends Traverser {
- def substType(sym: Symbol): Type = {
- def subst(tpe: Type): Type = tpe match {
- case MethodType(params, restp) =>
- println("TreeOwnerSubstituter[1]: tpe="+tpe+
- ", tpe.typeSymbol="+tpe.typeSymbol+", sym="+sym)//debug
- for (p <- params if p.tpe == from.tpe) {
- println("TreeOwnerSubstituter[2]: sym="+sym+
- ", sym.owner="+sym.owner+", p.tpe="+p.tpe)//debug
- p updateInfo to.tpe
- }
- MethodType(params, subst(restp))
- case _ =>
- if (sym.owner == from && tpe == from.tpe) {
- println("TreeOwnerSubstituter[3]: sym="+sym+
- ", owner="+sym.owner+", tpe="+tpe)//debug
- to.tpe
- } else tpe
- }
- subst(sym.tpe)
- }
- val map = new mutable.HashMap[Symbol, Symbol]
- override def traverse(tree: Tree) {
- if (tree.hasSymbol && tree.symbol != NoSymbol) {
- val sym = tree.symbol
- if (sym.owner == from) {
- val sym1 = map get sym match {
- case Some(s) => s
- case None => val s = sym.cloneSymbol(to); map(sym) = s; s
- }
- tree setSymbol sym1
- }
- val sym1 = tree.symbol
- val tp = substType(sym1)
- if (tp != sym1.tpe) {
- if (sym1.owner == to)
- println("\n%%%%%1%%%%%%% TreeOwnerSubst: tree="+tree+", sym1="+sym1+", sym1.owner="+sym1.owner)//debug
- sym1 setInfo tp
- tree setSymbol sym1
- }
- }
- super.traverse(tree)
- }
- //override def apply[T <: Tree](tree: T): T = super.apply(tree/*.duplicate*/)
- }
-
- private var inConstructorFlag = 0L
-
- private def isCaptured(clazz: Symbol, sym: Symbol): Boolean =
- if (capturedFuncs contains clazz) {
- //log("**1** isCaptured: clazz="+clazz+", sym="+sym+", ")
- capturedFuncs(clazz) contains sym
- }
- else {
- //log("**2** isCaptured: clazz="+clazz+", sym="+sym)
- sym.isMethod && !sym.isConstructor
- }
-
- private class TreeAccessorSubstituter(clazz: Symbol, objs: List[Symbol], proxySyms: List[Symbol])
- extends Transformer {
- def removeAccessors(tree: Tree): Tree = tree match {
- case Apply(fun, _) =>
- removeAccessors(fun)
- case Select(qual, _) if tree.hasSymbol && tree.symbol.isOuterAccessor =>
- removeAccessors(qual)
- case _ =>
- tree
- }
- if (DEBUG)
- println("\nTreeAccessorSubstituter: "+
- "\n\tobjs="+objs.mkString(",")+
- "\n\tproxies="+proxySyms.mkString(","))
- override def transform(tree: Tree): Tree = tree match {
- // transforms field assignment $outer.i$1.elem=..
- // into setter $outer.i$1_=(..)
- case Assign(lhs @ Select(qual1 @ Select(qual, name), name1), rhs)
- if qual1.hasSymbol && !qual1.symbol.isPrivateLocal &&
- isRemoteRefClass(qual1.tpe) =>
- if (DEBUG)
- println("\nTreeAccessorSubstituter: Assign1\n\tqual1="+qual1+", sel.tpe="+lhs.tpe+
- "\n\tqual1.tpe="+qual1.tpe+", name1="+name1+
- "\n\tqual.tpe="+qual.tpe+", tree.tpe="+tree.tpe)//debug
- val iface = toInterface(qual.tpe.typeSymbol)
- val sym = iface.tpe.decls lookup nme.getterToSetter(name)
- atPos(tree.pos)(Apply(
- Select(super.transform(qual), sym) setType lhs.tpe,
- List(super.transform(rhs))
- ) setType tree.tpe)
-
- // transforms local assignment this.x$1.elem=..
- // into setter method this.x$1_=(..)
- case Assign(lhs @ Select(qual, name), rhs)
- if qual.hasSymbol && qual.symbol.isPrivateLocal &&
- isRemoteRefClass(qual.tpe) =>
- if (DEBUG)
- println("\nTreeAccessorSubstituter: Assign2"+
- "\n\tqual="+qual+", qual.tpe="+qual.tpe+
- "\n\tname="+name)
- // substitute the 'elem' member of the reference class with
- // the corresponding setter method of the remote reference class.
- val qual1 = super.transform(qual)
- val sym = qual1.tpe.decls lookup nme.getterToSetter(name)
- val fun = gen.mkAttributedSelect(qual1, sym)
- Apply(fun, List(super.transform(rhs))) setType lhs.tpe
-
- case Assign(Select(qual, name), rhs)
- if qual.hasSymbol && (objs contains qual.symbol) =>
- val sym = qual.symbol
- val proxy = proxySyms(objs indexOf sym)
- if (DEBUG)
- println("\nTreeAccessorSubstituter: Assign3"+
- "\n\tqual="+qual+", qual.tpe="+qual.tpe+
- "\n\tproxy="+proxy+", proxy.tpe="+proxy.tpe+
- "\n\tname="+name)//debug
- // substitute the member accessor of the enclosing class with
- // the corresponding setter method of the detached interface.
- val iface = toInterface(sym)
- val substSymbols = new TreeSymSubstituter(
- sym.info.decls.toList filter { isCaptured(sym, _) },
- iface.info.decls.toList)
- substSymbols(Apply(
- Select(Ident(proxy), nme.getterToSetter(name)),
- List(super.transform(rhs))))
-
- // transforms setter invocation this.i$1_=(..)
- // into setter invocation $outer.i$1_=(..)
- case Apply(Select(qual @ This(_), name), args)
- if (objs contains qual.symbol) && nme.isSetterName(name) =>
- val proxy = proxySyms(objs indexOf qual.symbol)
- if (DEBUG)
- println("\nTreeAccessorSubstituter: Apply"+
- "\n\tqual="+qual+", qual.tpe="+qual.tpe+
- "\n\tproxy="+proxy+", proxy.tpe="+proxy.tpe+
- "\n\tname="+name+", decoded="+name.decode)
- val qual1 = gen.mkAttributedSelect(gen.mkAttributedThis(proxy.owner), proxy)
- val sym1 = proxy.info.decls lookup name.decode
- val fun = gen.mkAttributedSelect(qual1, sym1)
- Apply(fun, args map (super.transform(_))) setType tree.tpe
-
- // transforms access to field this.name$1
- // into invocation of getter method $outer.name$1()
- case Select(qual @ This(_), name)
- if objs contains qual.symbol =>
- val proxy = proxySyms(objs indexOf qual.symbol)
- if (DEBUG)
- println("\nTreeAccessorSubstituter: Select"+
- "\n\tqual="+qual+", qual.tpe="+qual.tpe+
- "\n\tproxy="+proxy+", proxy.tpe="+proxy.tpe+
- "\n\tname="+name+", decoded="+name.decode)
- val qual1 = gen.mkAttributedSelect(gen.mkAttributedThis(proxy.owner), proxy)
- val sym1 = proxy.info.decls lookup nme.originalName(name) //name
- gen.mkAttributedSelect(qual1, sym1)
-
- // transforms field $outer.name$1 into getter method $outer.name$1()
- case Select(qual @ Select(_, name1), name)
- if qual.hasSymbol && name1.endsWith(nme.OUTER/*, nme.OUTER.length*/) &&
- !tree.symbol.isMethod =>
- if (DEBUG)
- println("\nTreeAccessorSubstituter: Select0\n\tqual="+qual+
- ", qual.tpe="+qual.tpe+", name="+name)//debug
- val sym = qual.symbol
- val qual1 = gen.mkAttributedSelect(gen.mkAttributedThis(sym.owner), sym)
- val iface = toInterface(qual.tpe.typeSymbol)
- val sym1 = iface.tpe.decls lookup name
- val fun = gen.mkAttributedSelect(qual1, sym1)
- Apply(fun, List()) setType tree.tpe
-
- case Select(apply @ Apply(fun @ Select(qual, _), _), name)
- if fun.symbol.isOuterAccessor =>
- val tsym = fun.symbol.tpe.resultType.typeSymbol
- val funcs = capturedFuncs(clazz).toList filter (sym =>
- (tsym.ownerChain contains sym.owner) || (tsym isSubClass sym.owner))
- if (DEBUG)
- println("\nTreeAccessorSubstituter: Select1\n\tfun="+fun+
- ",\n\tfun.tpe="+fun.tpe+", name="+name+
- ",\n\tfuncs="+funcs)//debug
- funcs find (tree.symbol.==) match {
- case Some(sym) =>
- val qual1 =
- if (currentOwner.enclClass isNestedIn clazz) apply
- else removeAccessors(qual)
- val name1 =
- (if (tsym isSubClass qual1.tpe.typeSymbol) ""
- else tsym.fullName('$')+"$")+sym.name
- val iface = toInterface(qual1.tpe.typeSymbol)
- val sym1 = iface.tpe.decls lookup name1
- gen.mkAttributedSelect(qual1, sym1)
- case None =>
- super.transform(tree)
- }
-
- // transforms field access $outer.i$1.elem
- // into invocation of getter method $outer.i$1()
- case Select(qual @ Select(qual1, name1), name)
- if qual.hasSymbol && !qual.symbol.isPrivateLocal &&
- isRemoteRefClass(qual.tpe) =>
- if (DEBUG)
- println("\nTreeAccessorSubstituter: Select2\n\tqual="+qual+
- "\n\tqual.tpe="+qual.tpe+", tree.tpe="+tree.tpe)//debug
- val iface = toInterface(qual.symbol.owner)
- val sym1 = iface.tpe.decls lookup name1
- val fun = gen.mkAttributedSelect(qual1, sym1)
- Apply(fun, List()) setType tree.tpe
-
- // transforms local access this.i$1.elem
- // into invocation of getter method this.i$1()
- case Select(qual, name)
- if qual.hasSymbol && qual.symbol.isPrivateLocal &&
- isRemoteRefClass(qual.tpe) =>
- if (DEBUG)
- println("\nTreeAccessorSubstituter: Select3\n\tqual="+qual+
- "\n\tqual.tpe="+qual.tpe)//debug
- val sym = qual.tpe.decls lookup name
- val fun = gen.mkAttributedSelect(qual, sym)
- Apply(fun, List()) setType tree.tpe
-
- case Select(qual, name)
- if qual.hasSymbol && (objs contains qual.symbol) =>
- if (DEBUG)
- println("\nTreeAccessorSubstituter: Select4\n\tqual="+qual+
- ", qual.tpe="+qual.tpe+", name="+name)//debug
- val sym = qual.symbol
- val proxy = proxySyms(objs indexOf sym)
- // substitute the accessor of a member of the enclosing class
- // with the corresponding accessor of the detached interface
- val qual1 = gen.mkAttributedSelect(gen.mkAttributedThis(proxy.owner), proxy)
- val iface = toInterface(sym)
- val sym1 = iface.tpe.decls lookup name.decode
- gen.mkAttributedSelect(qual1, sym1)
-
- case _ =>
- super.transform(tree)
- }
- def apply[T <: Tree](tree: T): T = transform(tree).asInstanceOf[T]
- } // TreeAccessorSubstituter
-/*
- private class TreeNameSubstituter(from: Name, to: Symbol) extends Transformer {
- override def transform(tree: Tree): Tree = tree match {
- case Super(qual, mix) if tree.symbol.name == from =>
- Super(qual, mix) setSymbol to
- case This(name) if name == from =>
- This(to.name) setSymbol to
- case _ =>
- super.transform(tree)
- }
- def apply[T <: Tree](tree: T): T = transform(tree).asInstanceOf[T]
- }
-*/
- /** <p>
- * Given the closure definition (generated by previous phases)
- * </p><pre>
- * class $anonfun$1 extends Object with Function1 {
- * def this($outer: C, x$1: Int): $anonfun$1 = ..
- * def apply(x: Int): Int = x + this.$outer.x() + this.x$1
- * }</pre>
- * <p>
- * the method <code>mkClosureDef</code> transforms the above code
- * to the following:
- * </p><pre>
- * @serializable
- * class $anonfun$1$detach extends Object with Function1 {
- * def this($outer: C$proxy, x$1: Int): $anonfun$1$detach = ..
- * def apply(x: Int): Int = x + this.$outer.x() + this.x$1
- * }</pre>
- * <p>
- * In particular, it performs the following operations:
- * 1) add constructor parameter <code>proxy_n</code> to access
- * the proxy of the enclosing class
- * 2) change reference types in constructor arguments to type
- * <code>Remote_type_Ref</code>
- * 3) change occurences of <code>this</code> identifier to
- * <code>proxy_n</code> in template code
- * 4) change reference types of local value definitions associated
- * to updated constructor arguments to type <code>Remote_type_Ref</code>
- * </p>
- *
- * @param clazz the symbol of the original closure definition
- * @return the typed class definition for the detached closure.
- */
- private def mkClosureDef(clazz: Symbol): Tree = {
- val cdef = detachedClosure(clazz)
- val name = cdef.symbol.name
- if (name endsWith DETACH_SUFFIX)
- return cdef // closure already detached
-
- clazz.name = encode(clazz.name.decode + DETACH_SUFFIX)
- clazz addAnnotation serialVersionUIDAnnotationInfo(clazz)
- clazz addAnnotation serializableAnnotationInfo
-
- val thiz = capturedThisClass(clazz)
- val (List(outer), captured) =
- capturedObjects(clazz).toList partition (thiz.==)
-
- /** <p>
- * Method <code>updateConstructorParams</code> creates a constructor
- * parameter symbol for each element of "captured", plus an outer
- * proxy parameter when the constructor does not already take the
- * enclosing instance,
- * </p>
- * <p>
- * and also updates the signature of the constructor symbol:
- * 1) it adds a parameter type for each element of "captured",
- * 2) it changes reference types to remote reference types.
- * </p>
- */
- def updateConstructorParams(vparams: List[ValDef]): List[Symbol] = {
- val hasOuter = !vparams.isEmpty && (vparams.head.symbol.tpe == thiz.tpe)
- val ctor = clazz.primaryConstructor
- val params = (for (sym <- captured) yield {
- val iface = toInterface(sym)
- val param = ctor.newValueParameter(ctor.pos, freshProxyName)
- .setFlag(SYNTHETIC)
- .setInfo(iface.tpe)
- param.owner = ctor
- param
- }) ::: (
- if (hasOuter) Nil
- else {
- val iface = toInterface(thiz)
- val param = ctor.newValueParameter(ctor.pos, nme.OUTER)
- .setFlag(SYNTHETIC)
- .setInfo(iface.tpe)
- param.owner = ctor
- List(param)
- }
- )
- val tp = ctor.tpe match {
- case mt @ MethodType(params1, restp) =>
- val params2 = if (hasOuter) {
- val iface = toInterface(params1.head.tpe.typeSymbol)
- ctor.newSyntheticValueParam(iface.tpe) :: params1.tail
- }
- else params1
- for (p <- params2 if isRefClass(p.tpe)) {
- p updateInfo mkRemoteRefClass(p.tpe)
- }
- MethodType(params ::: params2, restp)
- case tp =>
- tp
- }
- ctor updateInfo tp
- params
- } //updateConstructorParams
-
- /**
- */
- def updateConstructorDef(ctor: DefDef): (List[Tree], List[Symbol]) = {
- val DefDef(mods, name, tparams, List(vparams), tpt, rhs) = ctor
- val newparams = updateConstructorParams(vparams)
- val vparams0 = newparams map (sym => ValDef(sym) setType sym.tpe)
- val ctorDef = treeCopy.DefDef(ctor, mods, name, tparams, List(vparams0 ::: vparams), tpt, rhs)
- val accessors = for (sym <- newparams) yield {
- val acc = clazz.newValue(sym.pos, sym.name)
- .setFlag(SYNTHETIC | PARAMACCESSOR | PRIVATE | LOCAL)
- .setInfo(sym.tpe)
- clazz.info.decls enter acc
- acc
- }
- val accDefs = accessors map (sym => ValDef(sym) setType sym.tpe)
- (ctorDef :: accDefs, accessors)
- } //updateConstructorDef
-
- val impl = cdef.impl
- val (List(ctor: DefDef), body1) = impl.body partition (t =>
- t.isDef && t.symbol.isPrimaryConstructor)
- val (defs, accessors) = updateConstructorDef(ctor)
- val impl1 = treeCopy.Template(impl, impl.parents, impl.self, defs ::: body1)
- val (from, to) = /*List.unzip*/(
- for (obj <- captured ::: List(outer))
- yield (obj, toInterface(obj))
- ) unzip
- //val substNames = new TreeNameSubstituter(name, clazz)
- val substTypeRefs = new TreeTypeRefSubstituter(clazz)
- val substAccs = new TreeAccessorSubstituter(clazz, from, accessors)
- val substTypes = new TreeOuterSubstituter(from, to)
- val substSyms = new TreeSymSubstituter(from, to)
- val t1 = ClassDef(clazz, substSyms(substTypes(substAccs(substTypeRefs(impl1)))))
- //println("mkClosureDef: t(untyped)=\n"+nodeToString(t1))
- val t = localTyper typed t1
- detachedClosure(clazz) = t.asInstanceOf[ClassDef]
- //println("mkClosureDef: t(typed)=\n"+nodeToString(t))
- t
- } //mkClosureDef
-
- /** <p>
- * Given a class <code>C</code> with member <code>x</code>
- * which is (remotely) referenced from inside a detached closure:
- * </p><pre>
- * class C extends .. {
- * var x: Int
- * }</pre>
- * <p>
- * the method <code>addProxy</code> generates the following two
- * proxy definitions (used later in method <code>mkClosureApply</code>
- * to generate object proxies):
- * </p><pre>
- * trait C$proxy extends java.rmi.Remote {
- * def x(): Int
- * def x_=(x$1: Int): Unit
- * }
- * class C$proxyImpl
- * extends java.rmi.server.UnicastRemoteObject
- * with C$proxy with java.rmi.server.Unreferenced {
- * def this(x$0: String, x$1: C): C$ProxyImpl = ..
- * def x(): Int = this.x$1.x()
- * def x_=(x$1: Int): Unit = this.x$1.x_=(x$1)
- * def unreferenced(): Unit = RemoteRef.unbind(this.x$0)
- * }</pre>
- */
- private def addProxy(closure: Symbol, clazz: Symbol) {
- // the Sun RMI compiler crashes with the error message
- // "error: An error has occurred in the compiler; ..." with trace
- // "sun.tools.java.CompilerError: getInnerClassField" if the
- // generated proxy class does not belong to the top-level scope.
- val proxyOwner = clazz.toplevelClass.owner //clazz.owner
-
- if (DEBUG)
- println("\nadd proxy for "+clazz+" in "+proxyOwner)//debug
-
- val (proxyIntf, proxyImpl, proxyMap) = proxies get clazz match {
- case Some(proxy) =>
- proxy
- case None =>
- val iface =
- proxyOwner.newClass(clazz.pos, encode(clazz.name.decode + PROXY_SUFFIX))
- iface.sourceFile = clazz.sourceFile
- iface setFlag (ABSTRACT | TRAIT | INTERFACE) // Java interface
- val iparents = List(ObjectClass.tpe, RemoteClass.tpe)
- iface setInfo ClassInfoType(iparents, newScope, iface)
- // methods must throw RemoteException
- iface addAnnotation remoteAnnotationInfo
-
- val iclaz =
- proxyOwner.newClass(clazz.pos, encode(iface.name.decode + IMPL_SUFFIX))
- iclaz.sourceFile = clazz.sourceFile
- iclaz setFlag (SYNTHETIC | FINAL)
- // Variant 1: rebind/unbind
- val cparents = List(UnicastRemoteObjectClass.tpe, iface.tpe, UnreferencedClass.tpe)
- // Variant 2: un-/exportObject
- //val cparents = List(ObjectClass.tpe, iface.tpe, UnreferencedClass.tpe)
- iclaz setInfo ClassInfoType(cparents, newScope, iclaz)
- val proxy = (iface, iclaz, new mutable.HashMap[Symbol, Symbol])
- proxies(clazz) = proxy
- proxy
- }
-
- def addAccessors() {
- def mkGetter(sym: Symbol, name: String): Symbol = {
- val getter = if (sym.isMethod) {
- val meth = sym.cloneSymbol(proxyIntf)
- meth.name = name
- val tsym = meth.tpe.resultType.typeSymbol
- if (proxies contains tsym)
- meth updateInfo MethodType(List(), toInterface(tsym).tpe)
- meth
- }
- else {
- val meth = proxyIntf.newMethod(sym.pos, nme.getterName(sym.originalName))
- meth setFlag ACCESSOR
- meth setInfo MethodType(List(), toValueClass(sym.tpe))
- meth
- }
- getter setFlag ABSTRACT
- getter resetFlag FINAL
- getter
- }
- def mkSetter(sym: Symbol): Symbol = {
- val setter = proxyIntf.newMethod(sym.pos, nme.getterToSetter(sym.originalName))
- setter setFlag (sym.flags & ~(PRIVATE | LOCAL) | ACCESSOR | lateDEFERRED)
- val param = setter.newSyntheticValueParam(toValueClass(sym.tpe))
- setter setInfo MethodType(List(param), UnitClass.tpe)
- setter setFlag ABSTRACT
- setter resetFlag FINAL
- setter
- }
- def create(owner: Symbol, clazz: Symbol) {
- val funcs = capturedFuncs(owner).toList
- funcs find (_.isConstructor) match {
- case Some(sym) if capturedFuncs contains sym.owner =>
- create(sym.owner, clazz)
- case _ =>
- }
- val newfuncs = funcs filterNot (proxyMap.valuesIterator.toList contains)
- val (members, others) = newfuncs partition (clazz isSubClass _.owner)
- val outers = others filter (sym =>
- (clazz isNestedIn sym.owner) && clazz.isClass)
- for (sym <- outers) {
- val sym1 = mkGetter(sym, sym.fullName('$'))
- proxyIntf.info.decls enter sym1
- proxyMap(sym1) = sym
- }/*
- for (sym <- outers if capturedCallers contains sym;
- caller <- capturedCallers(sym)) {
- val sym1 = mkGetter(sym, caller.nameString+'$'+sym.nameString)
- if (clazz.isAnonymousClass)
- println("[2] clazz="+clazz+", sym1="+sym1)
- proxyIntf.info.decls enter sym1
- proxyMap(sym1) = sym
- }*/
- for (sym <- members if !sym.isConstructor) {
- val sym1 = mkGetter(sym, sym.originalName.decode)
- proxyIntf.info.decls enter sym1
- proxyMap(sym1) = sym
- }
- for (sym <- members if isRefClass(sym.tpe)) {
- val sym1 = mkSetter(sym)
- proxyIntf.info.decls enter sym1
- proxyMap(sym1) = sym
- }
- }
- create(closure, clazz)
- }
-
- addAccessors
- if (DEBUG) {
- val xs = proxyMap.keysIterator.toList
- println("\tadded "+proxyIntf+
- "\n\twith "+xs.mkString(", ")+" ["+xs.length+"]")
- }
- } //addProxy
-
- def genProxy(clazz: Symbol) {
- val (proxyIntf, proxyImpl, proxyMap) = proxies(clazz)
-
- // generate proxy interface
- val ifaceBody = proxyMap.keysIterator.toList map { DefDef(_, EmptyTree) }
- val ifaceParents =
- proxyIntf.info.parents map (t => TypeTree(t) setPos proxyIntf.pos)
- val ifaceTmpl = Template(ifaceParents, emptyValDef, ifaceBody)
- val ifaceDef = localTyper typed ClassDef(proxyIntf, ifaceTmpl)
-
- // generate proxy implementation
- // Variant 1: rebind/unbind
- val param1 =
- proxyImpl.newValueParameter(proxyImpl.pos, freshName("x$"))
- .setFlag(SYNTHETIC | PARAMACCESSOR | PRIVATE | LOCAL)
- .setInfo(StringClass.tpe)
- proxyImpl.info.decls enter param1
-
- val param2 =
- proxyImpl.newValueParameter(proxyImpl.pos, freshName("x$"))
- .setFlag(SYNTHETIC | PARAMACCESSOR | PRIVATE | LOCAL)
- .setInfo(clazz.tpe)
- proxyImpl.info.decls enter param2
-
- val unreferenced =
- proxyImpl.newMethod(proxyImpl.pos, nme_unreferenced)
- .setInfo(MethodType(List(), UnitClass.tpe))
- proxyImpl.info.decls enter unreferenced
-
- val proxyBody =
- DefDef(unreferenced, List(List()), Block(
- List(Apply( //stats
- Select(gen.mkAttributedRef(DebugModule), "info"),
- List(Apply(
- Select(Literal(Constant("unreferenced: ")), "$plus"),
- // Variant 1: rebind/unbind
- List(Select(This(proxyImpl), param1.name))
- // Variant 2: un-/exportObject
- //List(This(proxyImpl))
- ))
- )),
- Apply( //expr
- Select(gen.mkAttributedRef(RemoteRefModule), nme_unbind),
- // Variant 1: rebind/unbind
- List(Select(This(proxyImpl), param1.name))
- // Variant 2: un-/exportObject
- //List(This(proxyImpl))
- )
- )) :: (
- for (sym <- proxyIntf.info.decls.toList) yield {
- val sym1 = sym.cloneSymbol(proxyImpl)
- sym1 resetFlag (ABSTRACT | DEFERRED | lateDEFERRED)
- proxyImpl.info.decls enter sym1
- DefDef(sym1, {
- val sym2 = proxyMap(sym)
- var t = Select(This(proxyImpl), param2)
- var outerAcc =
- if (sym2.owner isSubClass param2) None
- else param2.info.decls.toList find (_.isOuterAccessor)
- while (!outerAcc.isEmpty) {
- t = Select(t, outerAcc.get)
- val outerClass = outerAcc.get.tpe.resultType.typeSymbol
- outerAcc =
- if (sym2.owner == outerClass) None
- else outerClass.info.decls.toList find (_.isOuterAccessor)
- }
- val sel = Select(t, sym2)
- if (sym2.isMethod) {
- Apply(sel, sym1.paramss(0) map { Ident(_) })
- }
- else if (isRefClass(sym2.tpe)) {
- val sel1 = Select(sel, nme.elem)
- if (sym1.tpe.paramTypes.length == 0) sel1
- else Assign(sel1, Ident(sym1.paramss(0)(0)))
- }
- else
- sel
- })
- })
- val proxyParents =
- proxyImpl.info.parents map (t => TypeTree(t) setPos proxyImpl.pos)
- val proxyTmpl = Template(proxyParents,
- emptyValDef, NoMods,
- // Variant 1: rebind/unbind
- /*vparamss*/ List(List(ValDef(param1), ValDef(param2))),
- // Variant 2: un-/exportObject
- ///*vparamss*/ List(List(ValDef(param2))),
- /*argss*/ List(List()), proxyBody, NoPosition)
- val proxyDef = localTyper typed ClassDef(proxyImpl, proxyTmpl)
-
- // remember definitions to be added by transformStats
- val proxyOwner = proxyIntf.owner
- if (! (proxyInterfaceDefs contains proxyOwner))
- proxyInterfaceDefs(proxyOwner) = new ListBuffer
- proxyInterfaceDefs(proxyOwner) += ifaceDef
- proxyInterfaceDefs(proxyOwner) += proxyDef
- } //genProxy
-
- private def freshName(s: String): Name =
- unit.fresh.newName(s)
-
- private def freshProxyName: Name =
- unit.fresh.newName(PROXY_PREFIX)
-
- /** <p>
- * Given a detached closure applied in some environment consisting
- * of an enclosing class <code>C</code> and some local variables
- * <code>x$1</code> (immutable) and <code>y$1</code> (mutable):
- * </p><pre>
- * scala.remoting.detach.apply({
- * (new $anonfun$1(C.this, x$1, y$1): Function1)
- * })</pre>
- * <p>
- * the above code is transformed to the following block:
- * </p><pre>
- * {
- * val proxy$1: C$Proxy =
- * RemoteRef.bind("C/proxy$1", new C$ProxyImpl(C.this))
- * val proxy$2: RemoteIntRef =
- * RemoteRef.bind("C/proxy$2", new RemoteIntRefImpl(y$1))
- * (new $anonfun$1detach(proxy$1, x$1, proxy$2): Function1)
- * }
- * </pre>
- */
- private def mkClosureApply(tree: Tree): Tree = {
- val apply @ Apply(fun, args) = detachedClosureApply(tree)
- assert(fun.symbol.isConstructor, fun.symbol+" is not a constructor")//debug
- val clazz = apply.tpe.typeSymbol
- val thiz = capturedThisClass(clazz)
- val cdef = mkClosureDef(clazz)
- val uid = localTyper typed {
- val sym = currentOwner.newValue(tree.pos, freshName("uid$"))
- .setFlag(SYNTHETIC)
- .setInfo(StringClass.tpe)
- val rhs = Apply(Select(
- Apply(
- Select(New(TypeTree(UIDClass.tpe)), nme.CONSTRUCTOR),
- List()
- ),
- "toString"
- ), List())
- ValDef(sym, rhs)
- }
- def cast(tree: Tree, tpe: Type): Tree =
- Apply(
- TypeApply(
- Select(tree, Object_asInstanceOf),
- List(TypeTree(tpe))
- ),
- List()
- )
-
- def mkProxy(csym: Symbol): ValDef = {
- val (iface, proxy, _) = proxies(csym)
- val sym = currentOwner.newValue(csym.pos, freshProxyName)
- .setFlag(SYNTHETIC)
- .setInfo(iface.tpe)
- val bind = Select(gen.mkAttributedRef(RemoteRefModule), nme_bind)
- val name = Apply(
- Select(Literal(Constant(sym.fullName('/')+"$")), String_+),
- List(Ident(uid.symbol))
- )
- val thiz =
- if (csym.isModule) gen.mkAttributedIdent(csym)
- else gen.mkAttributedThis(csym)
- val args = List(name,
- Apply(Select(New(TypeTree(proxy.tpe)), nme.CONSTRUCTOR),
- // Variant 1: rebind/unbind
- List(name, thiz)))
- // Variant 2: un-/exportObject
- //List(thiz)))
- val rhs = cast(Apply(bind, args), iface.tpe)
- ValDef(sym, rhs)
- }
-
- def mkObjProxies: List[ValDef] = {
- val (outer, captured) =
- capturedObjects(clazz).toList partition (thiz.==)
- (captured ::: outer) map mkProxy
- }
-
- def mkArgProxies: Map[Symbol, ValDef] = {
- def retRefs(t: Tree): List[Tree] = t match {
- case Apply(fun, args) =>
- args flatMap retRefs
- case id @ Ident(_) =>
- if (isRefClass(id.tpe)) List(id) else Nil
- case Template(_, _, body) =>
- body flatMap retRefs
- case New(tpt) =>
- retRefs(tpt)
- case thiz @ This(_) =>
- if (isRefClass(thiz.tpe)) List(thiz) else Nil
- case _ =>
- throw new Error("Internal error: " + t.getClass)
- }
- new immutable.HashMap[Symbol, ValDef] ++ (
- for (variable <- retRefs(apply)) yield {
- val param = variable.symbol
- assert(isRefClass(param.tpe), param)
- val proxy = currentOwner.newValue(param.pos, freshProxyName)
- .setFlag(SYNTHETIC)
- .setInfo(mkRemoteRefClass(param.tpe))
- val bind = Select(gen.mkAttributedRef(RemoteRefModule), nme_bind)
- //val name = Literal(Constant(proxy.fullName('/')))
- val name = Apply(
- Select(Literal(Constant(proxy.fullName('/')+"$")), String_+),
- List(Ident(uid.symbol))
- )
- val ts = param.tpe.typeSymbol
- val args = List(name,
- Apply(
- Select(New(TypeTree(remoteRefImpl(ts).tpe)), nme.CONSTRUCTOR),
- // Variant 1: rebind/unbind
- List(name, variable)))
- // Variant 2: un-/exportObject
- //List(variable)))
- val rhs = cast(Apply(bind, args), remoteRefClass(ts).tpe)
- (param, ValDef(proxy, rhs))
- }
- )
- } //mkArgProxies
-
- /** <p>
- * Method <code>mkClosureInstance</code> updates the list of actual
- * parameters passed to the closure instance.
- * </p>
- */
- def mkClosureInstance(objProxies: List[ValDef],
- argProxies: Map[Symbol, ValDef]): Tree = {
- fun.tpe = fun.symbol.tpe
- val args0 = objProxies map (tree => Ident(tree.symbol))
- val hasOuter = !args.isEmpty && (args.head.symbol.tpe == thiz.tpe)
- val args1 = (if (hasOuter) args.tail else args) map (arg =>
- argProxies get arg.symbol match {
- case Some(t) => Ident(t.symbol)
- case None => arg
- }
- )
- if (DEBUG)
- println("\nmkClosureInstance:\n\targs0="+args0+"\n\targs1="+args1)
- val t = Typed(
- Apply(fun, args0 ::: args1),
- //TypeTree(clazz.info.parents.tail.head) //interface (2.7.x)
- TypeTree(clazz.info.parents.head) //interface (2.8.x)
- )
- localTyper typed t
- } //mkClosureInstance
-
- val objProxies = mkObjProxies
- val argProxies = mkArgProxies
- val stats = uid :: objProxies ::: argProxies.valuesIterator.toList
- val expr = mkClosureInstance(objProxies, argProxies)
- localTyper typed Block(stats, expr)
- } //mkClosureApply
-
- override def transform(tree: Tree): Tree = {
- def withInConstructorFlag(inConstructorFlag: Long)(f: => Tree): Tree = {
- val savedInConstructorFlag = this.inConstructorFlag
- this.inConstructorFlag = inConstructorFlag
- val t = f
- this.inConstructorFlag = savedInConstructorFlag
- t
- }
- if (!isEnabled) return tree
- tree match {
- case ClassDef(mods, name, tparams, impl) =>
- val tree1 = super.transform(tree)
- if (!reporter.hasErrors && (capturedThisClass contains tree1.symbol))
- mkClosureDef(tree1.symbol)
- else
- tree1
-
- case Apply(Select(_, _), _) =>
- val tree1 = super.transform(tree)
- if (!reporter.hasErrors && (detachedClosureApply contains tree1))
- atPos(tree1.pos)(mkClosureApply(tree1))
- else
- tree1
-
- case Template(_, _, _) =>
- withInConstructorFlag(0) { super.transform(tree) }
-
- case _ =>
- super.transform(tree)
- }
- }
-
- /** Transform statements and add detached definitions to them. */
- override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
- val stats1 = super.transformStats(stats, exprOwner)
- val newDefs = {
- val buf = new ListBuffer[Tree]
- if (proxyInterfaceDefs contains currentOwner)
- buf ++= proxyInterfaceDefs(currentOwner).toList
- buf.toList
- }
- if (newDefs.isEmpty) stats1 else stats1 ::: newDefs
- }
-
- private def genProxies() {
- def printDebugInfo() {
- println("\ncompilation unit : "+unit)
- for ((sym, _) <- detachedClosure) {
- println("closure to detach: "+sym+" (owner: "+sym.owner+")")
- println("captured this : "+capturedThisClass(sym))
- val objs = capturedObjects get sym match {
- case Some(ss) => ss.toList
- case None => Nil
- }
- println("captured objects : "+objs.mkString(", ")+" ["+objs.length+"]")
- }
- println("\ncalled functions :")
- for (sym <- capturedFuncs.keysIterator) {
- val xs = capturedFuncs(sym).toList map (s => {
- val callers = capturedCallers get s match {
- case Some(ss) => "|"+ss.toList.mkString(",")
- case None => ""
- }
- s+"("+s.owner.name+callers+")"
- })
- println("\t"+sym+" -> "+xs.mkString(", ")+" ["+xs.length+"]")
- }
- }
- def printDebugInfo2() {
- println("\nproxy classes :")
- for (sym <- proxies.keysIterator)
- println("\t"+sym+"("+sym.tpe+") -> "+proxies(sym))
- }
- if (DEBUG)
- printDebugInfo
- for ((closure, _) <- detachedClosure;
- captured <- capturedObjects(closure))
- addProxy(closure, captured)
- if (DEBUG)
- printDebugInfo2
- for (sym <- proxies.keysIterator)
- genProxy(sym)
- } //genProxies
-
- /** <p>
- * Method <code>transformUnit</code> performs three successive operations:
- * </p>
- * <ol>
- * <li>it first gathers information about free objects and detached
- * closures;</li>
- * <li>it then adds proxies for free objects;</li>
- * <li>finally, it transforms detached closures (both definition and
- * instantiation).</li>
- * </ol>
- */
- override def transformUnit(unit: CompilationUnit) {
- freeObjTraverser.traverse(unit.body)
- if (!reporter.hasErrors) genProxies
- super.transformUnit(unit)
- }
- }
-
-}
-
diff --git a/src/detach/plugin/scala/tools/detach/DetachPlugin.scala b/src/detach/plugin/scala/tools/detach/DetachPlugin.scala
deleted file mode 100644
index c6e18b7abe..0000000000
--- a/src/detach/plugin/scala/tools/detach/DetachPlugin.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Stephane Micheloud
- */
-
-package scala.tools.detach
-
-import scala.tools.nsc.{Global, Phase}
-import scala.tools.nsc.plugins.{Plugin, PluginComponent}
-
-class DetachPlugin(val global: Global) extends Plugin {
- import global._
-
- val name = "detach"
- val description = "Perform detaching of remote closures"
-
- object detach extends {
- val global = DetachPlugin.this.global
- val runsAfter = List("lambdalift")
- override val runsBefore = List("constructors")
- } with Detach
-
- val components = List[PluginComponent](detach)
-
- def setEnabled(flag: Boolean) { detach.isEnabled = flag }
-
- override def processOptions(options: List[String], error: String => Unit) = {
- var enabled = false
- for (option <- options) {
- if (option == "enable") {
- enabled = true
- } else {
- error("Option not understood: "+option)
- }
- }
- setEnabled(enabled)
- }
-
- override val optionsHelp: Option[String] =
- Some(" -P:detach:enable Enable detaching of remote closures")
-}
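For reference, a hypothetical end-to-end use of the plugin removed here, combining the -P:detach:enable option documented above with the scala.remoting.detach entry point handled by Detach.scala (the jar name is illustrative):

    // scalac -Xplugin:scala-detach-plugin.jar -P:detach:enable Example.scala

    import scala.remoting.detach

    class C {
      var x = 0
      // The detach phase rewrites this closure into a $anonfun$N$detach class
      // whose captured environment (C.this and y) is reached through RMI proxies.
      def job(y: Int) = detach { (z: Int) => x + y + z }
    }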
diff --git a/src/detach/plugin/scalac-plugin.xml b/src/detach/plugin/scalac-plugin.xml
deleted file mode 100644
index 6c8600e331..0000000000
--- a/src/detach/plugin/scalac-plugin.xml
+++ /dev/null
@@ -1,4 +0,0 @@
-<plugin>
- <name>detach</name>
- <classname>scala.tools.detach.DetachPlugin</classname>
-</plugin>
diff --git a/src/eclipse/README.md b/src/eclipse/README.md
index 39a3f457a0..73aa270b77 100644
--- a/src/eclipse/README.md
+++ b/src/eclipse/README.md
@@ -29,7 +29,7 @@ inside `src/library` with the following contents:
version.number=2.10.0-20120603-141530-b34313db72
maven.version.number=2.10.0-SNAPSHOT
osgi.version.number=2.10.0.v20120603-141530-b34313db72
- copyright.string=Copyright 2002-2012 LAMP/EPFL
+ copyright.string=Copyright 2002-2013 LAMP/EPFL
4. Project files are tracked by Git, so adding them to `.gitignore` won't prevent them
from being shown as dirty in `git status`. You can still ignore them by telling Git to
@@ -44,7 +44,7 @@ If you want to go back to normal (for instance, to commit your changes to projec
DETAILS
=======
-The compiler project depends on the library, reflect, asm and fjbg projects. The
+The compiler project depends on the library, reflect, and asm projects. The
builder will take care of the correct ordering, and changes in one project will
be picked up by the dependent projects.
diff --git a/src/eclipse/fjbg/.classpath b/src/eclipse/fjbg/.classpath
deleted file mode 100644
index 3e2f55f48a..0000000000
--- a/src/eclipse/fjbg/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
- <classpathentry kind="src" path="fjbg"/>
- <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
- <classpathentry kind="output" path="libs-classes-fjbg"/>
-</classpath>
diff --git a/src/eclipse/interactive/.classpath b/src/eclipse/interactive/.classpath
new file mode 100644
index 0000000000..870cc67aec
--- /dev/null
+++ b/src/eclipse/interactive/.classpath
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="interactive"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scaladoc"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="output" path="build-quick-interactive"/>
+</classpath>
diff --git a/src/eclipse/interactive/.project b/src/eclipse/interactive/.project
new file mode 100644
index 0000000000..1d30e0c001
--- /dev/null
+++ b/src/eclipse/interactive/.project
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>interactive</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-interactive</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/interactive</locationURI>
+ </link>
+ <link>
+ <name>interactive</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/interactive</locationURI>
+ </link>
+ <link>
+ <name>lib</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/lib</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/partest/.classpath b/src/eclipse/partest/.classpath
index 7936d4d4b4..a990c5a1b3 100644
--- a/src/eclipse/partest/.classpath
+++ b/src/eclipse/partest/.classpath
@@ -8,8 +8,8 @@
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="lib" path="lib/ant/ant.jar"/>
<classpathentry kind="lib" path="lib/jline.jar"/>
- <classpathentry kind="lib" path="lib/msil.jar"/>
<classpathentry combineaccessrules="false" kind="src" path="/asm"/>
<classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/repl"/>
<classpathentry kind="output" path="build-quick-partest"/>
</classpath>
diff --git a/src/eclipse/repl/.classpath b/src/eclipse/repl/.classpath
new file mode 100644
index 0000000000..30744da306
--- /dev/null
+++ b/src/eclipse/repl/.classpath
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="repl"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/lib/jline.jar"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/asm"/>
+ <classpathentry kind="output" path="build-quick-repl"/>
+</classpath>
diff --git a/src/eclipse/repl/.project b/src/eclipse/repl/.project
new file mode 100644
index 0000000000..ea188bc262
--- /dev/null
+++ b/src/eclipse/repl/.project
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>repl</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-repl</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/repl</locationURI>
+ </link>
+ <link>
+ <name>lib</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/lib</locationURI>
+ </link>
+ <link>
+ <name>repl</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/repl</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/scala-compiler/.classpath b/src/eclipse/scala-compiler/.classpath
index d438d3e610..0488a0dc39 100644
--- a/src/eclipse/scala-compiler/.classpath
+++ b/src/eclipse/scala-compiler/.classpath
@@ -3,12 +3,10 @@
<classpathentry kind="src" path="compiler"/>
<classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
- <classpathentry combineaccessrules="false" kind="src" path="/fjbg"/>
<classpathentry combineaccessrules="false" kind="src" path="/asm"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="lib" path="lib/ant/ant.jar"/>
<classpathentry kind="lib" path="lib/jline.jar"/>
- <classpathentry kind="lib" path="lib/msil.jar"/>
<classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
<classpathentry kind="output" path="build-quick-compiler"/>
</classpath>
diff --git a/src/eclipse/scaladoc/.classpath b/src/eclipse/scaladoc/.classpath
new file mode 100644
index 0000000000..f12ba4bb2c
--- /dev/null
+++ b/src/eclipse/scaladoc/.classpath
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry combineaccessrules="false" kind="src" path="/partest"/>
+ <classpathentry kind="src" path="scaladoc"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/asm"/>
+ <classpathentry kind="output" path="build-quick-scaladoc"/>
+</classpath>
diff --git a/src/eclipse/fjbg/.project b/src/eclipse/scaladoc/.project
index 8acea9f5fe..bf7649039f 100644
--- a/src/eclipse/fjbg/.project
+++ b/src/eclipse/scaladoc/.project
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
- <name>fjbg</name>
+ <name>scaladoc</name>
<comment></comment>
<projects>
</projects>
@@ -17,14 +17,19 @@
</natures>
<linkedResources>
<link>
- <name>fjbg</name>
+ <name>build-quick-scaladoc</name>
<type>2</type>
- <locationURI>SCALA_BASEDIR/src/fjbg</locationURI>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/scaladoc</locationURI>
</link>
<link>
- <name>libs-classes-fjbg</name>
+ <name>lib</name>
<type>2</type>
- <locationURI>SCALA_BASEDIR/build/libs/classes/fjbg</locationURI>
+ <locationURI>SCALA_BASEDIR/lib</locationURI>
+ </link>
+ <link>
+ <name>scaladoc</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/scaladoc</locationURI>
</link>
</linkedResources>
</projectDescription>
diff --git a/src/eclipse/scalap/.classpath b/src/eclipse/scalap/.classpath
index 16737bd9cd..0a55745702 100644
--- a/src/eclipse/scalap/.classpath
+++ b/src/eclipse/scalap/.classpath
@@ -7,7 +7,6 @@
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="lib" path="lib/ant/ant.jar"/>
<classpathentry kind="lib" path="lib/jline.jar"/>
- <classpathentry kind="lib" path="lib/msil.jar"/>
<classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
<classpathentry kind="output" path="build-quick-scalap"/>
</classpath>
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java b/src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java
deleted file mode 100644
index 9856dc7311..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java
+++ /dev/null
@@ -1,195 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.IOException;
-
-/**
- * Context in which FJBG executes. Used both as a factory for most
- * FJBG classes and as a repository for other factories.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class FJBGContext {
- /** Class file major version */
- final int MAJOR_VERSION;
-
- /** Class file minor version */
- final int MINOR_VERSION;
-
- public FJBGContext() {
- this(45, 3);
- }
-
- public FJBGContext(int major, int minor) {
- MAJOR_VERSION = major;
- MINOR_VERSION = minor;
- }
-
- // Factory methods
- //////////////////////////////////////////////////////////////////////
-
- public JClass JClass(int accessFlags,
- String name,
- String superclassName,
- String[] interfaceNames,
- String sourceFileName) {
- return new JClass(this,
- accessFlags,
- name,
- superclassName,
- interfaceNames,
- sourceFileName);
- }
-
- public JClass JClass(DataInputStream stream)
- throws IOException {
- return new JClass(this, stream);
- }
-
- public JConstantPool JConstantPool() {
- return new JConstantPool(this);
- }
-
- public JConstantPool JConstantPool(DataInputStream stream)
- throws IOException {
- return new JConstantPool(this, stream);
- }
-
- public JField JField(JClass owner,
- int accessFlags,
- String name,
- JType type) {
- return new JField(this,
- owner,
- accessFlags,
- name,
- type);
- }
-
- public JField JField(JClass owner, DataInputStream stream)
- throws IOException {
- return new JField(this, owner, stream);
- }
-
- public JMethod JMethod(JClass owner,
- int accessFlags,
- String name,
- JType returnType,
- JType[] argTypes,
- String[] argNames) {
- return new JMethod(this,
- owner,
- accessFlags,
- name,
- returnType,
- argTypes,
- argNames);
- }
-
- public JMethod JMethod(JClass owner,
- int accessFlags,
- String name,
- JMethodType type,
- String[] argNames) {
- return JMethod(owner,
- accessFlags,
- name,
- type.getReturnType(),
- type.getArgumentTypes(),
- argNames);
- }
-
- public JMethod JMethod(JClass owner, DataInputStream stream)
- throws IOException {
- return new JMethod(this, owner, stream);
- }
-
- public JLocalVariable JLocalVariable(JMethod owner,
- JType type,
- String name,
- int index) {
- return new JLocalVariable(this, owner, type, name, index);
- }
-
- public JCode JCode(JClass clazz, JMethod owner) {
- return new JExtendedCode(this, clazz, owner);
- }
-
- public JCode JCode(JClass clazz, JMethod owner, DataInputStream stream)
- throws IOException {
- return new JCode(this, clazz, owner, stream);
- }
-
- public JAttributeFactory JAttributeFactory() {
- return new JAttributeFactory(this);
- }
-
- // Attributes
- public JCodeAttribute JCodeAttribute(JClass clazz, JMethod owner) {
- return new JCodeAttribute(this, clazz, owner);
- }
-
- public JEnclosingMethodAttribute JEnclosingMethodAttribute(JClass clazz,
- String className,
- String methodName,
- JType methodType) {
- return new JEnclosingMethodAttribute(this, clazz, className, methodName, methodType);
- }
-
- public JExceptionsAttribute JExceptionsAttribute(JClass clazz,
- JMethod owner) {
- return new JExceptionsAttribute(this, clazz, owner);
- }
-
- public JLineNumberTableAttribute JLineNumberTableAttribute(JClass clazz,
- JCode owner) {
- return new JLineNumberTableAttribute(this, clazz, owner);
- }
-
- public JLocalVariableTableAttribute JLocalVariableTableAttribute(JClass clazz,
- JCode owner) {
- return new JLocalVariableTableAttribute(this, clazz, owner);
- }
-
- public JOtherAttribute JOtherAttribute(JClass clazz,
- Object owner,
- String name,
- byte[] contents,
- int length) {
- return new JOtherAttribute(this, clazz, owner, name, contents, length);
- }
-
- public JOtherAttribute JOtherAttribute(JClass clazz,
- Object owner,
- String name,
- byte[] contents) {
- return JOtherAttribute(clazz, owner, name, contents, contents.length);
- }
-
- public JSourceFileAttribute JSourceFileAttribute(JClass clazz,
- String sourceFileName) {
- return new JSourceFileAttribute(this, clazz, sourceFileName);
- }
-
- public JStackMapTableAttribute JStackMapTableAttribute(JClass clazz,
- JCode owner) {
- return new JStackMapTableAttribute(this, clazz, owner);
- }
-
- /// Repository
- //////////////////////////////////////////////////////////////////////
-
- protected JAttributeFactory jAttributeFactory = null;
- public JAttributeFactory getJAttributeFactory() {
- if (jAttributeFactory == null)
- jAttributeFactory = JAttributeFactory();
- return jAttributeFactory;
- }
-}
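
The FJBGContext removed above is the single entry point to the FJBG library: classes, fields, methods, code buffers and attributes are all obtained through its factory methods rather than direct constructors. A minimal sketch of that factory style, assuming the fjbg classes deleted by this commit are still on the classpath; the class name, field name and output path are purely illustrative:

    import ch.epfl.lamp.fjbg.*;

    public class FjbgFactoryDemo {
        public static void main(String[] args) throws Exception {
            FJBGContext context = new FJBGContext();   // defaults to class file version 45.3

            // Every FJBG object is created through the context, never with `new` directly.
            JClass clazz = context.JClass(
                JAccessFlags.ACC_PUBLIC | JAccessFlags.ACC_SUPER,
                "Answer",              // illustrative class name
                "java.lang.Object",    // superclass; whether FJBG expects the source or the
                                       // internal (slash-separated) form is not shown here
                JClass.NO_INTERFACES,
                "Answer.java");        // becomes the SourceFile attribute

            // The class delegates back to the context when members are added.
            clazz.addNewField(JAccessFlags.ACC_PUBLIC | JAccessFlags.ACC_STATIC,
                              "answer", JType.INT);

            clazz.writeTo("Answer.class");   // freezes the class and serializes it
        }
    }
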
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java b/src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java
deleted file mode 100644
index 01d8cc9a7e..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Definition of access flags for fields, methods and classes.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public interface JAccessFlags {
- public static int ACC_PUBLIC = 0x0001;
- public static int ACC_PRIVATE = 0x0002;
- public static int ACC_PROTECTED = 0x0004;
- public static int ACC_STATIC = 0x0008;
- public static int ACC_FINAL = 0x0010;
- public static int ACC_SUPER = 0x0020;
- public static int ACC_VOLATILE = 0x0040;
- public static int ACC_TRANSIENT = 0x0080;
- public static int ACC_NATIVE = 0x0100;
- public static int ACC_INTERFACE = 0x0200;
- public static int ACC_ABSTRACT = 0x0400;
- public static int ACC_STRICT = 0x0800;
- public static int ACC_SYNTHETIC = 0x1000;
- public static int ACC_ANNOTATION= 0x2000;
- public static int ACC_ENUM = 0x4000;
-
- // 1.5 specifics
- public static int ACC_BRIDGE = 0x0040;
- public static int ACC_VARARGS = 0x0080;
-}
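
These constants are plain bit masks: modifiers are combined with bitwise OR and tested with bitwise AND, exactly as JClass.isPublic() and friends do further down in this patch. Note that ACC_VOLATILE/ACC_BRIDGE and ACC_TRANSIENT/ACC_VARARGS deliberately share values (0x0040 and 0x0080); which reading applies depends on whether the flags describe a field or a method. A tiny illustration (the helper method is hypothetical):

    import ch.epfl.lamp.fjbg.JAccessFlags;

    class FlagDemo {
        // Combine with |, query with &, mirroring JClass.isPublic()/isStatic().
        static boolean isPublicStatic(int accessFlags) {
            int mask = JAccessFlags.ACC_PUBLIC | JAccessFlags.ACC_STATIC;
            return (accessFlags & mask) == mask;
        }
    }
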
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java b/src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java
deleted file mode 100644
index 61a04523ca..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Types for Java arrays.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JArrayType extends JReferenceType {
- protected final JType elementType;
- protected String signature = null;
-
- public JArrayType(JType elementType) {
- this.elementType = elementType;
- }
-
- public int getSize() { return 1; }
-
- public String getSignature() {
- if (signature == null)
- signature = "[" + elementType.getSignature();
- return signature;
- }
-
- public String getDescriptor() {
- return getSignature();
- }
-
- public int getTag() { return T_ARRAY; }
-
- public JType getElementType() { return elementType; }
-
- public String toString() {
- return elementType.toString() + "[]";
- }
-
- public boolean isArrayType() { return true; }
-
- public boolean isCompatibleWith(JType other) {
- if (other instanceof JObjectType)
- return (JObjectType)other == JObjectType.JAVA_LANG_OBJECT;
- else if (other instanceof JArrayType)
- return elementType.isCompatibleWith(((JArrayType)other).elementType);
- else return other == JType.REFERENCE;
- }
-
- public static JArrayType BOOLEAN = new JArrayType(JType.BOOLEAN);
- public static JArrayType BYTE = new JArrayType(JType.BYTE);
- public static JArrayType CHAR = new JArrayType(JType.CHAR);
- public static JArrayType SHORT = new JArrayType(JType.SHORT);
- public static JArrayType INT = new JArrayType(JType.INT);
- public static JArrayType FLOAT = new JArrayType(JType.FLOAT);
- public static JArrayType LONG = new JArrayType(JType.LONG);
- public static JArrayType DOUBLE = new JArrayType(JType.DOUBLE);
- public static JArrayType REFERENCE = new JArrayType(JType.REFERENCE);
-}
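
A brief sketch of how these array types behave, based only on the code above and assuming JType follows the standard JVM descriptor letters (so JType.INT prints as "I"); it is illustrative and not part of the patch:

    import ch.epfl.lamp.fjbg.*;

    class ArrayTypeDemo {
        public static void main(String[] args) {
            // Signatures nest by prefixing "[" per dimension: int[] -> "[I", int[][] -> "[[I".
            System.out.println(JArrayType.INT.getSignature());
            System.out.println(new JArrayType(JArrayType.INT).getSignature());

            // Every array type is assignable to java.lang.Object (see isCompatibleWith above).
            System.out.println(JArrayType.INT.isCompatibleWith(JObjectType.JAVA_LANG_OBJECT));
        }
    }
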
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java
deleted file mode 100644
index 6a825beb18..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.*;
-
-/**
- * Abstract superclass for attributes which can be attached to various
- * parts of a class file.
- *
- * Attributes are used for classes (section 4.2), fields (section 4.6),
- * methods (section 4.7) and the Code attribute (section 4.8.3).
- * See sections 4.2 and later of the JVM specification.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public abstract class JAttribute {
- protected final int nameIdx;
-
- static public void writeTo(List/*<JAttribute>*/ attrs, DataOutputStream stream)
- throws IOException {
- stream.writeShort(attrs.size());
- Iterator attrsIt = attrs.iterator();
- while (attrsIt.hasNext()) {
- JAttribute attr = (JAttribute)attrsIt.next();
- attr.writeTo(stream);
- }
- }
-
- static public List/*<JAttribute>*/ readFrom(FJBGContext context,
- JClass clazz,
- Object owner,
- DataInputStream stream)
- throws IOException {
- JAttributeFactory factory = context.getJAttributeFactory();
- int count = stream.readShort();
- ArrayList list = new ArrayList(count);
- for (int i = 0; i < count; ++i)
- list.add(factory.newInstance(clazz, owner, stream));
- return list;
- }
-
- public JAttribute(FJBGContext context, JClass clazz) {
- this.nameIdx = clazz.getConstantPool().addUtf8(getName());
- }
-
- public JAttribute(FJBGContext context, JClass clazz, String name) {
- this.nameIdx = clazz.getConstantPool().addUtf8(name);
- }
-
- abstract public String getName();
-
- /**
- * Write the attribute to a stream.
- */
- public void writeTo(DataOutputStream stream) throws IOException {
- int contentsSize = getSize();
-
- stream.writeShort(nameIdx);
- stream.writeInt(contentsSize);
- int streamSizeBefore = stream.size();
- writeContentsTo(stream);
- int streamSizeDiff = stream.size() - streamSizeBefore;
-
- assert contentsSize == streamSizeDiff
- : "invalid size for attribute " + getName()
- + " given: " + contentsSize
- + " actual: " + streamSizeDiff;
- }
-
- // Note: it is not legal to add data to the constant pool during
- // the execution of any of the following two methods.
- protected abstract int getSize();
- protected abstract void writeContentsTo(DataOutputStream stream)
- throws IOException;
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java b/src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java
deleted file mode 100644
index 33cdce2760..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.InvocationTargetException;
-import java.util.HashMap;
-
-/**
- * Extensible factory to build subclasses of JAttribute based on an
- * attribute name.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JAttributeFactory {
- protected FJBGContext context;
- protected HashMap/*<String, Constructor>*/ constructors = new HashMap();
-
- protected final static Class[] CONSTRUCTOR_ARGS = new Class[] {
- FJBGContext.class,
- JClass.class,
- Object.class,
- String.class,
- int.class,
- DataInputStream.class
- };
-
- protected final static Constructor defaultDefaultConstructor;
- static {
- try {
- defaultDefaultConstructor =
- JOtherAttribute.class.getConstructor(CONSTRUCTOR_ARGS);
- } catch (NoSuchMethodException e) {
- throw new RuntimeException(e);
- }
- }
-
- protected final Constructor defaultConstructor;
-
- public JAttributeFactory(FJBGContext context,
- Constructor defaultConstructor) {
- this.context = context;
- this.defaultConstructor = defaultConstructor;
- registerClass("Code", JCodeAttribute.class);
- registerClass("ConstantValue", JConstantValueAttribute.class);
- registerClass("EnclosingMethod", JEnclosingMethodAttribute.class);
- registerClass("Exceptions", JExceptionsAttribute.class);
- registerClass("InnerClasses", JInnerClassesAttribute.class);
- registerClass("LineNumberTable", JLineNumberTableAttribute.class);
- registerClass("LocalVariableTable", JLocalVariableTableAttribute.class);
- registerClass("SourceFile", JSourceFileAttribute.class);
- registerClass("StackMapTable", JStackMapTableAttribute.class);
- }
-
- public JAttributeFactory(FJBGContext context) {
- this(context, defaultDefaultConstructor);
- }
-
- public void registerClass(String attributeName,
- Class clazz) {
- if (! JAttribute.class.isAssignableFrom(clazz))
- throw new IllegalArgumentException("Not a subclass of JAttribute: "
- + clazz);
-
- try {
- Constructor constr = clazz.getConstructor(CONSTRUCTOR_ARGS);
- constructors.put(attributeName, constr);
- } catch (NoSuchMethodException e) {
- throw new IllegalArgumentException("No appropriate constructor for "
- + clazz);
- }
- }
-
- public JAttribute newInstance(JClass clazz,
- Object owner,
- DataInputStream stream)
- throws IOException {
- String name = clazz.getConstantPool().lookupUtf8(stream.readShort());
- Integer size = new Integer(stream.readInt());
- Constructor constr = (Constructor)constructors.get(name);
- if (constr == null) constr = defaultConstructor;
-
- Object[] args = new Object[] { context, clazz, owner, name, size, stream };
- try {
- return (JAttribute)constr.newInstance(args);
- } catch (InstantiationException e) {
- throw new RuntimeException(e);
- } catch (IllegalAccessException e) {
- throw new RuntimeException(e);
- } catch (InvocationTargetException e) {
- throw new RuntimeException(e);
- }
- }
-}
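
The factory is extensible: unknown attribute names fall back to the JOtherAttribute default constructor, and new attribute kinds can be plugged in with registerClass, provided the subclass exposes the six-argument constructor listed in CONSTRUCTOR_ARGS. A hypothetical example — the "Marker" attribute below does not exist in the JVM specification and is shown only to illustrate the plug-in mechanism:

    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import ch.epfl.lamp.fjbg.*;

    // Hypothetical attribute that simply preserves its raw bytes when a class is read back in.
    public class JMarkerAttribute extends JAttribute {
        private final byte[] contents;

        // Must match CONSTRUCTOR_ARGS exactly so the factory can instantiate it reflectively.
        public JMarkerAttribute(FJBGContext context, JClass clazz, Object owner,
                                String name, int size, DataInputStream stream)
                throws IOException {
            super(context, clazz, name);
            contents = new byte[size];
            stream.readFully(contents);
        }

        public String getName() { return "Marker"; }
        protected int getSize() { return contents.length; }
        protected void writeContentsTo(DataOutputStream stream) throws IOException {
            stream.write(contents);
        }
    }

    // Registration, typically right after obtaining the factory:
    //   context.getJAttributeFactory().registerClass("Marker", JMarkerAttribute.class);
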
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JClass.java b/src/fjbg/ch/epfl/lamp/fjbg/JClass.java
deleted file mode 100644
index bb1538ec23..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JClass.java
+++ /dev/null
@@ -1,420 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.util.*;
-import java.io.*;
-
-/**
- * Representation of a Java class.
- *
- * @author Michel Schinz, Stephane Micheloud
- * @version 1.1
- */
-public class JClass extends JMember {
-
- /** Magic number for Java class files. */
- public final static int MAGIC_NUMBER = 0xCAFEBABE;
-
- protected final JAttributeFactory attributeFactory;
-
- protected final String superclassName;
- protected final String[] interfaceNames;
- protected final String sourceFileName;
- protected final JConstantPool pool;
-
- public final static String[] NO_INTERFACES = new String[0];
-
- protected final LinkedList/*<JMethod>*/ methods = new LinkedList();
- protected final LinkedList/*<JField>*/ fields = new LinkedList();
-
- protected JInnerClassesAttribute innerClasses;
-
- protected int major;
- protected int minor;
-
- /**
- * Creates a new class with its access flags, name, superclass name,
- * interfaces names and source file name initialized to a given value.
- * The constructor also initializes the pool and adds a sourceFileName
- * attribute to the class.
- * @param accessFlags the int representing the access flags of the class.
- * @param name the string representing the name of the class.
- * @param superclassName the string representing the name of the class'
- * superclass.
- * @param interfaceNames the list of strings representing the names of the
- * interfaces implemented by the class.
- * @param sourceFileName name of the file from which the class was compiled.
- */
- protected JClass(FJBGContext context,
- int accessFlags,
- String name,
- String superclassName,
- String[] interfaceNames,
- String sourceFileName) {
- super(context, accessFlags, name);
- this.attributeFactory = context.getJAttributeFactory();
-
- this.major = context.MAJOR_VERSION;
- this.minor = context.MINOR_VERSION;
-
- this.superclassName = superclassName;
- this.interfaceNames = interfaceNames;
- this.sourceFileName = sourceFileName;
- this.pool = context.JConstantPool();
- if (sourceFileName != null)
- addAttribute(context.JSourceFileAttribute(this, sourceFileName));
- }
-
- protected JClass(FJBGContext context, DataInputStream stream)
- throws IOException {
- super(context);
- this.attributeFactory = context.getJAttributeFactory();
-
- int magic = stream.readInt();
- if (magic != MAGIC_NUMBER)
- throw new IllegalArgumentException("invalid magic number: "+magic);
-
- minor = stream.readShort();
- major = stream.readShort();
- pool = context.JConstantPool(stream);
- accessFlags = stream.readShort();
-
- // This class, super class and interfaces
- name = pool.lookupClass(stream.readShort());
- superclassName = pool.lookupClass(stream.readShort());
- interfaceNames = new String[stream.readShort()];
- for (int i = 0; i < interfaceNames.length; ++i)
- interfaceNames[i] = pool.lookupClass(stream.readShort());
-
- // Fields, methods and attributes
- int fieldsCount = stream.readShort();
- for (int i = 0; i < fieldsCount; ++i)
- addField(context.JField(this, stream));
-
- int methodsCount = stream.readShort();
- for (int i = 0; i < methodsCount; ++i)
- addMethod(context.JMethod(this, stream));
-
- String fileName = null;
- int attributesCount = stream.readShort();
- for (int i = 0; i < attributesCount; ++i) {
- JAttribute attr = attributeFactory.newInstance(this, this, stream);
- if (attr instanceof JSourceFileAttribute)
- fileName = ((JSourceFileAttribute)attr).getFileName();
- else if (attr instanceof JInnerClassesAttribute)
- innerClasses = (JInnerClassesAttribute)attr;
- addAttribute(attr);
- }
- sourceFileName = fileName;
- }
-
- /**
- * Gets the name of the class' superclass.
- * @return The string representing the name of the class' superclass.
- */
- public String getSuperclassName() { return superclassName; }
-
- /**
- * Gets the names of the interfaces implemented by the class.
- * @return The array containing the string representations of the
- * names of the interfaces implemented by the class.
- */
- public String[] getInterfaceNames() { return interfaceNames; }
-
- /**
- * Gets the source file name of this class.
- * @return The string representing the source file name of this class.
- */
- public String getSourceFileName() { return sourceFileName; }
-
- /**
- * Gets the type of the objects that are instances of the class.
- * @return The type of the instances of the class.
- */
- public JType getType() { return new JObjectType(name); }
-
- public JClass getJClass() { return this; }
-
- public boolean isPublic() {
- return (accessFlags & JAccessFlags.ACC_PUBLIC) != 0;
- }
-
- public boolean isPrivate() {
- return (accessFlags & JAccessFlags.ACC_PRIVATE) != 0;
- }
-
- public boolean isProtected() {
- return (accessFlags & JAccessFlags.ACC_PROTECTED) != 0;
- }
-
- public boolean isStatic() {
- return (accessFlags & JAccessFlags.ACC_STATIC) != 0;
- }
-
- public boolean isFinal() {
- return (accessFlags & JAccessFlags.ACC_FINAL) != 0;
- }
-
- public boolean isAbstract() {
- return (accessFlags & JAccessFlags.ACC_ABSTRACT) != 0;
- }
-
- /**
- * Sets the version number of the class.
- * @param major The int representing the major part of the version number
- * of the class.
- * @param minor The int representing the minor part of the version number
- * of the class.
- */
- public void setVersion(int major, int minor) {
- assert !frozen;
- this.major = major;
- this.minor = minor;
- }
-
- /**
- * Gets the major part of the number describing the version of the class.
- * @return The int representing the major part of the version number of
- * the class.
- */
- public int getMajorVersion() { return major; }
-
- /**
- * Gets the minor part of the number describing the version of the class.
- * @return The int representing the minor part of the version number of
- * the class.
- */
- public int getMinorVersion() { return minor; }
-
- /**
- * Gets the constant pool of the class.
- * @return The constant pool of the class.
- */
- public JConstantPool getConstantPool() { return pool; }
-
- public JInnerClassesAttribute getInnerClasses() {
- if (innerClasses == null) {
- innerClasses = new JInnerClassesAttribute(context, this);
- addAttribute(innerClasses);
- }
- return innerClasses;
- }
-
- /**
- * Decides if the class is an interface.
- * @return The boolean representing if the class is an interface or not.
- */
- public boolean isInterface() {
- return (accessFlags & JAccessFlags.ACC_INTERFACE) != 0;
- }
-
- public void addField(JField field) {
- assert !frozen;
- fields.add(field);
- }
-
- /**
- * Create and add a new field to the class.
- */
- public JField addNewField(int accessFlags, String name, JType type) {
- assert !frozen;
- JField f = context.JField(this, accessFlags, name, type);
- addField(f);
- return f;
- }
-
- protected void addMethod(JMethod method) {
- assert !frozen;
- methods.add(method);
- }
-
- /**
- * Create and add a new method to the class.
- */
- public JMethod addNewMethod(int accessFlags,
- String name,
- JType returnType,
- JType[] argTypes,
- String[] argNames) {
- assert !frozen;
- JMethod m = context.JMethod(this,
- accessFlags,
- name,
- returnType,
- argTypes,
- argNames);
- addMethod(m);
- return m;
- }
-
- /**
- * Remove a previously-added method. This makes no attempt at
- * minimising the constant pool by removing all constants which
- * were used only by this method.
- */
- public void removeMethod(JMethod m) {
- assert !frozen;
- methods.remove(m);
- }
-
- public JField[] getFields() {
- return (JField[])fields.toArray(new JField[fields.size()]);
- }
-
- public JMethod[] getMethods() {
- return (JMethod[])methods.toArray(new JMethod[methods.size()]);
- }
-
- /**
- * Freeze the contents of this class so that it can be written to
- * a file.
- */
- public void freeze() {
- assert !frozen;
- frozen = true;
- }
-
- /**
- * Writes the contents of the class to a file referenced by its name.
- * @param fileName The name of the file in which the class must be written.
- */
- public void writeTo(String fileName) throws IOException {
- writeTo(new File(fileName));
- }
-
- /**
- * Writes the contents of the class to a file.
- * @param file The file in which the class must be written.
- */
- public void writeTo(File file) throws IOException {
- File parent = file.getParentFile();
- if (parent != null && !parent.isDirectory())
- if (!parent.mkdirs())
- throw new IOException("cannot create directory " + parent);
-
- FileOutputStream fStream = new FileOutputStream(file);
- BufferedOutputStream bStream = new BufferedOutputStream(fStream);
- DataOutputStream dStream = new DataOutputStream(bStream);
- writeTo(dStream);
- dStream.close();
- bStream.close();
- fStream.close();
- }
-
- /**
- * Writes the contents of the class to a data stream.
- * @param stream The data stream in which the class must be written.
- */
- public void writeTo(DataOutputStream stream) throws IOException {
- if (!frozen) freeze();
-
- int thisClassIdx = pool.addClass(name);
- int superClassIdx = pool.addClass(superclassName);
- int[] interfacesIdx = new int[interfaceNames.length];
-
- for (int i = 0; i < interfaceNames.length; ++i)
- interfacesIdx[i] = pool.addClass(interfaceNames[i]);
-
- pool.freeze();
-
- // Magic number.
- stream.writeInt(MAGIC_NUMBER);
- // Version
- stream.writeShort(minor);
- stream.writeShort(major);
- // Constant pool
- pool.writeTo(stream);
- // Access flags
- stream.writeShort(accessFlags);
-
- // This class, super class and interfaces
- stream.writeShort(thisClassIdx);
- stream.writeShort(superClassIdx);
- stream.writeShort(interfacesIdx.length);
- for (int i = 0; i < interfacesIdx.length; ++i)
- stream.writeShort(interfacesIdx[i]);
-
- // Fields and methods
- stream.writeShort(fields.size());
- Iterator fieldsIt = fields.iterator();
- while (fieldsIt.hasNext())
- ((JField)fieldsIt.next()).writeTo(stream);
-
- stream.writeShort(methods.size());
- Iterator methodsIt = methods.iterator();
- while (methodsIt.hasNext())
- ((JMethod)methodsIt.next()).writeTo(stream);
-
- // Attributes
- JAttribute.writeTo(attributes, stream);
- }
-
- // Follows javap output format for ClassFile.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer();
- if (sourceFileName != null) {
- buf.append("Compiled from \"");
- buf.append(sourceFileName);
- buf.append("\"\n");
- }
- buf.append(getMemberName());
- buf.append(toExternalName(getName()));
- if (!isInterface()) {
- buf.append(" extends ");
- buf.append(toExternalName(getSuperclassName()));
- }
- if (interfaceNames.length > 0) {
- if (isInterface()) buf.append(" extends ");
- else buf.append(" implements ");
- for (int i = 0; i < interfaceNames.length; ++i) {
- if (i > 0) buf.append(",");
- buf.append(toExternalName(interfaceNames[i]));
- }
- }
- buf.append("\n");
- Iterator attrsIt = attributes.iterator();
- while (attrsIt.hasNext()) {
- JAttribute attr = (JAttribute)attrsIt.next();
- buf.append(attr);
- }
- buf.append(" minor version: ");
- buf.append(minor);
- buf.append("\n major version: ");
- buf.append(major);
- buf.append("\n");
- buf.append(pool);
- buf.append("\n{\n");
- JField[] jfields = getFields();
- for (int i = 0; i < jfields.length; ++i) {
- if (i > 0) buf.append("\n");
- buf.append(jfields[i]);
- }
- buf.append("\n");
- JMethod[] jmethods = getMethods();
- for (int i = 0; i < jmethods.length; ++i) {
- if (i > 0) buf.append("\n");
- buf.append(jmethods[i]);
- }
- buf.append("\n}\n");
- return buf.toString();
- }
-
- private String getMemberName() {
- StringBuffer buf = new StringBuffer();
- if (isPublic()) buf.append("public ");
- else if (isProtected()) buf.append("protected ");
- else if (isPrivate()) buf.append("private ");
- if (isInterface())
- buf.append("interface ");
- else {
- if (isAbstract()) buf.append("abstract ");
- else if (isFinal()) buf.append("final ");
- buf.append("class ");
- }
- return buf.toString();
- }
-}
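
Besides generating classes from scratch, JClass can also be reconstructed from an existing class file through the stream-reading constructor above, and its toString() then produces a javap-style dump. A minimal sketch, with an illustrative file name:

    import java.io.BufferedInputStream;
    import java.io.DataInputStream;
    import java.io.FileInputStream;
    import ch.epfl.lamp.fjbg.*;

    public class ClassDumpDemo {
        public static void main(String[] args) throws Exception {
            FJBGContext context = new FJBGContext();
            DataInputStream in = new DataInputStream(
                new BufferedInputStream(new FileInputStream("Answer.class")));
            try {
                JClass clazz = context.JClass(in);  // reads pool, fields, methods, attributes
                System.out.println(clazz);          // javap-like output via JClass.toString()
            } finally {
                in.close();
            }
        }
    }
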
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JCode.java b/src/fjbg/ch/epfl/lamp/fjbg/JCode.java
deleted file mode 100644
index ab6934ab30..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JCode.java
+++ /dev/null
@@ -1,1308 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.*;
-
-import ch.epfl.lamp.util.ByteArray;
-
-/**
- * List of instructions, to which Java byte-code instructions can be added.
- *
- * @author Michel Schinz, Thomas Friedli
- * @version 1.0
- */
-
-public class JCode {
- protected boolean frozen = false;
-
- public static int MAX_CODE_SIZE = 65535;
-
- protected final FJBGContext context;
- protected final JMethod owner;
-
- protected final ByteArray codeArray;
-
- protected final LinkedList/*<ExceptionHandler>*/ exceptionHandlers =
- new LinkedList();
-
- protected final JConstantPool pool;
-
- protected final ArrayList/*<OffsetToPatch>*/ offsetToPatch =
- new ArrayList();
-
- protected static int UNKNOWN_STACK_SIZE = Integer.MIN_VALUE;
- protected int maxStackSize = UNKNOWN_STACK_SIZE;
- protected int[] stackProduction = null;
- protected int[] stackSizes;
-
- protected JCode(FJBGContext context, JClass clazz, JMethod owner) {
- this.context = context;
- this.pool = clazz.getConstantPool();
- this.owner = owner;
- this.codeArray = new ByteArray();
- }
-
- protected JCode(FJBGContext context,
- JClass clazz,
- JMethod owner,
- DataInputStream stream)
- throws IOException {
- this.context = context;
- this.pool = clazz.getConstantPool();
- this.owner = owner;
- owner.setCode(this);
- int size = stream.readInt();
- if (size > MAX_CODE_SIZE) // section 4.10
- throw new Error("code size must be less than " + MAX_CODE_SIZE + ": " + size);
- this.codeArray = new ByteArray(stream, size);
- }
-
- /**
- * Gets the program counter, which is defined as the address of the
- * next instruction.
- * @return The int representing the value of the program counter
- */
- public int getPC() {
- return codeArray.getSize();
- }
-
- /**
- * Gets the size of the code
- * @return The number of bytes of the code
- */
- public int getSize() {
- return codeArray.getSize();
- }
-
- /**
- * Gets the method to which the code belongs
- * @return The method to which the code belongs
- */
- public JMethod getOwner() {
- return owner;
- }
-
- // Stack size
- public int getMaxStackSize() {
- if (maxStackSize == UNKNOWN_STACK_SIZE)
- maxStackSize = computeMaxStackSize();
- return maxStackSize;
- }
-
- // Freezing
- //////////////////////////////////////////////////////////////////////
-
- public static class CodeSizeTooBigException extends OffsetTooBigException {
- public int codeSize;
-
- public CodeSizeTooBigException(int size) {
- codeSize = size;
- }
- }
-
- public void freeze() throws OffsetTooBigException {
- assert !frozen;
-
- if (getSize() > MAX_CODE_SIZE) throw new CodeSizeTooBigException(getSize());
-
- patchAllOffset();
- codeArray.freeze();
- frozen = true;
- }
-
- // Attributes
- //////////////////////////////////////////////////////////////////////
-
- protected final LinkedList/*<JAttribute>*/ attributes = new LinkedList();
-
- public void addAttribute(JAttribute attr) {
- attributes.add(attr);
- }
-
- public List/*<JAttribute>*/ getAttributes() {
- return attributes;
- }
-
- // Emitting code
- //////////////////////////////////////////////////////////////////////
-
- public void emit(JOpcode opcode) {
- setStackProduction(getPC(), opcode);
- codeArray.addU1(opcode.code);
- }
-
- public void emitNOP() { emit(JOpcode.NOP); }
-
- // Constant loading.
- public void emitACONST_NULL() { emit(JOpcode.ACONST_NULL); }
- public void emitICONST_M1() { emit(JOpcode.ICONST_M1); }
- public void emitICONST_0() { emit(JOpcode.ICONST_0); }
- public void emitICONST_1() { emit(JOpcode.ICONST_1); }
- public void emitICONST_2() { emit(JOpcode.ICONST_2); }
- public void emitICONST_3() { emit(JOpcode.ICONST_3); }
- public void emitICONST_4() { emit(JOpcode.ICONST_4); }
- public void emitICONST_5() { emit(JOpcode.ICONST_5); }
- public void emitLCONST_0() { emit(JOpcode.LCONST_0); }
- public void emitLCONST_1() { emit(JOpcode.LCONST_1); }
- public void emitFCONST_0() { emit(JOpcode.FCONST_0); }
- public void emitFCONST_1() { emit(JOpcode.FCONST_1); }
- public void emitFCONST_2() { emit(JOpcode.FCONST_2); }
- public void emitDCONST_0() { emit(JOpcode.DCONST_0); }
- public void emitDCONST_1() { emit(JOpcode.DCONST_1); }
-
- public void emitBIPUSH(int b) { emitU1(JOpcode.BIPUSH, b); }
- public void emitSIPUSH(int s) { emitU2(JOpcode.SIPUSH, s); }
- public void emitLDC(int value) {
- emitU1(JOpcode.LDC, pool.addInteger(value));
- }
- public void emitLDC(float value) {
- emitU1(JOpcode.LDC, pool.addFloat(value));
- }
- public void emitLDC(String value) {
- emitU1(JOpcode.LDC, pool.addString(value));
- }
- public void emitLDC_W(int value) {
- emitU1(JOpcode.LDC_W, pool.addInteger(value));
- }
- public void emitLDC_W(float value) {
- emitU1(JOpcode.LDC_W, pool.addFloat(value));
- }
- public void emitLDC_W(String value) {
- emitU1(JOpcode.LDC_W, pool.addString(value));
- }
- public void emitLDC2_W(long value) {
- emitU2(JOpcode.LDC2_W, pool.addLong(value));
- }
- public void emitLDC2_W(double value) {
- emitU2(JOpcode.LDC2_W, pool.addDouble(value));
- }
-
- // Loading variables.
- public void emitILOAD(int index) { emitU1(JOpcode.ILOAD, index); }
- public void emitLLOAD(int index) { emitU1(JOpcode.LLOAD, index); }
- public void emitFLOAD(int index) { emitU1(JOpcode.FLOAD, index); }
- public void emitDLOAD(int index) { emitU1(JOpcode.DLOAD, index); }
- public void emitALOAD(int index) { emitU1(JOpcode.ALOAD, index); }
-
- public void emitILOAD_0() { emit(JOpcode.ILOAD_0); }
- public void emitILOAD_1() { emit(JOpcode.ILOAD_1); }
- public void emitILOAD_2() { emit(JOpcode.ILOAD_2); }
- public void emitILOAD_3() { emit(JOpcode.ILOAD_3); }
- public void emitLLOAD_0() { emit(JOpcode.LLOAD_0); }
- public void emitLLOAD_1() { emit(JOpcode.LLOAD_1); }
- public void emitLLOAD_2() { emit(JOpcode.LLOAD_2); }
- public void emitLLOAD_3() { emit(JOpcode.LLOAD_3); }
- public void emitFLOAD_0() { emit(JOpcode.FLOAD_0); }
- public void emitFLOAD_1() { emit(JOpcode.FLOAD_1); }
- public void emitFLOAD_2() { emit(JOpcode.FLOAD_2); }
- public void emitFLOAD_3() { emit(JOpcode.FLOAD_3); }
- public void emitDLOAD_0() { emit(JOpcode.DLOAD_0); }
- public void emitDLOAD_1() { emit(JOpcode.DLOAD_1); }
- public void emitDLOAD_2() { emit(JOpcode.DLOAD_2); }
- public void emitDLOAD_3() { emit(JOpcode.DLOAD_3); }
- public void emitALOAD_0() { emit(JOpcode.ALOAD_0); }
- public void emitALOAD_1() { emit(JOpcode.ALOAD_1); }
- public void emitALOAD_2() { emit(JOpcode.ALOAD_2); }
- public void emitALOAD_3() { emit(JOpcode.ALOAD_3); }
-
- public void emitIALOAD() { emit(JOpcode.IALOAD); }
- public void emitLALOAD() { emit(JOpcode.LALOAD); }
- public void emitFALOAD() { emit(JOpcode.FALOAD); }
- public void emitDALOAD() { emit(JOpcode.DALOAD); }
- public void emitAALOAD() { emit(JOpcode.AALOAD); }
- public void emitBALOAD() { emit(JOpcode.BALOAD); }
- public void emitCALOAD() { emit(JOpcode.CALOAD); }
- public void emitSALOAD() { emit(JOpcode.SALOAD); }
-
- // Storing variables.
- public void emitISTORE(int index) { emitU1(JOpcode.ISTORE, index); }
- public void emitLSTORE(int index) { emitU1(JOpcode.LSTORE, index); }
- public void emitFSTORE(int index) { emitU1(JOpcode.FSTORE, index); }
- public void emitDSTORE(int index) { emitU1(JOpcode.DSTORE, index); }
- public void emitASTORE(int index) { emitU1(JOpcode.ASTORE, index); }
-
- public void emitISTORE_0() { emit(JOpcode.ISTORE_0); }
- public void emitISTORE_1() { emit(JOpcode.ISTORE_1); }
- public void emitISTORE_2() { emit(JOpcode.ISTORE_2); }
- public void emitISTORE_3() { emit(JOpcode.ISTORE_3); }
- public void emitLSTORE_0() { emit(JOpcode.LSTORE_0); }
- public void emitLSTORE_1() { emit(JOpcode.LSTORE_1); }
- public void emitLSTORE_2() { emit(JOpcode.LSTORE_2); }
- public void emitLSTORE_3() { emit(JOpcode.LSTORE_3); }
- public void emitFSTORE_0() { emit(JOpcode.FSTORE_0); }
- public void emitFSTORE_1() { emit(JOpcode.FSTORE_1); }
- public void emitFSTORE_2() { emit(JOpcode.FSTORE_2); }
- public void emitFSTORE_3() { emit(JOpcode.FSTORE_3); }
- public void emitDSTORE_0() { emit(JOpcode.DSTORE_0); }
- public void emitDSTORE_1() { emit(JOpcode.DSTORE_1); }
- public void emitDSTORE_2() { emit(JOpcode.DSTORE_2); }
- public void emitDSTORE_3() { emit(JOpcode.DSTORE_3); }
- public void emitASTORE_0() { emit(JOpcode.ASTORE_0); }
- public void emitASTORE_1() { emit(JOpcode.ASTORE_1); }
- public void emitASTORE_2() { emit(JOpcode.ASTORE_2); }
- public void emitASTORE_3() { emit(JOpcode.ASTORE_3); }
-
- public void emitIASTORE() { emit(JOpcode.IASTORE); }
- public void emitLASTORE() { emit(JOpcode.LASTORE); }
- public void emitFASTORE() { emit(JOpcode.FASTORE); }
- public void emitDASTORE() { emit(JOpcode.DASTORE); }
- public void emitAASTORE() { emit(JOpcode.AASTORE); }
- public void emitBASTORE() { emit(JOpcode.BASTORE); }
- public void emitCASTORE() { emit(JOpcode.CASTORE); }
- public void emitSASTORE() { emit(JOpcode.SASTORE); }
-
- // Stack manipulation.
- public void emitPOP() { emit(JOpcode.POP); }
- public void emitPOP2() { emit(JOpcode.POP2); }
- public void emitDUP() { emit(JOpcode.DUP); }
- public void emitDUP_X1() { emit(JOpcode.DUP_X1); }
- public void emitDUP_X2() { emit(JOpcode.DUP_X2); }
- public void emitDUP2() { emit(JOpcode.DUP2); }
- public void emitDUP2_X1() { emit(JOpcode.DUP2_X1); }
- public void emitDUP2_X2() { emit(JOpcode.DUP2_X2); }
- public void emitSWAP() { emit(JOpcode.SWAP); }
-
- // Arithmetic and logic operations.
- public void emitIADD() { emit(JOpcode.IADD); }
- public void emitLADD() { emit(JOpcode.LADD); }
- public void emitFADD() { emit(JOpcode.FADD); }
- public void emitDADD() { emit(JOpcode.DADD); }
-
- public void emitISUB() { emit(JOpcode.ISUB); }
- public void emitLSUB() { emit(JOpcode.LSUB); }
- public void emitFSUB() { emit(JOpcode.FSUB); }
- public void emitDSUB() { emit(JOpcode.DSUB); }
-
- public void emitIMUL() { emit(JOpcode.IMUL); }
- public void emitLMUL() { emit(JOpcode.LMUL); }
- public void emitFMUL() { emit(JOpcode.FMUL); }
- public void emitDMUL() { emit(JOpcode.DMUL); }
-
- public void emitIDIV() { emit(JOpcode.IDIV); }
- public void emitLDIV() { emit(JOpcode.LDIV); }
- public void emitFDIV() { emit(JOpcode.FDIV); }
- public void emitDDIV() { emit(JOpcode.DDIV); }
-
- public void emitIREM() { emit(JOpcode.IREM); }
- public void emitLREM() { emit(JOpcode.LREM); }
- public void emitFREM() { emit(JOpcode.FREM); }
- public void emitDREM() { emit(JOpcode.DREM); }
-
- public void emitINEG() { emit(JOpcode.INEG); }
- public void emitLNEG() { emit(JOpcode.LNEG); }
- public void emitFNEG() { emit(JOpcode.FNEG); }
- public void emitDNEG() { emit(JOpcode.DNEG); }
-
- public void emitISHL() { emit(JOpcode.ISHL); }
- public void emitLSHL() { emit(JOpcode.LSHL); }
-
- public void emitISHR() { emit(JOpcode.ISHR); }
- public void emitLSHR() { emit(JOpcode.LSHR); }
-
- public void emitIUSHR() { emit(JOpcode.IUSHR); }
- public void emitLUSHR() { emit(JOpcode.LUSHR); }
-
- public void emitIAND() { emit(JOpcode.IAND); }
- public void emitLAND() { emit(JOpcode.LAND); }
-
- public void emitIOR() { emit(JOpcode.IOR); }
- public void emitLOR() { emit(JOpcode.LOR); }
-
- public void emitIXOR() { emit(JOpcode.IXOR); }
- public void emitLXOR() { emit(JOpcode.LXOR); }
-
- public void emitIINC(int index, int increment) {
- emitU1U1(JOpcode.IINC, index, increment);
- }
-
- // (Numeric) type conversions.
- public void emitI2L() { emit(JOpcode.I2L); }
- public void emitI2F() { emit(JOpcode.I2F); }
- public void emitI2D() { emit(JOpcode.I2D); }
- public void emitL2I() { emit(JOpcode.L2I); }
- public void emitL2F() { emit(JOpcode.L2F); }
- public void emitL2D() { emit(JOpcode.L2D); }
- public void emitF2I() { emit(JOpcode.F2I); }
- public void emitF2L() { emit(JOpcode.F2L); }
- public void emitF2D() { emit(JOpcode.F2D); }
- public void emitD2I() { emit(JOpcode.D2I); }
- public void emitD2L() { emit(JOpcode.D2L); }
- public void emitD2F() { emit(JOpcode.D2F); }
- public void emitI2B() { emit(JOpcode.I2B); }
- public void emitI2C() { emit(JOpcode.I2C); }
- public void emitI2S() { emit(JOpcode.I2S); }
-
- // Comparisons and tests.
- public void emitLCMP() { emit(JOpcode.LCMP); }
- public void emitFCMPL() { emit(JOpcode.FCMPL); }
- public void emitFCMPG() { emit(JOpcode.FCMPG); }
- public void emitDCMPL() { emit(JOpcode.DCMPL); }
- public void emitDCMPG() { emit(JOpcode.DCMPG); }
-
- protected void emitGenericIF(JOpcode opcode, Label label)
- throws OffsetTooBigException {
- emitU2(opcode, label.getOffset16(getPC() + 1, getPC()));
- }
-
- public void emitIFEQ(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IFEQ, label);
- }
- public void emitIFEQ(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IFEQ, targetPC - getPC());
- }
- public void emitIFEQ() {
- emitU2(JOpcode.IFEQ, 0);
- }
-
- public void emitIFNE(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IFNE, label);
- }
- public void emitIFNE(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IFNE, targetPC - getPC());
- }
- public void emitIFNE() {
- emitU2(JOpcode.IFNE, 0);
- }
-
- public void emitIFLT(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IFLT, label);
- }
- public void emitIFLT(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IFLT, targetPC - getPC());
- }
- public void emitIFLT() {
- emitU2(JOpcode.IFLT, 0);
- }
-
- public void emitIFGE(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IFGE, label);
- }
- public void emitIFGE(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IFGE, targetPC - getPC());
- }
- public void emitIFGE() {
- emitU2(JOpcode.IFGE, 0);
- }
-
- public void emitIFGT(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IFGT, label);
- }
- public void emitIFGT(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IFGT, targetPC - getPC());
- }
- public void emitIFGT() {
- emitU2(JOpcode.IFGT, 0);
- }
-
- public void emitIFLE(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IFLE, label);
- }
- public void emitIFLE(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IFLE, targetPC - getPC());
- }
- public void emitIFLE() {
- emitU2(JOpcode.IFLE, 0);
- }
-
- public void emitIF_ICMPEQ(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IF_ICMPEQ, label);
- }
- public void emitIF_ICMPEQ(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IF_ICMPEQ, targetPC - getPC());
- }
- public void emitIF_ICMPEQ() {
- emitU2(JOpcode.IF_ICMPEQ, 0);
- }
-
- public void emitIF_ICMPNE(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IF_ICMPNE, label);
- }
- public void emitIF_ICMPNE(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IF_ICMPNE, targetPC - getPC());
- }
- public void emitIF_ICMPNE() {
- emitU2(JOpcode.IF_ICMPNE, 0);
- }
-
- public void emitIF_ICMPLT(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IF_ICMPLT, label);
- }
- public void emitIF_ICMPLT(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IF_ICMPLT, targetPC - getPC());
- }
- public void emitIF_ICMPLT() {
- emitU2(JOpcode.IF_ICMPLT, 0);
- }
-
- public void emitIF_ICMPGE(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IF_ICMPGE, label);
- }
- public void emitIF_ICMPGE(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IF_ICMPGE, targetPC - getPC());
- }
- public void emitIF_ICMPGE() {
- emitU2(JOpcode.IF_ICMPGE, 0);
- }
-
- public void emitIF_ICMPGT(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IF_ICMPGT, label);
- }
- public void emitIF_ICMPGT(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IF_ICMPGT, targetPC - getPC());
- }
- public void emitIF_ICMPGT() {
- emitU2(JOpcode.IF_ICMPGT, 0);
- }
-
- public void emitIF_ICMPLE(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IF_ICMPLE, label);
- }
- public void emitIF_ICMPLE(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IF_ICMPLE, targetPC - getPC());
- }
- public void emitIF_ICMPLE() {
- emitU2(JOpcode.IF_ICMPLE, 0);
- }
-
- public void emitIF_ACMPEQ(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IF_ACMPEQ, label);
- }
- public void emitIF_ACMPEQ(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IF_ACMPEQ, targetPC - getPC());
- }
- public void emitIF_ACMPEQ() {
- emitU2(JOpcode.IF_ACMPEQ, 0);
- }
-
- public void emitIF_ACMPNE(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IF_ACMPNE, label);
- }
- public void emitIF_ACMPNE(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IF_ACMPNE, targetPC - getPC());
- }
- public void emitIF_ACMPNE() {
- emitU2(JOpcode.IF_ACMPNE, 0);
- }
-
- public void emitIFNULL(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IFNULL, label);
- }
- public void emitIFNULL(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IFNULL, targetPC - getPC());
- }
- public void emitIFNULL() {
- emitU2(JOpcode.IFNULL, 0);
- }
-
- public void emitIFNONNULL(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IFNONNULL, label);
- }
- public void emitIFNONNULL(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IFNONNULL, targetPC - getPC());
- }
- public void emitIFNONNULL() {
- emitU2(JOpcode.IFNONNULL, 0);
- }
-
- public void emitGOTO(Label label) throws OffsetTooBigException {
- emitU2(JOpcode.GOTO, label.getOffset16(getPC() + 1, getPC()));
- }
- public void emitGOTO(int targetPC) throws OffsetTooBigException {
- int offset = targetPC - getPC();
- checkOffset16(offset);
- emitU2(JOpcode.GOTO, offset);
- }
- public void emitGOTO() {
- emitU2(JOpcode.GOTO, 0);
- }
-
- public void emitGOTO_W(Label label) {
- emitU4(JOpcode.GOTO_W, label.getOffset32(getPC() + 1, getPC()));
- }
- public void emitGOTO_W(int targetPC) {
- emitU4(JOpcode.GOTO_W, targetPC - getPC());
- }
- public void emitGOTO_W() {
- emitU4(JOpcode.GOTO_W, 0);
- }
-
- public void emitJSR(Label label) throws OffsetTooBigException {
- emitU2(JOpcode.JSR, label.getOffset16(getPC() + 1, getPC()));
- }
- public void emitJSR(int targetPC) {
- emitU2(JOpcode.JSR, targetPC - getPC());
- }
- public void emitJSR() {
- emitU2(JOpcode.JSR, 0);
- }
-
- public void emitJSR_W(Label label) {
- emitU4(JOpcode.JSR_W, label.getOffset32(getPC() + 1, getPC()));
- }
- public void emitJSR_W(int targetPC) {
- emitU4(JOpcode.JSR_W, targetPC - getPC());
- }
- public void emitJSR_W() {
- emitU4(JOpcode.JSR_W, 0);
- }
-
- /*
- public void emitRET(Label label) throws OffsetTooBigException {
- emitU2(JOpcode.RET, label.getOffset16(getPC() + 1, getPC()));
- }
- public void emitRET(int targetPC) {
- emitU1(JOpcode.RET, targetPC);
- }
- public void emitRET() {
- emitU1(JOpcode.RET, 0);
- }
- */
-
- public void emitRET(int index) {
- emitU1(JOpcode.RET, index);
- }
-
- public void emitRET(JLocalVariable var) {
- emitRET(var.getIndex());
- }
-
- public void emitTABLESWITCH(int[] keys,
- Label[] branches,
- Label defaultBranch) {
- assert keys.length == branches.length;
-
- int low = keys[0], high = keys[keys.length - 1];
- int instrPC = getPC();
-
- setStackProduction(instrPC, JOpcode.TABLESWITCH);
- codeArray.addU1(JOpcode.cTABLESWITCH);
- while (getPC() % 4 != 0) codeArray.addU1(0);
-
- codeArray.addU4(defaultBranch.getOffset32(getPC(), instrPC));
- codeArray.addU4(low);
- codeArray.addU4(high);
- for (int i = 0; i < branches.length; i++) {
- assert keys[i] == low + i;
- codeArray.addU4(branches[i].getOffset32(getPC(), instrPC));
- }
- }
-
- public void emitLOOKUPSWITCH(int[] keys,
- Label[] branches,
- Label defaultBranch) {
- assert keys.length == branches.length;
-
- int instrPC = getPC();
- setStackProduction(getPC(), JOpcode.LOOKUPSWITCH);
- codeArray.addU1(JOpcode.cLOOKUPSWITCH);
- while (getPC() % 4 != 0) codeArray.addU1(0);
-
- codeArray.addU4(defaultBranch.getOffset32(getPC(), instrPC));
- codeArray.addU4(branches.length);
- for (int i = 0; i < branches.length; i++) {
- codeArray.addU4(keys[i]);
- codeArray.addU4(branches[i].getOffset32(getPC(), instrPC));
- }
- }
-
- public void emitIRETURN() { emit(JOpcode.IRETURN); }
- public void emitLRETURN() { emit(JOpcode.LRETURN); }
- public void emitFRETURN() { emit(JOpcode.FRETURN); }
- public void emitDRETURN() { emit(JOpcode.DRETURN); }
- public void emitARETURN() { emit(JOpcode.ARETURN); }
- public void emitRETURN() { emit(JOpcode.RETURN); }
-
- // Field access
- public void emitGETSTATIC(String className, String name, JType type) {
- setStackProduction(getPC(), type.getSize());
- int index = pool.addFieldRef(className, name, type.getSignature());
- emitU2(JOpcode.GETSTATIC, index);
- }
- public void emitPUTSTATIC(String className, String name, JType type) {
- setStackProduction(getPC(), -type.getSize());
- int index = pool.addFieldRef(className, name, type.getSignature());
- emitU2(JOpcode.PUTSTATIC, index);
- }
- public void emitGETFIELD(String className, String name, JType type) {
- setStackProduction(getPC(), type.getSize() - 1);
- int index = pool.addFieldRef(className, name, type.getSignature());
- emitU2(JOpcode.GETFIELD, index);
- }
- public void emitPUTFIELD(String className, String name, JType type) {
- setStackProduction(getPC(), -(type.getSize() + 1));
- int index = pool.addFieldRef(className, name, type.getSignature());
- emitU2(JOpcode.PUTFIELD, index);
- }
-
- // Method invocation
- public void emitINVOKEVIRTUAL(String className,
- String name,
- JMethodType type) {
- setStackProduction(getPC(), type.getProducedStack() - 1);
- int index =
- pool.addClassMethodRef(className, name, type.getSignature());
- emitU2(JOpcode.INVOKEVIRTUAL, index);
- }
- public void emitINVOKESPECIAL(String className,
- String name,
- JMethodType type) {
- setStackProduction(getPC(), type.getProducedStack() - 1);
- int index =
- pool.addClassMethodRef(className, name, type.getSignature());
- emitU2(JOpcode.INVOKESPECIAL, index);
- }
- public void emitINVOKESTATIC(String className,
- String name,
- JMethodType type) {
- setStackProduction(getPC(), type.getProducedStack());
- int index =
- pool.addClassMethodRef(className, name, type.getSignature());
- emitU2(JOpcode.INVOKESTATIC, index);
- }
- public void emitINVOKEINTERFACE(String className,
- String name,
- JMethodType type) {
- setStackProduction(getPC(), type.getProducedStack() - 1);
- int index =
- pool.addInterfaceMethodRef(className, name, type.getSignature());
- emitU2U1U1(JOpcode.INVOKEINTERFACE, index, type.getArgsSize() + 1, 0);
- }
-
- // Object creation
- public void emitNEW(String className) {
- emitU2(JOpcode.NEW, pool.addClass(className));
- }
- public void emitNEWARRAY(JType elemType) {
- emitU1(JOpcode.NEWARRAY, elemType.getTag());
- }
- public void emitANEWARRAY(JReferenceType elemType) {
- emitU2(JOpcode.ANEWARRAY, pool.addDescriptor(elemType));
- }
- public void emitMULTIANEWARRAY(JReferenceType elemType, int dimensions) {
- setStackProduction(getPC(), -dimensions + 1);
- emitU2U1(JOpcode.MULTIANEWARRAY,
- pool.addDescriptor(elemType),
- dimensions);
- }
- public void emitARRAYLENGTH() { emit(JOpcode.ARRAYLENGTH); }
-
- // Exception throwing
- public void emitATHROW() { emit(JOpcode.ATHROW); }
-
- // Dynamic typing
- public void emitCHECKCAST(JReferenceType type) {
- emitU2(JOpcode.CHECKCAST, pool.addDescriptor(type));
- }
- public void emitINSTANCEOF(JReferenceType type) {
- emitU2(JOpcode.INSTANCEOF, pool.addDescriptor(type));
- }
-
- // Monitors
- public void emitMONITORENTER() { emit(JOpcode.MONITORENTER); }
- public void emitMONITOREXIT() { emit(JOpcode.MONITOREXIT); }
-
- // Wide variants
- // FIXME setStackProd. will raise an exception here
- public void emitWIDE(JOpcode opcode, int index) {
- assert (opcode.code == JOpcode.cILOAD)
- || (opcode.code == JOpcode.cLLOAD)
- || (opcode.code == JOpcode.cFLOAD)
- || (opcode.code == JOpcode.cDLOAD)
- || (opcode.code == JOpcode.cALOAD)
- || (opcode.code == JOpcode.cISTORE)
- || (opcode.code == JOpcode.cLSTORE)
- || (opcode.code == JOpcode.cFSTORE)
- || (opcode.code == JOpcode.cDSTORE)
- || (opcode.code == JOpcode.cASTORE)
- || (opcode.code == JOpcode.cRET)
- : "invalide opcode for WIDE: " + opcode;
-
- setStackProduction(getPC(), opcode);
- codeArray.addU1(JOpcode.WIDE.code);
- codeArray.addU1(opcode.code);
- codeArray.addU2(index);
- }
- public void emitWIDE(JOpcode opcode, int index, int constant) {
- assert opcode.code == JOpcode.cIINC
- : "invalid opcode for WIDE: " + opcode;
-
- setStackProduction(getPC(), opcode);
- codeArray.addU1(JOpcode.cWIDE);
- codeArray.addU1(opcode.code);
- codeArray.addU2(index);
- codeArray.addU2(constant);
- }
-
- protected void emitU1(JOpcode opcode, int i1) {
- setStackProduction(getPC(), opcode);
- codeArray.addU1(opcode.code);
- codeArray.addU1(i1);
- }
-
- protected void emitU1U1(JOpcode opcode, int i1, int i2) {
- setStackProduction(getPC(), opcode);
- codeArray.addU1(opcode.code);
- codeArray.addU1(i1);
- codeArray.addU1(i2);
- }
-
- protected void emitU2(JOpcode opcode, int i1) {
- setStackProduction(getPC(), opcode);
- codeArray.addU1(opcode.code);
- codeArray.addU2(i1);
- }
-
- protected void emitU2U1(JOpcode opcode, int i1, int i2) {
- setStackProduction(getPC(), opcode);
- codeArray.addU1(opcode.code);
- codeArray.addU2(i1);
- codeArray.addU1(i2);
- }
-
- protected void emitU2U1U1(JOpcode opcode, int i1, int i2, int i3) {
- setStackProduction(getPC(), opcode);
- codeArray.addU1(opcode.code);
- codeArray.addU2(i1);
- codeArray.addU1(i2);
- codeArray.addU1(i3);
- }
-
- protected void emitU4(JOpcode opcode, int i1) {
- setStackProduction(getPC(), opcode);
- codeArray.addU1(opcode.code);
- codeArray.addU4(i1);
- }
-
- protected int getU1(int sourcePos) {
- return codeArray.getU1(sourcePos);
- }
-
- protected int getU2(int sourcePos) {
- return codeArray.getU2(sourcePos);
- }
-
- protected int getU4(int sourcePos) {
- return codeArray.getU4(sourcePos);
- }
-
- protected int getS1(int sourcePos) {
- return codeArray.getS1(sourcePos);
- }
-
- protected int getS2(int sourcePos) {
- return codeArray.getS2(sourcePos);
- }
-
- protected int getS4(int sourcePos) {
- return codeArray.getS4(sourcePos);
- }
-
- // Stack size computation
- //////////////////////////////////////////////////////////////////////
-
- protected int getStackProduction(int pc) {
- if (stackProduction == null || pc >= stackProduction.length)
- return UNKNOWN_STACK_SIZE;
- else
- return stackProduction[pc];
- }
-
- protected void setStackProduction(int pc, int production) {
- if (stackProduction == null) {
- stackProduction = new int[256];
- Arrays.fill(stackProduction, UNKNOWN_STACK_SIZE);
- } else {
- while (pc >= stackProduction.length) {
- int[] newStackProduction = new int[stackProduction.length * 2];
- System.arraycopy(stackProduction, 0,
- newStackProduction, 0,
- stackProduction.length);
- Arrays.fill(newStackProduction,
- stackProduction.length,
- newStackProduction.length,
- UNKNOWN_STACK_SIZE);
- stackProduction = newStackProduction;
- }
- }
- stackProduction[pc] = production;
- }
-
- protected void setStackProduction(int pc, JOpcode opcode) {
- // TODO we should instead check whether the opcode has known
- // stack consumption/production.
- if (getStackProduction(pc) == UNKNOWN_STACK_SIZE)
-// && opcode.hasKnownProducedDataSize()
-// && opcode.hasKnownConsumedDataSize())
- setStackProduction(pc,
- opcode.getProducedDataSize()
- - opcode.getConsumedDataSize());
- }
-
- protected int computeMaxStackSize() {
- if (stackSizes == null) {
- stackSizes = new int[getSize()];
- Arrays.fill(stackSizes, UNKNOWN_STACK_SIZE);
- stackSizes[0] = 0;
- }
- int size = computeMaxStackSize(0, 0, 0);
-
- // compute stack sizes for exception handlers too
- ExceptionHandler exh = null;
- for (Iterator it = exceptionHandlers.iterator();
- it.hasNext();) {
- exh = (ExceptionHandler)it.next();
- int exhSize = computeMaxStackSize(exh.getHandlerPC(), 1, 1);
- if (size < exhSize)
- size = exhSize;
- }
-
- return size;
- }
-
- protected int computeMaxStackSize(int pc, int stackSize, int maxStackSize) {
- JCodeIterator iterator = new JCodeIterator(this, pc);
- for (;;) {
- int successors = iterator.getSuccessorCount();
- if (successors == 0)
- return maxStackSize;
- else {
- assert stackProduction[iterator.getPC()] != UNKNOWN_STACK_SIZE
- : "unknown stack production, pc=" + iterator.getPC()
- + " in method " + owner.getName();
- stackSize += stackProduction[iterator.getPC()];
- if (stackSize > maxStackSize)
- maxStackSize = stackSize;
- int nextPC = -1;
- for (int i = 0; i < successors; ++i) {
- int succPC = iterator.getSuccessorPC(i);
- assert succPC >= 0 && succPC < stackSizes.length
- : iterator.getPC() + ": invalid pc: " + succPC
- + " op: " + iterator.getOpcode();
- if (stackSizes[succPC] == UNKNOWN_STACK_SIZE) {
- stackSizes[succPC] = stackSize;
- if (nextPC == -1)
- nextPC = succPC;
- else
- maxStackSize = computeMaxStackSize(succPC,
- stackSize,
- maxStackSize);
- }
- }
- if (nextPC == -1)
- return maxStackSize;
- else
- iterator.moveTo(nextPC);
- }
- }
- }
-
- // Labels
- //////////////////////////////////////////////////////////////////////
-
- public static class OffsetTooBigException extends Exception {
- public OffsetTooBigException() { super(); }
- public OffsetTooBigException(String message) { super(message); }
- }
-
- protected void checkOffset16(int offset) throws OffsetTooBigException {
- if (offset < Short.MIN_VALUE || offset > Short.MAX_VALUE)
- throw new OffsetTooBigException("offset too big to fit"
- + " in 16 bits: " + offset);
- }
-
- public class Label {
- protected boolean anchored = false;
- protected int targetPC = 0;
-
- public void anchorToNext() {
- assert !anchored;
- this.targetPC = getPC();
- anchored = true;
- }
-
- public int getAnchor() {
- assert anchored;
- return targetPC;
- }
-
- protected int getOffset16(int pc, int instrPC)
- throws OffsetTooBigException {
- if (anchored) {
- int offset = targetPC - instrPC;
- checkOffset16(offset);
- return offset;
- } else {
- recordOffsetToPatch(pc, 16, instrPC, this);
- return 0;
- }
- }
-
- protected int getOffset32(int pc, int instrPC) {
- if (anchored)
- return targetPC - instrPC;
- else {
- recordOffsetToPatch(pc, 32, instrPC, this);
- return 0;
- }
- }
- }
-
- public Label newLabel() {
- return new Label();
- }
-
- public Label[] newLabels(int count) {
- Label[] labels = new Label[count];
- for (int i = 0; i < labels.length; ++i)
- labels[i] = newLabel();
- return labels;
- }
-
- protected static class OffsetToPatch {
- public final int pc;
- public final int size;
- public final int instrPC;
- public final Label label;
-
- public OffsetToPatch(int pc, int size, int instrPC, Label label) {
- this.pc = pc;
- this.size = size;
- this.instrPC = instrPC;
- this.label = label;
- }
- }
-
- protected void recordOffsetToPatch(int offsetPC,
- int size,
- int instrPC,
- Label label) {
- offsetToPatch.add(new OffsetToPatch(offsetPC, size, instrPC, label));
- }
-
- protected void patchAllOffset() throws OffsetTooBigException {
- Iterator offsetIt = offsetToPatch.iterator();
- while (offsetIt.hasNext()) {
- OffsetToPatch offset = (OffsetToPatch)offsetIt.next();
- int offsetValue = offset.label.getAnchor() - offset.instrPC;
- if (offset.size == 16) {
- checkOffset16(offsetValue);
- codeArray.putU2(offset.pc, offsetValue);
- } else
- codeArray.putU4(offset.pc, offsetValue);
- }
- }
-
- // Exception handling
- //////////////////////////////////////////////////////////////////////
-
- public class ExceptionHandler {
- protected int startPC, endPC, handlerPC;
- protected final String catchType;
- protected final int catchTypeIndex;
-
- public void setStartPC(int pc) {
- this.startPC = pc;
- }
-
- public int getStartPC() {
- return this.startPC;
- }
-
- public void setEndPC(int pc) {
- this.endPC = pc;
- }
-
- public int getEndPC() {
- return this.endPC;
- }
-
- public void setHandlerPC(int pc) {
- this.handlerPC = pc;
- }
-
- public int getHandlerPC() {
- return this.handlerPC;
- }
-
- public ExceptionHandler(String catchType) {
- this(0, 0, 0, catchType);
- }
-
- public ExceptionHandler(int startPC,
- int endPC,
- int handlerPC,
- String catchType) {
- this.startPC = startPC;
- this.endPC = endPC;
- this.handlerPC = handlerPC;
- this.catchType = catchType;
- this.catchTypeIndex = (catchType == null
- ? 0
- : pool.addClass(catchType));
- }
-
- public ExceptionHandler(DataInputStream stream) throws IOException {
- this.startPC = stream.readShort();
- this.endPC = stream.readShort();
- this.handlerPC = stream.readShort();
- this.catchTypeIndex = stream.readShort();
- this.catchType = (catchTypeIndex == 0
- ? null
- : pool.lookupClass(catchTypeIndex));
- }
-
- public void writeTo(DataOutputStream stream) throws IOException {
- stream.writeShort(startPC);
- stream.writeShort(endPC);
- stream.writeShort(handlerPC);
- stream.writeShort(catchTypeIndex);
- }
-
- // Follows javap output format for exception handlers.
- /*@Override*/public String toString() {
- StringBuffer buf = new StringBuffer(" ");
- if (startPC < 10) buf.append(" ");
- buf.append(startPC);
- buf.append(" ");
- if (endPC < 10) buf.append(" ");
- buf.append(endPC);
- buf.append(" ");
- buf.append(handlerPC);
- buf.append(" ");
- if (catchType != null) {
- buf.append("Class ");
- buf.append(catchType);
- }
- else
- buf.append("any");
- return buf.toString();
- }
-
- }
-
- public void addExceptionHandler(ExceptionHandler handler) {
- assert !frozen;
- exceptionHandlers.add(handler);
- }
-
- public void addExceptionHandler(int startPC,
- int endPC,
- int handlerPC,
- String catchType) {
- addExceptionHandler(new ExceptionHandler(startPC,
- endPC,
- handlerPC,
- catchType));
- }
-
- public void addFinallyHandler(int startPC, int endPC, int handlerPC) {
- assert !frozen;
- addExceptionHandler(startPC, endPC, handlerPC, null);
- }
-
- public List/*<ExceptionHandler>*/ getExceptionHandlers() {
- return exceptionHandlers;
- }
-
- // Line numbers
- //////////////////////////////////////////////////////////////////////
-
- protected int[] lineNumbers = null;
- protected void ensureLineNumberCapacity(int endPC) {
- assert !frozen;
- if (lineNumbers == null) {
- lineNumbers = new int[endPC];
- addAttribute(context.JLineNumberTableAttribute(owner.getOwner(),
- this));
- } else if (lineNumbers.length < endPC) {
- int[] newLN = new int[Math.max(endPC, lineNumbers.length * 2)];
- System.arraycopy(lineNumbers, 0, newLN, 0, lineNumbers.length);
- lineNumbers = newLN;
- }
- }
-
- /**
- * Set all line numbers in the interval [startPC, endPC) to
- * line, overwriting existing line numbers.
- */
- public void setLineNumber(int startPC, int endPC, int line) {
- ensureLineNumberCapacity(endPC);
- Arrays.fill(lineNumbers, startPC, endPC, line);
- }
-
- public void setLineNumber(int instrPC, int line) {
- setLineNumber(instrPC, instrPC + 1, line);
- }
-
- /** Sets all non-filled line numbers in the interval [startPC, endPC)
- * to 'line'.
- */
- public void completeLineNumber(int startPC, int endPC, int line) {
- ensureLineNumberCapacity(endPC);
- for (int pc = startPC; pc < endPC; ++pc)
- if (lineNumbers[pc] == 0) lineNumbers[pc] = line;
- }
-
- public int[] getLineNumbers() {
- assert frozen;
- if (lineNumbers == null) return new int[0];
- else if (lineNumbers.length == getPC()) return lineNumbers;
- else {
- int[] trimmedLN = new int[getPC()];
- System.arraycopy(lineNumbers, 0,
- trimmedLN, 0,
- Math.min(lineNumbers.length, trimmedLN.length));
- return trimmedLN;
- }
- }
-
- // Output
- //////////////////////////////////////////////////////////////////////
-
- public void writeTo(DataOutputStream stream) throws IOException {
- assert frozen;
- stream.writeInt(getSize());
- codeArray.writeTo(stream);
- }
-
- // Follows javap output format for opcodes.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer();
- JOpcode opcode = null;
- int pc = 0, addr = 0;
- while (pc < codeArray.getSize()) {
- buf.append("\n ");
- buf.append(pc);
- buf.append(":\t");
- opcode = JOpcode.OPCODES[codeArray.getU1(pc)];
- buf.append(decode(opcode, pc));
- if (opcode.code == JOpcode.cTABLESWITCH ||
- opcode.code == JOpcode.cLOOKUPSWITCH) {
- addr = ((pc / 4 + 1) + 1) * 4; // U4 aligned data
- int low = codeArray.getU4(addr);
- int high = codeArray.getU4(addr+4);
- pc = addr + (2/*low+high*/ + (high - low + 1)/*targets*/) * 4;
- } else
- pc += opcode.getSize();
- }
- if (exceptionHandlers.size() > 0) {
- buf.append("\n Exception table:\n from to target type\n");
- Iterator it = exceptionHandlers.iterator();
- while (it.hasNext()) {
- ExceptionHandler exh = (ExceptionHandler)it.next();
- buf.append(exh);
- buf.append("\n");
- }
- }
- return buf.toString();
- }
-
- private String decode(JOpcode opcode, int pc) {
- String ownerClassName = owner.getOwner().getName();
- int data, data2;
- StringBuilder buf = new StringBuilder();
- buf.append(opcode.name.toLowerCase());
- switch (opcode.code) {
- case JOpcode.cALOAD: case JOpcode.cASTORE: case JOpcode.cBIPUSH:
- case JOpcode.cDLOAD: case JOpcode.cDSTORE:
- case JOpcode.cFLOAD: case JOpcode.cFSTORE:
- case JOpcode.cILOAD: case JOpcode.cISTORE:
- case JOpcode.cLLOAD: case JOpcode.cLSTORE:
- data = codeArray.getU1(pc+1);
- buf.append("\t");
- buf.append(data);
- break;
- case JOpcode.cLDC:
- data = codeArray.getU1(pc+1);
- buf.append("\t#");
- buf.append(data);
- buf.append("; ");
- buf.append(pool.lookupEntry(data).toComment(ownerClassName));
- break;
- case JOpcode.cNEWARRAY:
- data = codeArray.getU1(pc+1);
- buf.append(" ");
- buf.append(JType.tagToString(data));
- break;
- case JOpcode.cIINC:
- data = codeArray.getU1(pc+1);
- data2 = codeArray.getU1(pc+2);
- buf.append("\t");
- buf.append(data);
- buf.append(", ");
- buf.append(data2);
- break;
- case JOpcode.cSIPUSH:
- data = codeArray.getU2(pc+1);
- buf.append("\t");
- buf.append(data);
- break;
- case JOpcode.cANEWARRAY: case JOpcode.cCHECKCAST:
- case JOpcode.cGETFIELD: case JOpcode.cGETSTATIC:
- case JOpcode.cINSTANCEOF:
- case JOpcode.cINVOKESPECIAL: case JOpcode.cINVOKESTATIC:
- case JOpcode.cINVOKEVIRTUAL:
- case JOpcode.cLDC_W: case JOpcode.cLDC2_W: case JOpcode.cNEW:
- case JOpcode.cPUTFIELD: case JOpcode.cPUTSTATIC:
- data = codeArray.getU2(pc+1);
- buf.append("\t#");
- buf.append(data);
- buf.append("; ");
- buf.append(pool.lookupEntry(data).toComment(ownerClassName));
- break;
- case JOpcode.cIF_ACMPEQ: case JOpcode.cIF_ACMPNE:
- case JOpcode.cIFEQ: case JOpcode.cIFGE: case JOpcode.cIFGT:
- case JOpcode.cIFLE: case JOpcode.cIFLT: case JOpcode.cIFNE:
- case JOpcode.cIFNONNULL: case JOpcode.cIFNULL:
- case JOpcode.cIF_ICMPEQ: case JOpcode.cIF_ICMPGE:
- case JOpcode.cIF_ICMPGT: case JOpcode.cIF_ICMPLE:
- case JOpcode.cIF_ICMPLT: case JOpcode.cIF_ICMPNE:
- data = codeArray.getU2(pc+1); // maybe S2 offset
- buf.append("\t");
- buf.append(pc+data);
- break;
- case JOpcode.cGOTO:
- data = codeArray.getS2(pc+1); // always S2 offset
- buf.append("\t");
- buf.append(pc+data);
- break;
- case JOpcode.cINVOKEINTERFACE:
- data = codeArray.getU2(pc+1);
- data2 = codeArray.getU1(pc+3);
- buf.append("\t#");
- buf.append(data);
- buf.append(", ");
- buf.append(data2);
- buf.append("; ");
- buf.append(pool.lookupEntry(data).toComment(ownerClassName));
- break;
- case JOpcode.cTABLESWITCH:
- buf.append("{ //");
- int addr = ((pc / 4 + 1) + 1) * 4; // U4 aligned data
- int low = codeArray.getU4(addr);
- int high = codeArray.getU4(addr+4);
- buf.append(low);
- buf.append(" to ");
- buf.append(high);
- for (int i = low; i <= high; ++i) {
- buf.append("\n\t\t");
- buf.append(i);
- buf.append(": ");
- buf.append(pc+codeArray.getU4(addr+(i-1)*4));
- buf.append(";");
- }
- buf.append("\n\t\tdefault: ");
- buf.append(pc+codeArray.getU4(addr-4));
- buf.append(" }");
- default:
- }
- return buf.toString();
- }
-}
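The Label and OffsetToPatch machinery above handles forward branches: when a branch targets a label that is not yet anchored, a zero placeholder offset is written and its position recorded, and patchAllOffset fills in the real offset once the label's target PC is known. The standalone sketch below shows the same forward-reference patching idea in isolation; the class and method names (BranchPatchSketch, emitForwardGoto, anchorHere) are illustrative placeholders rather than FJBG API, and the byte-list representation is chosen only for brevity.

    import java.util.ArrayList;
    import java.util.List;

    // Minimal sketch of forward-reference patching: emit a placeholder 16-bit
    // offset for a branch whose target is unknown, remember where the
    // placeholder sits, and patch it once the target PC is anchored.
    final class BranchPatchSketch {
        private static final class Fixup {
            final int offsetPos; // byte position of the placeholder offset
            final int instrPC;   // PC of the branch instruction itself
            Fixup(int offsetPos, int instrPC) { this.offsetPos = offsetPos; this.instrPC = instrPC; }
        }

        private final List<Byte> code = new ArrayList<>();
        private final List<Fixup> fixups = new ArrayList<>();

        int pc() { return code.size(); }

        // Emit a goto-style instruction (opcode 0xA7) with an unresolved target.
        void emitForwardGoto() {
            int instrPC = pc();
            code.add((byte) 0xA7);
            fixups.add(new Fixup(pc(), instrPC));
            code.add((byte) 0); // placeholder high byte, patched later
            code.add((byte) 0); // placeholder low byte, patched later
        }

        // Anchor the pending branches here: each offset becomes targetPC - instrPC.
        void anchorHere() {
            int targetPC = pc();
            for (Fixup f : fixups) {
                int offset = targetPC - f.instrPC;
                if (offset < Short.MIN_VALUE || offset > Short.MAX_VALUE)
                    throw new IllegalStateException("offset too big to fit in 16 bits: " + offset);
                code.set(f.offsetPos, (byte) (offset >> 8));
                code.set(f.offsetPos + 1, (byte) offset);
            }
            fixups.clear();
        }

        public static void main(String[] args) {
            BranchPatchSketch sketch = new BranchPatchSketch();
            sketch.emitForwardGoto();     // target not known yet
            sketch.code.add((byte) 0x00); // an instruction in between (nop)
            sketch.anchorHere();          // placeholder patched to offset 4
            int patched = (short) (((sketch.code.get(1) & 0xFF) << 8) | (sketch.code.get(2) & 0xFF));
            System.out.println("patched offset = " + patched); // prints 4
        }
    }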
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java
deleted file mode 100644
index 9f3fcf8c6a..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Code attribute, containing code of methods.
- *
- * A Code attribute contains the JVM instructions and auxiliary information
- * for a single method, instance initialization method, or class or interface
- * initialization method. See section 4.8.3 of the JVM specification.
- *
- * @author Michel Schinz, Stephane Micheloud
- * @version 1.1
- */
-
-public class JCodeAttribute extends JAttribute {
- protected final JCode code;
- protected final JMethod owner;
- protected static int UNKNOWN_STACK_SIZE = Integer.MIN_VALUE;
- protected final int maxStackSize;
- protected final int maxLocals;
-
- public JCodeAttribute(FJBGContext context, JClass clazz, JMethod owner) {
- super(context, clazz);
- this.owner = owner;
-
- this.maxStackSize = UNKNOWN_STACK_SIZE;
- this.maxLocals = 0; // unknown
- this.code = owner.getCode();
-
- assert clazz == owner.getOwner();
- }
-
- public JCodeAttribute(FJBGContext context,
- JClass clazz,
- Object owner,
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
- this.owner = (JMethod)owner;
-
- this.maxStackSize = stream.readShort();
- this.maxLocals = stream.readShort();
- this.code = context.JCode(clazz, (JMethod)owner, stream);
-
- int handlersCount = stream.readShort();
- for (int i = 0; i < handlersCount; ++i)
- code.addExceptionHandler(code.new ExceptionHandler(stream));
- List/*<JAttribute>*/ attributes =
- JAttribute.readFrom(context, clazz, code, stream);
- Iterator attrIt = attributes.iterator();
- while (attrIt.hasNext())
- code.addAttribute((JAttribute)attrIt.next());
-
- assert name.equals(getName());
- }
-
- public String getName() { return "Code"; }
-
- // Follows javap output format for Code attribute.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(" Code:");
- buf.append("\n Stack=");
- buf.append(maxStackSize);
- buf.append(", Locals=");
- buf.append(maxLocals);
- buf.append(", Args_size=");
- buf.append(owner.getArgsSize());
- buf.append(code);
- buf.append("\n");
- Iterator it = code.getAttributes().iterator();
- while (it.hasNext()) {
- JAttribute attr = (JAttribute)it.next();
- buf.append(attr);
- buf.append("\n");
- }
- return buf.toString();
- }
-
- protected int getSize() {
- int handlersNum = code.getExceptionHandlers().size();
-
- int attrsSize = 0;
- Iterator attrsIt = code.getAttributes().iterator();
- while (attrsIt.hasNext()) {
- JAttribute attr = (JAttribute)attrsIt.next();
- attrsSize += attr.getSize() + 6;
- }
-
- return 2 // max stack
- + 2 // max locals
- + 4 // code size
- + code.getSize() // code
- + 2 // exception table size
- + 8 * handlersNum // exception table
- + 2 // attributes count
- + attrsSize; // attributes
- }
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- List/*<ExceptionHandler>*/ handlers = code.getExceptionHandlers();
-
- stream.writeShort(code.getMaxStackSize());
- stream.writeShort(owner.getMaxLocals());
-
- code.writeTo(stream);
-
- stream.writeShort(handlers.size());
- Iterator handlerIt = handlers.iterator();
- while (handlerIt.hasNext())
- ((JCode.ExceptionHandler)handlerIt.next()).writeTo(stream);
- JAttribute.writeTo(code.getAttributes(), stream);
- }
-}
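getSize() above totals the fixed fields of a Code attribute (max_stack, max_locals, code_length, the exception table length, the attribute count) plus the code bytes, 8 bytes per exception handler, and each nested attribute's payload plus its 6-byte header. A small self-contained sketch of that arithmetic follows; the method name and the sample figures in main are made up for illustration.

    // Worked sketch of the Code attribute payload arithmetic above; the
    // constants mirror the class file layout (sizes in bytes).
    final class CodeAttributeSizeSketch {
        static int codeAttributePayload(int codeBytes, int handlerCount, int[] nestedAttrPayloads) {
            int attrsSize = 0;
            for (int payload : nestedAttrPayloads)
                attrsSize += payload + 6;      // attribute_name_index (2) + attribute_length (4)
            return 2                           // max_stack
                 + 2                           // max_locals
                 + 4                           // code_length
                 + codeBytes                   // code[]
                 + 2                           // exception_table_length
                 + 8 * handlerCount            // exception_table entries
                 + 2                           // attributes_count
                 + attrsSize;                  // nested attributes
        }

        public static void main(String[] args) {
            // e.g. 42 code bytes, one handler, one nested attribute with a 10-byte payload
            System.out.println(codeAttributePayload(42, 1, new int[] { 10 })); // prints 78
        }
    }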
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java b/src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java
deleted file mode 100644
index d09dfd19a4..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java
+++ /dev/null
@@ -1,377 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import ch.epfl.lamp.util.ByteArray;
-
-/**
- * Iterator used to examine the contents of an instruction list.
- *
- * @author Michel Schinz, Thomas Friedli
- * @version 1.0
- */
-
-public class JCodeIterator {
- protected final JCode code;
- protected final JConstantPool pool;
- protected final ByteArray codeArray;
-
- protected int pc;
- protected JOpcode opcode;
-
- /**
- * Creates a new code iterator with its instruction list
- * and its pc initialized to a given value.
- */
- public JCodeIterator(JCode code, int pc) {
- this.code = code;
- this.pool = code.getOwner().getOwner().getConstantPool();
- this.codeArray = code.codeArray;
- this.pc = pc;
- setOpcode();
- }
-
- public JCodeIterator(JCode code) {
- this(code, 0);
- }
-
- /**
- * Get the current program counter.
- * @return The current program counter.
- */
- public int getPC() { return pc; }
-
- /**
- * Determines the type of the instruction positioned at the
- * current address and updates the current instruction.
- */
- protected void setOpcode() {
- // TODO : check if the current pc is the beginning
- // of an instruction
- opcode = isValid() ? JOpcode.OPCODES[codeArray.getU1(pc)] : null;
- }
-
- /**
- * Returns the opcode of the current instruction.
- * @return The opcode of the current instruction.
- */
- public JOpcode getOpcode() {
- return opcode;
- }
-
- /**
- * Updates the program counter to a given value.
- * @param pc The new value of the program counter.
- */
- public void moveTo(int pc) {
- this.pc = pc;
- setOpcode();
- }
-
- /**
- * Check the validity of the iterator.
- * @return true iff the iterator points to a valid address.
- */
- public boolean isValid() {
- return pc < codeArray.getSize();
- }
-
- /**
- * Advances to the next instruction in code order.
- */
- public void moveToNext() {
- moveTo(pc + getInstructionSize());
- }
-
- /**
- * Moves the current instruction to one of its successors.
- * @param succ The index of the wanted successor in the list of
- * the successors of the current instruction.
- */
- public void moveToSuccessor(int succ) {
- moveTo(getSuccessorPC(succ));
- }
-
- /**
- * Moves the current instruction to the one located at a given
- * offset relative to the current program counter.
- * @param offset The position of the target instruction relative
- * to the current one.
- */
- public void moveRelatively(int offset) {
- moveTo(pc + offset);
- }
-
- /**
- * Returns the size in bytes of the current instruction.
- * @return The size in bytes of the current instruction.
- */
- public int getInstructionSize() {
- if (opcode.size != JOpcode.UNKNOWN) {
- return opcode.size;
- } else if (opcode == JOpcode.TABLESWITCH) {
- int lowOffset = 1 + pad4(pc + 1) + 4;
- int low = codeArray.getS4(pc + lowOffset);
- int high = codeArray.getS4(pc + lowOffset + 4);
- return lowOffset + 8 + 4 * (high - low + 1);
- } else if (opcode == JOpcode.LOOKUPSWITCH) {
- int npairsOffset = 1 + pad4(pc + 1) + 4;
- int npairs = codeArray.getS4(pc + npairsOffset);
- return npairsOffset + 4 + 8 * npairs;
- } else if (opcode == JOpcode.WIDE) {
- if (codeArray.getU1(pc + 1) == JOpcode.cIINC)
- return 6;
- else
- return 4;
- } else
- throw new Error("Unknown size for instruction " + opcode);
- }
-
- /**
- * Returns the number of successors of the current instruction.
- * @return The number of successors of the current instruction.
- */
- public int getSuccessorCount() {
- if (opcode.successorCount != JOpcode.UNKNOWN) {
- return opcode.successorCount;
- } else if (opcode == JOpcode.TABLESWITCH) {
- int lowPos = pc + 1 + pad4(pc + 1) + 4;
- return 1 // default case
- + codeArray.getS4(lowPos + 4) // value of HIGH field
- - codeArray.getS4(lowPos) + 1; // value of LOW field
- } else if (opcode == JOpcode.LOOKUPSWITCH) {
- int npairsPos = pc + 1 + pad4(pc + 1) + 4;
- return 1 + codeArray.getS4(npairsPos);
- } else
- throw new Error("Unknown successors for instruction " + opcode);
- }
-
- /**
- * Returns the address of the successor of the current instruction
- * given its index in the list of successors of the current
- * instruction.
- * @param index The index of the wanted successor in the list of
- * the successors of the current instruction.
- * @return The address of the specific successor.
- */
- public int getSuccessorPC(int index) {
- assert (index >= 0) && (index < getSuccessorCount()) : index;
-
- switch (opcode.jumpKind) {
- case JOpcode.JMP_NEXT:
- return pc + getInstructionSize();
- case JOpcode.JMP_ALWAYS_S2_OFFSET:
- return pc + codeArray.getS2(pc + 1);
- case JOpcode.JMP_ALWAYS_S4_OFFSET:
- return pc + codeArray.getS4(pc + 1);
- case JOpcode.JMP_MAYBE_S2_OFFSET:
- if (index == 0)
- return pc + getInstructionSize();
- else
- return pc + codeArray.getS2(pc + 1);
- case JOpcode.JMP_TABLE: {
- int defaultPos = pc + 1 + pad4(pc + 1);
- if (index == 0)
- return pc + codeArray.getS4(defaultPos);
- else
- return pc + codeArray.getS4(defaultPos + 3*4 + 4 * (index - 1));
- }
- case JOpcode.JMP_LOOKUP: {
- int defaultPos = pc + 1 + pad4(pc + 1);
- if (index == 0)
- return pc + codeArray.getS4(defaultPos);
- else
- return pc + codeArray.getS4(defaultPos + 2*4 + 4 + 8 * (index - 1));
- }
- default:
- throw new Error();
- }
- }
-
- /**
- * Returns the total size of data words put on the stack by the current
- * instruction.
- * @return The total size of data words put on the stack by the current
- * instruction.
- */
- public int getProducedDataSize() {
- if (opcode.getProducedDataTypes() == JOpcode.UNKNOWN_TYPE) {
- switch (opcode.code) {
- case JOpcode.cLDC: case JOpcode.cLDC_W: case JOpcode.cBALOAD:
- return 1;
- case JOpcode.cLDC2_W: case JOpcode.cDUP: case JOpcode.cSWAP:
- return 2;
- case JOpcode.cDUP_X1:
- return 3;
- case JOpcode.cDUP_X2: case JOpcode.cDUP2:
- return 4;
- case JOpcode.cDUP2_X1:
- return 5;
- case JOpcode.cDUP2_X2:
- return 6;
- case JOpcode.cGETSTATIC: case JOpcode.cGETFIELD: {
- JConstantPool.FieldOrMethodRefEntry entry =
- (JConstantPool.FieldOrMethodRefEntry)
- pool.lookupEntry(codeArray.getU2(pc + 1));
- return JType.parseSignature(entry.getSignature()).getSize();
- }
- case JOpcode.cWIDE : {
- int op = codeArray.getU1(pc + 1);
- if (op >= JOpcode.cILOAD && op <= JOpcode.cALOAD) {
- JOpcode opcode2 = JOpcode.OPCODES[op];
- return JType.getTotalSize(opcode2.getProducedDataTypes());
- } else if (op >= JOpcode.cISTORE && op <= JOpcode.cASTORE)
- return 0;
- else return 0; // (IINC)
- }
- default :
- throw new Error(opcode.toString());
- }
- } else
- return JType.getTotalSize(opcode.getProducedDataTypes());
- }
-
- /**
- * Returns the total size of data words taken from the stack by the current
- * instruction.
- * @return The total size of data words taken from the stack by the current
- * instruction.
- */
- public int getConsumedDataSize() {
- if (opcode.getConsumedDataTypes() != JOpcode.UNKNOWN_TYPE)
- return JType.getTotalSize(opcode.getConsumedDataTypes());
- else {
- switch (opcode.code) {
- case JOpcode.cPOP: case JOpcode.cDUP:
- return 1;
- case JOpcode.cPOP2: case JOpcode.cSWAP:
- case JOpcode.cDUP_X1: case JOpcode.cDUP2:
- return 2;
- case JOpcode.cDUP_X2: case JOpcode.cDUP2_X1:
- return 3;
- case JOpcode.cDUP2_X2:
- return 4;
- case JOpcode.cPUTSTATIC: case JOpcode.cPUTFIELD: {
- JConstantPool.FieldOrMethodRefEntry entry =
- (JConstantPool.FieldOrMethodRefEntry)
- pool.lookupEntry(codeArray.getU2(pc + 1));
- return JType.parseSignature(entry.getSignature()).getSize();
- }
- case JOpcode.cINVOKEVIRTUAL: case JOpcode.cINVOKESPECIAL:
- case JOpcode.cINVOKESTATIC: case JOpcode.cINVOKEINTERFACE : {
- JConstantPool.FieldOrMethodRefEntry entry =
- (JConstantPool.FieldOrMethodRefEntry)
- pool.lookupEntry(codeArray.getU2(pc + 1));
- JMethodType tp = (JMethodType)
- JType.parseSignature(entry.getSignature());
- return tp.getArgsSize()
- + (opcode == JOpcode.INVOKESTATIC ? 0 : 1);
- }
- case JOpcode.cWIDE : {
- int op = codeArray.getU1(pc + 1);
- if (op >= JOpcode.cILOAD && op <= JOpcode.cALOAD)
- return 0;
- else if (op >= JOpcode.cISTORE && op <= JOpcode.cASTORE) {
- JOpcode opcode2 = JOpcode.OPCODES[op];
- return JType.getTotalSize(opcode2.getConsumedDataTypes());
- } else
- return 0; // (IINC)
- }
- case JOpcode.cMULTIANEWARRAY :
- return codeArray.getU1(pc + 3);
- default:
- throw new Error(opcode.toString());
- }
- }
- }
-
- /**
- * Returns the number of data types put on the stack by the current
- * instruction.
- * @return The number of data types put on the stack by the current
- * instruction.
- */
- public int getProducedDataTypesNumber() {
- if (opcode.getProducedDataTypes() != JOpcode.UNKNOWN_TYPE)
- return opcode.getProducedDataTypes().length;
- else {
- switch (opcode.code) {
- case JOpcode.cLDC: case JOpcode.cLDC_W: case JOpcode.cLDC2_W:
- case JOpcode.cBALOAD: case JOpcode.cGETSTATIC:
- case JOpcode.cGETFIELD:
- return 1;
- case JOpcode.cDUP: case JOpcode.cSWAP:
- return 2;
- case JOpcode.cDUP_X1:
- return 3;
- case JOpcode.cWIDE: {
- int op = codeArray.getU1(pc + 1);
- if (op >= JOpcode.cILOAD && op <= JOpcode.cALOAD)
- return 1;
- else if (op >= JOpcode.cISTORE && op <= JOpcode.cASTORE)
- return 0;
- else
- return 0; // (IINC)
- }
- default:
- throw new Error("JOpcode implementation error");
- }
- }
- }
-
- /**
- * Returns the number of data types taken from the stack by the current
- * instruction.
- * @return The number of data types taken from the stack by the current
- * instruction.
- */
-// public int getConsumedDataTypesNumber() {
-// if (opcode.getConsumedDataTypes() == JOpcode.UNKNOWN_TYPE) {
-// switch (opcode.code) {
-// case 87 : return 1; // POP
-// case 88 : return 2; // POP2
-// case 89 : return 1; // DUP
-// case 90 : return 2; // DUP_X1
-// case 91 : // DUP_X2
-// case 92 : // DUP2
-// case 93 : // DUP2_X1
-// case 94 : // DUP2_X2
-// throw new UnsupportedOperationException("Opcode " + opcode.name
-// + " has a stack-dependant"
-// + " data types consumption");
-// case 95 : return 2; // SWAP
-// case 179 : return 1; // PUTSTATIC
-// case 181 : return 1; // PUTFIELD
-// case 182 : // INVOKEVIRTUAL
-// case 183 : // INVOKESPECIAL
-// case 185 : // INVOKEINTERFACE
-// s = epool.getClassMethodRef(codeArray.getU2(pc + 1)).split(" ")[3];
-// return ((JMethodType)JType.parseSignature(s)).argTypes.length + 1;
-// case 184 : // INVOKESTATIC
-// s = epool.getClassMethodRef(codeArray.getU2(pc + 1)).split(" ")[3];
-// return ((JMethodType)JType.parseSignature(s)).argTypes.length;
-// case 196 : // WIDE
-// int op = codeArray.getU1(pc + 1);
-// if (op >= 21 && op <= 25) return 0; // (xLOAD)
-// else if (op >= 54 && op <= 58) // (xSTORE)
-// return JOpcode.OPCODES[op].getConsumedDataTypes().length;
-// else return 0; // (IINC)
-// case 197 : return codeArray.getU1(pc + 3); // MULTIANEWARRAY
-// default : throw new Error("JOpcode implementation error");
-// }
-// } else return opcode.getConsumedDataTypes().length;
-// }
-
-
- // Return the number between 0 and 3 which, if added to the given
- // value, would yield a multiple of 4.
- protected int[] padding = { 0, 3, 2, 1 };
- protected int pad4(int value) {
- return padding[value % 4];
- }
-}
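pad4 above returns how many padding bytes bring a program counter to the next multiple of 4, which tableswitch and lookupswitch need before their 4-byte-aligned operand data. A minimal sketch of that alignment rule, with an illustrative class name:

    // Sketch of the 4-byte alignment helper: pad4(v) is the number of bytes
    // to add to v (v >= 0) to reach the next multiple of 4.
    final class Pad4Sketch {
        private static final int[] PADDING = { 0, 3, 2, 1 };

        static int pad4(int value) {
            return PADDING[value % 4];
        }

        public static void main(String[] args) {
            for (int v = 0; v < 8; v++) {
                int aligned = v + pad4(v);
                System.out.println(v + " -> +" + pad4(v) + " = " + aligned); // aligned % 4 == 0
            }
        }
    }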
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java b/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java
deleted file mode 100644
index 9867e01b25..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java
+++ /dev/null
@@ -1,771 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.HashMap;
-
-/**
- * Constant pool, holding constants for a Java class file.
- *
- * @author Michel Schinz
- * @version 2.0
- */
-
-public class JConstantPool {
- protected boolean frozen = false;
-
- protected HashMap/*<Entry,Integer>*/ entryToIndex = new HashMap();
- protected Entry[] indexToEntry;
- protected int currIndex;
-
- public static final short CONSTANT_Utf8 = 1;
- public static final short CONSTANT_Integer = 3;
- public static final short CONSTANT_Float = 4;
- public static final short CONSTANT_Long = 5;
- public static final short CONSTANT_Double = 6;
- public static final short CONSTANT_Class = 7;
- public static final short CONSTANT_String = 8;
- public static final short CONSTANT_Fieldref = 9;
- public static final short CONSTANT_Methodref = 10;
- public static final short CONSTANT_InterfaceMethodref = 11;
- public static final short CONSTANT_NameAndType = 12;
-
- protected JConstantPool(FJBGContext context) {
- indexToEntry = new Entry[8];
- currIndex = 1;
- }
-
- protected JConstantPool(FJBGContext context, DataInputStream stream)
- throws IOException {
- int count = stream.readShort();
- indexToEntry = new EntryIndex[count];
-
- currIndex = 1;
- while (currIndex < count) {
- EntryIndex e;
- int tag = stream.readByte();
-
- switch (tag) {
- case CONSTANT_Utf8:
- e = new Utf8Entry(stream);
- // no duplicates
- entryToIndex.put(e, new Integer(currIndex));
- break;
- case CONSTANT_Integer:
- e = new IntegerEntry(stream);
- break;
- case CONSTANT_Float:
- e = new FloatEntry(stream);
- break;
- case CONSTANT_Long:
- e = new LongEntry(stream);
- break;
- case CONSTANT_Double:
- e = new DoubleEntry(stream);
- break;
- case CONSTANT_Class:
- e = new DescriptorEntryIndex(stream);
- break;
- case CONSTANT_String:
- e = new StringEntryIndex(stream);
- break;
- case CONSTANT_Fieldref:
- case CONSTANT_Methodref:
- case CONSTANT_InterfaceMethodref:
- e = new FieldOrMethodRefEntryIndex(tag, stream);
- break;
- case CONSTANT_NameAndType:
- e = new NameAndTypeEntryIndex(stream);
- break;
- default:
- throw new IllegalArgumentException("unknown entry in pool: " + tag);
- }
- indexToEntry[currIndex] = e;
- currIndex += e.getSize();
- }
- }
-
- public void freeze() { frozen = true; }
-
- /**
- * Returns a string describing the type of an entry,
- * given its tag.
- * @param tag The tag representing the type of the
- * constant pool entry
- */
- public String getEntryType(int tag) {
- switch (tag) {
- case CONSTANT_Utf8 : return "Utf8";
- case CONSTANT_Integer : return "Integer";
- case CONSTANT_Float : return "Float";
- case CONSTANT_Long : return "Long";
- case CONSTANT_Double : return "Double";
- case CONSTANT_Class : return "Class";
- case CONSTANT_String : return "String";
- case CONSTANT_Fieldref : return "Field";
- case CONSTANT_Methodref : return "Method";
- case CONSTANT_InterfaceMethodref : return "InterfaceMethod";
- case CONSTANT_NameAndType : return "NameAndType";
- default : throw new Error("invalid constant pool tag : " + tag);
- }
- }
-
- public int addClass(String className) {
- return addDescriptor(className.replace('.', '/'));
- }
-
- public int addDescriptor(JReferenceType type) {
- return addDescriptor(type.getDescriptor());
- }
-
- protected int addDescriptor(String name) {
- return addEntry(new DescriptorEntryValue(name));
- }
-
- public int addClassMethodRef(String className,
- String methodName,
- String signature) {
- return addMethodRef(true, className, methodName, signature);
- }
-
- public int addInterfaceMethodRef(String className,
- String methodName,
- String signature) {
- return addMethodRef(false, className, methodName, signature);
- }
-
- public int addMethodRef(boolean isClass,
- String className,
- String methodName,
- String signature) {
- return addEntry(new FieldOrMethodRefEntryValue(isClass
- ? CONSTANT_Methodref
- : CONSTANT_InterfaceMethodref,
- className,
- methodName,
- signature));
- }
-
- public int addFieldRef(String className,
- String fieldName,
- String signature) {
- return addEntry(new FieldOrMethodRefEntryValue(CONSTANT_Fieldref,
- className,
- fieldName,
- signature));
- }
-
- public int addInteger(int value) {
- return addEntry(new IntegerEntry(value));
- }
-
- public int addFloat(float value) {
- return addEntry(new FloatEntry(value));
- }
-
- public int addLong(long value) {
- return addEntry(new LongEntry(value));
- }
-
- public int addDouble(double value) {
- return addEntry(new DoubleEntry(value));
- }
-
- public int addString(String value) {
- return addEntry(new StringEntryValue(value));
- }
-
- public int addNameAndType(String name, String descriptor) {
- return addEntry(new NameAndTypeEntryValue(name, descriptor));
- }
-
- public int addUtf8(String value) {
- return addEntry(new Utf8Entry(value));
- }
-
- public int addUtf8(byte[] value) {
- return addEntry(new Utf8Entry(value));
- }
-
- protected int addEntry(EntryValue e) {
- assert !frozen;
- Integer idx = (Integer)entryToIndex.get(e);
- if (idx != null)
- return idx.intValue();
-
- e.addChildren();
-
- int index = currIndex;
- currIndex += e.getSize();
-
- entryToIndex.put(e, new Integer(index));
- if (index >= indexToEntry.length) {
- Entry[] newI2E = new Entry[indexToEntry.length * 2];
- System.arraycopy(indexToEntry, 0, newI2E, 0, indexToEntry.length);
- indexToEntry = newI2E;
- }
- indexToEntry[index] = e;
- return index;
- }
-
- /// Lookup methods
- //////////////////////////////////////////////////////////////////////
-
- public Entry lookupEntry(int index) {
- assert index > 0 && index < currIndex
- : "invalid index: " + index;
- assert indexToEntry[index] != null
- : "invalid index (null contents): " + index;
- return indexToEntry[index];
- }
-
- public String lookupClass(int index) {
- DescriptorEntry entry = (DescriptorEntry)lookupEntry(index);
- return entry.getValue();
- }
-
- public String lookupNameAndType(int index) {
- NameAndTypeEntry entry = (NameAndTypeEntry)lookupEntry(index);
- return entry.getName()+":"+entry.getDescriptor();
- }
-
- public String lookupUtf8(int index) {
- Utf8Entry entry = (Utf8Entry)lookupEntry(index);
- return entry.getValue();
- }
-
- /// Output
- //////////////////////////////////////////////////////////////////////
-
- public void writeTo(DataOutputStream stream) throws IOException {
- if (! frozen) freeze();
-
- stream.writeShort(currIndex);
- for (int i = 0; i < currIndex; ++i) {
- Entry entry = indexToEntry[i];
- if (entry != null) {
- stream.writeByte(entry.getTag());
- entry.writeContentsTo(stream);
- }
- }
- }
-
- // Follows javap output format for constant pool.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(" Constant pool:");
- for (int i = 0; i < currIndex; ++i) {
- Entry entry = indexToEntry[i];
- if (entry != null) {
- if (i > 0) buf.append("\n");
- buf.append("const #");
- buf.append(i);
- buf.append(" = ");
- buf.append(entry);
- }
- }
- buf.append("\n");
- return buf.toString();
- }
-
- /// Classes for the various kinds of entries
- //////////////////////////////////////////////////////////////////////
-
- public interface Entry {
- public int getTag();
-
- int getSize();
- void writeContentsTo(DataOutputStream stream) throws IOException;
- String toComment(String ownerClassName);
- }
-
- protected interface EntryValue extends Entry {
- abstract void addChildren();
- }
-
- protected interface EntryIndex extends Entry {
- abstract void fetchChildren();
- }
-
- abstract protected class ChildlessEntry implements EntryValue, EntryIndex {
- public void addChildren() {}
- public void fetchChildren() {}
- }
-
- public class IntegerEntry extends ChildlessEntry implements Entry {
- private final int value;
- public IntegerEntry(int value) { this.value = value; }
- public IntegerEntry(DataInputStream stream) throws IOException {
- this(stream.readInt());
- }
-
- public int hashCode() { return value; }
- public boolean equals(Object o) {
- return o instanceof IntegerEntry && ((IntegerEntry)o).value == value;
- }
-
- public int getTag() { return CONSTANT_Integer; }
- public int getValue() { return value; }
-
- public int getSize() { return 1; }
- public void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeInt(value);
- }
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer("int\t");
- buf.append(getValue());
- buf.append(";");
- return buf.toString();
- }
- public String toComment(String ownerClassname) {
- return "//int "+getValue();
- }
- }
-
- public class FloatEntry extends ChildlessEntry implements Entry {
- private final float value;
- public FloatEntry(float value) { this.value = value; }
- public FloatEntry(DataInputStream stream) throws IOException {
- this(stream.readFloat());
- }
-
- public int hashCode() { return (int)value; }
- public boolean equals(Object o) {
- return o instanceof FloatEntry && ((FloatEntry)o).value == value;
- }
-
- public int getTag() { return CONSTANT_Float; }
- public float getValue() { return value; }
-
- public int getSize() { return 1; }
- public void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeFloat(value);
- }
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer("float\t");
- buf.append(getValue());
- buf.append("f");
- return buf.toString();
- }
- public String toComment(String ownerClassname) {
- return "//float "+getValue()+"f";
- }
- }
-
- public class LongEntry extends ChildlessEntry implements Entry {
- private final long value;
- public LongEntry(long value) { this.value = value; }
- public LongEntry(DataInputStream stream) throws IOException {
- this(stream.readLong());
- }
-
- public int hashCode() { return (int)value; }
- public boolean equals(Object o) {
- return o instanceof LongEntry && ((LongEntry)o).value == value;
- }
-
- public int getTag() { return CONSTANT_Long; }
- public long getValue() { return value; }
-
- public int getSize() { return 2; }
- public void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeLong(value);
- }
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer("long\t");
- buf.append(getValue());
- buf.append("l;");
- return buf.toString();
- }
- public String toComment(String ownerClassname) {
- return "//long "+getValue()+"l";
- }
- }
-
- public class DoubleEntry extends ChildlessEntry implements Entry {
- private final double value;
- public DoubleEntry(double value) { this.value = value; }
- public DoubleEntry(DataInputStream stream) throws IOException {
- this(stream.readDouble());
- }
-
- public int hashCode() { return (int)value; }
- public boolean equals(Object o) {
- return o instanceof DoubleEntry && ((DoubleEntry)o).value == value;
- }
-
- public int getTag() { return CONSTANT_Double; }
- public double getValue() { return value; }
-
- public int getSize() { return 2; }
- public void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeDouble(value);
- }
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer("double\t");
- buf.append(getValue());
- return buf.toString();
- }
- public String toComment(String ownerClassname) {
- return "//double "+getValue();
- }
- }
-
- public class Utf8Entry extends ChildlessEntry implements Entry {
- private final String value;
- private final byte[] bytes;
- public Utf8Entry(String value) {
- this.value = value.intern();
- this.bytes = null;
- }
- public Utf8Entry(DataInputStream stream) throws IOException {
- this(stream.readUTF());
- }
- public Utf8Entry(byte[] bytes) {
- this.bytes = bytes;
- this.value = null;
- }
-
- public int hashCode() {
- if (bytes != null) return bytes.hashCode();
- return value.hashCode();
- }
- public boolean equals(Object o) {
- boolean isEqual = o instanceof Utf8Entry;
- if (bytes != null) {
- isEqual = isEqual && ((Utf8Entry)o).bytes == bytes;
- }
- else {
- isEqual = isEqual && ((Utf8Entry)o).value == value;
- }
- return isEqual;
- }
-
- public int getTag() { return CONSTANT_Utf8; }
- public String getValue() { return value; }
- public byte[] getBytes() { return bytes; }
-
- public int getSize() { return 1; }
- public void writeContentsTo(DataOutputStream stream) throws IOException {
- if (bytes != null) {
- if (bytes.length > 65535) {
- throw new IOException("String literal of length " + bytes.length + " does not fit in Classfile");
- }
- stream.writeShort(bytes.length);
- stream.write(bytes);
- }
- else
- stream.writeUTF(value);
- }
- // Follows javap output format for Utf8 pool entries.
- public String toString() { return "Asciz\t"+escaped(getValue())+";"; }
- public String toComment(String ownerClassname) {
- return "//Asciz "+escaped(getValue());
- }
- private String escaped(String s) {
- return s.replace("\n", "\\n");
- }
- }
-
- abstract public class StringEntry implements Entry {
- protected String value;
- protected int valueIndex;
-
- public int hashCode() {
- assert value != null;
- return value.hashCode();
- }
- public boolean equals(Object o) {
- return o instanceof StringEntry && ((StringEntry)o).value == value;
- }
-
- public int getTag() { return CONSTANT_String; }
- public String getValue() { return value; }
-
- public int getSize() { return 1; }
- public void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(valueIndex);
- }
- // Follows javap output format for String pool entries.
- public String toString() {
- return "String\t#"+valueIndex+";\t// "+escaped(getValue());
- }
- public String toComment(String ownerClassname) {
- return "//String "+escaped(getValue());
- }
- private String escaped(String s) {
- return s.replace("\n", "\\n");
- }
- }
-
- public class StringEntryValue extends StringEntry implements EntryValue {
- public StringEntryValue(String value) {
- this.value = value.intern();
- }
- public void addChildren() {
- valueIndex = addUtf8(value);
- }
- }
-
- public class StringEntryIndex extends StringEntry implements EntryIndex {
- public StringEntryIndex(int valueIndex) {
- this.valueIndex = valueIndex;
- }
- public StringEntryIndex(DataInputStream stream) throws IOException {
- this(stream.readShort());
- }
- public String getValue() {
- if (value == null) fetchChildren();
- return super.getValue();
- }
- public void fetchChildren() {
- value = lookupUtf8(valueIndex);
- }
- }
-
- abstract public class DescriptorEntry implements Entry {
- protected String name;
- protected int nameIndex;
-
- public int hashCode() {
- assert name != null;
- return name.hashCode();
- }
- public boolean equals(Object o) {
- return o instanceof DescriptorEntry && ((DescriptorEntry)o).name == name;
- }
-
- public int getTag() { return CONSTANT_Class; }
- public String getValue() { return name; }
-
- public int getSize() { return 1; }
- public void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(nameIndex);
- }
- // Follows javap output format for class pool entries.
- public String toString() {
- StringBuffer buf = new StringBuffer("class\t#");
- buf.append(nameIndex);
- buf.append(";\t// ");
- buf.append(getClassName());
- return buf.toString();
- }
- public String toComment(String ownerClassname) {
- return "//class "+getClassName();
- }
- private String getClassName() {
- StringBuffer buf = new StringBuffer();
- String value = getValue();
- if (value.startsWith("[")) buf.append("\"");
- buf.append(value);
- if (value.startsWith("[")) buf.append("\"");
- return buf.toString();
- }
- }
-
- protected class DescriptorEntryValue
- extends DescriptorEntry
- implements EntryValue {
- public DescriptorEntryValue(String name) { this.name = name.intern(); }
- public void addChildren() {
- nameIndex = addUtf8(name);
- }
- }
-
- protected class DescriptorEntryIndex
- extends DescriptorEntry
- implements EntryIndex {
- public DescriptorEntryIndex(int nameIndex) { this.nameIndex = nameIndex; }
- public DescriptorEntryIndex(DataInputStream stream) throws IOException {
- this(stream.readShort());
- }
- public String getValue() {
- if (name == null) fetchChildren();
- return super.getValue();
- }
- public void fetchChildren() {
- name = lookupUtf8(nameIndex);
- }
- }
-
- abstract public class FieldOrMethodRefEntry implements Entry {
- private final int tag;
- protected String className, thingName, signature;
- protected int classIndex, nameAndTypeIndex;
-
- public FieldOrMethodRefEntry(int tag) {
- assert tag == CONSTANT_Fieldref
- || tag == CONSTANT_Methodref
- || tag == CONSTANT_InterfaceMethodref;
-
- this.tag = tag;
- }
-
- public int hashCode() {
- return tag
- + className.hashCode()
- + thingName.hashCode()
- + signature.hashCode();
- }
- public boolean equals(Object o) {
- return o instanceof FieldOrMethodRefEntry
- && ((FieldOrMethodRefEntry)o).tag == tag
- && ((FieldOrMethodRefEntry)o).className == className
- && ((FieldOrMethodRefEntry)o).thingName == thingName
- && ((FieldOrMethodRefEntry)o).signature == signature;
- }
-
- public int getTag() { return tag; }
- public String getClassName() { return className; }
- public String getFieldOrMethodName() { return thingName; }
- public String getSignature() { return signature; }
-
- public int getSize() { return 1; }
- public void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(classIndex);
- stream.writeShort(nameAndTypeIndex);
- }
- // Follows javap output format for field/method pool entries.
- public String toString() {
- return getEntryType(tag)+"\t#"+classIndex+".#"+nameAndTypeIndex+
- ";\t// "+getName("")+":"+signature;
- }
- public String toComment(String ownerClassName) {
- return "//"+getEntryType(tag)+" "+getName(ownerClassName)+":"+signature;
- }
- private String getName(String ownerClassName) {
- String name = getFieldOrMethodName();
- if (JMethod.INSTANCE_CONSTRUCTOR_NAME.equals(name))
- name = "\""+name+"\"";
- if (!getClassName().equals(ownerClassName))
- name = getClassName()+"."+name;
- return name;
- }
- }
-
- protected class FieldOrMethodRefEntryValue
- extends FieldOrMethodRefEntry
- implements EntryValue {
- public FieldOrMethodRefEntryValue(int tag,
- String className,
- String thingName,
- String signature) {
- super(tag);
- this.className = className.intern();
- this.thingName = thingName.intern();
- this.signature = signature.intern();
- }
-
- public void addChildren() {
- classIndex = addClass(className);
- nameAndTypeIndex = addNameAndType(thingName, signature);
- }
- }
-
- protected class FieldOrMethodRefEntryIndex
- extends FieldOrMethodRefEntry
- implements EntryIndex {
- public FieldOrMethodRefEntryIndex(int tag,
- int classIndex,
- int nameAndTypeIndex) {
- super(tag);
- this.classIndex = classIndex;
- this.nameAndTypeIndex = nameAndTypeIndex;
- }
- public FieldOrMethodRefEntryIndex(int tag, DataInputStream stream)
- throws IOException {
- this(tag, stream.readShort(), stream.readShort());
- }
- public String getClassName() {
- if (className == null) fetchChildren();
- return super.getClassName();
- }
- public String getFieldOrMethodName() {
- if (thingName == null) fetchChildren();
- return super.getFieldOrMethodName();
- }
- public String getSignature() {
- if (signature == null) fetchChildren();
- return super.getSignature();
- }
- public void fetchChildren() {
- className = lookupClass(classIndex);
- NameAndTypeEntry nat = (NameAndTypeEntry)lookupEntry(nameAndTypeIndex);
- thingName = nat.getName();
- signature = nat.getDescriptor();
- }
- }
-
- abstract public class NameAndTypeEntry implements Entry {
- protected String name, descriptor;
- protected int nameIndex, descriptorIndex;
-
- public int hashCode() { return name.hashCode() + descriptor.hashCode(); }
- public boolean equals(Object o) {
- return o instanceof NameAndTypeEntry
- && ((NameAndTypeEntry)o).name == name
- && ((NameAndTypeEntry)o).descriptor == descriptor;
- }
-
- public int getTag() { return CONSTANT_NameAndType; }
- public String getName() { return name; }
- public String getDescriptor() { return descriptor; }
-
- public int getSize() { return 1; }
- public void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(nameIndex);
- stream.writeShort(descriptorIndex);
- }
- // Follows javap output format for name/type pool entries.
- public String toString() {
- String natName = getName();
- if (JMethod.INSTANCE_CONSTRUCTOR_NAME.equals(natName))
- natName = "\""+natName+"\"";
- return "NameAndType\t#"+nameIndex+":#"+descriptorIndex+
- ";// "+natName+":"+getDescriptor();
- }
- public String toComment(String ownerClassname) { return ""; }
- }
-
- protected class NameAndTypeEntryValue
- extends NameAndTypeEntry
- implements EntryValue {
- public NameAndTypeEntryValue(String name, String descriptor) {
- this.name = name.intern();
- this.descriptor = descriptor.intern();
- }
- public void addChildren() {
- nameIndex = addUtf8(name);
- descriptorIndex = addUtf8(descriptor);
- }
- }
-
- protected class NameAndTypeEntryIndex
- extends NameAndTypeEntry
- implements EntryIndex {
- public NameAndTypeEntryIndex(int nameIndex, int descriptorIndex) {
- this.nameIndex = nameIndex;
- this.descriptorIndex = descriptorIndex;
- }
- public NameAndTypeEntryIndex(DataInputStream stream) throws IOException {
- this(stream.readShort(), stream.readShort());
- }
- public String getName() {
- if (name == null) fetchChildren();
- return super.getName();
- }
- public String getDescriptor() {
- if (descriptor == null) fetchChildren();
- return super.getDescriptor();
- }
- public void fetchChildren() {
- name = lookupUtf8(nameIndex);
- descriptor = lookupUtf8(descriptorIndex);
- }
- }
-}
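addEntry above hash-conses pool entries: a HashMap maps each entry value to its index, so adding the same constant twice returns the same index, and the running index advances by the entry's size so that long and double constants occupy two slots. The sketch below shows the same deduplication idea with plain Java values; the names are illustrative and serialization of entries is left out.

    import java.util.HashMap;
    import java.util.Map;

    // Minimal sketch of constant-pool deduplication: equal constants share an
    // index, and wide constants (long/double) take two slots, as in class files.
    final class PoolDedupSketch {
        private final Map<Object, Integer> entryToIndex = new HashMap<>();
        private int currIndex = 1; // index 0 is never used

        int add(Object constant) {
            Integer existing = entryToIndex.get(constant);
            if (existing != null)
                return existing; // reuse the previously assigned index
            int index = currIndex;
            currIndex += (constant instanceof Long || constant instanceof Double) ? 2 : 1;
            entryToIndex.put(constant, index);
            return index;
        }

        public static void main(String[] args) {
            PoolDedupSketch pool = new PoolDedupSketch();
            System.out.println(pool.add("hello")); // 1
            System.out.println(pool.add(42L));     // 2 (occupies slots 2 and 3)
            System.out.println(pool.add("hello")); // 1 again, deduplicated
            System.out.println(pool.add(3.0));     // 4
        }
    }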
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JConstantValueAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JConstantValueAttribute.java
deleted file mode 100644
index 6ee05e43c7..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JConstantValueAttribute.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * ConstantValue attribute representing the value of a constant field.
- *
- * There can be no more than one ConstantValue attribute in the attributes
- * table of a given field_info structure. See section 4.8.2 of the JVM
- * specification.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-
-public class JConstantValueAttribute extends JAttribute {
- /** Constant pool of the current classfile. */
- private JConstantPool pool;
-
- protected int constantValueIndex;
-
- public JConstantValueAttribute(FJBGContext context,
- JClass clazz,
- JField field) {
- super(context, clazz);
- this.pool = clazz.pool;
-
- assert field.getOwner() == clazz;
- }
-
- public JConstantValueAttribute(FJBGContext context,
- JClass clazz,
- Object owner, // JField
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
- this.pool = clazz.pool;
-
- this.constantValueIndex = stream.readShort();
-
- assert name.equals(getName());
- }
-
- public String getName() { return "ConstantValue"; }
-
- // Follows javap output format for ConstantValue attribute.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(" Constant value: ");
- buf.append(pool.lookupEntry(constantValueIndex));
- return buf.toString();
- }
-
- protected int getSize() {
- return 2; // Short.SIZE
- }
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(constantValueIndex);
- }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JEnclosingMethodAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JEnclosingMethodAttribute.java
deleted file mode 100644
index f663f00ae1..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JEnclosingMethodAttribute.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * EnclosingMethod attribute
- *
- * A class must have an EnclosingMethod attribute if and only if it is a
- * local class or an anonymous class. A class may have no more than one
- * EnclosingMethod attribute. See section 4.8.6 of the JVM specification.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JEnclosingMethodAttribute extends JAttribute {
- /** Constant pool of the current classfile. */
- private JConstantPool pool;
-
- protected final int classIdx;
- protected final int nameAndTypeIdx;
-
- public JEnclosingMethodAttribute(FJBGContext context,
- JClass clazz,
- String className,
- String methodName,
- JType methodType) {
- super(context, clazz);
- this.pool = clazz.pool;
-
- this.classIdx = pool.addClass(className);
- this.nameAndTypeIdx = pool.addNameAndType(methodName, methodType.getSignature());
- }
-
- public JEnclosingMethodAttribute(FJBGContext context,
- JClass clazz,
- Object owner,
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
- this.pool = clazz.pool;
-
- this.classIdx = stream.readShort();
- this.nameAndTypeIdx = stream.readShort();
-
- assert name.equals(getName());
- }
-
- public String getName() { return "EnclosingMethod"; }
-
- // Follows javap output format for EnclosingMethod attribute.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(" EnclosingMethod:");
- buf.append("\n #");
- buf.append(classIdx);
- if (nameAndTypeIdx != 0) {
- buf.append(" of #");
- buf.append(nameAndTypeIdx);
- }
- buf.append(";\t// ");
- buf.append(pool.lookupEntry(classIdx));
- buf.append("\n");
- return buf.toString();
- }
-
- protected int getSize() {
- return 4; // 2 * Short.SIZE
- }
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(classIdx);
- stream.writeShort(nameAndTypeIdx);
- }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JExceptionsAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JExceptionsAttribute.java
deleted file mode 100644
index b91d0f2e93..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JExceptionsAttribute.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * Exceptions attribute
- *
- * This table is used by compilers to indicate which Exceptions a method
- * is declared to throw. See section 2.6.4 of the JVM specification.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-
-public class JExceptionsAttribute extends JAttribute {
- /** Constant pool of the current classfile. */
- private JConstantPool pool;
-
- protected int[] indexTable;
- protected int count;
-
- public JExceptionsAttribute(FJBGContext context,
- JClass clazz,
- JMethod owner) {
- super(context, clazz);
- this.pool = clazz.pool;
-
- this.count = 0;
- this.indexTable = new int[8]; // some size > count
-
- assert clazz == owner.getOwner();
- }
-
- public JExceptionsAttribute(FJBGContext context,
- JClass clazz,
- Object owner, //JMethod
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
- this.pool = clazz.pool;
-
- this.count = stream.readShort();
- this.indexTable = new int[count];
- for (int i = 0; i < count; ++i)
- indexTable[i] = stream.readShort();
-
- assert name.equals(getName());
- }
-
- public void addEntry(int classIndex) {
- if (count >= indexTable.length) {
- int[] newIT = new int[indexTable.length * 2];
- System.arraycopy(indexTable, 0, newIT, 0, indexTable.length);
- indexTable = newIT;
- }
- indexTable[count++] = classIndex;
- }
-
- public String getName() { return "Exceptions"; }
-
- // Follows javap output format for Exceptions attribute.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(" Exceptions: ");
- for (int i = 0; i < indexTable.length; ++i) {
- buf.append("\n throws ");
- buf.append(JClass.toExternalName(pool.lookupClass(indexTable[i])));
- }
- buf.append("\n");
- return buf.toString();
- }
-
- protected int getSize() {
-        return 2 + count * 2; // u2 count + one u2 index per declared exception
- }
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(count);
- for (int i = 0; i < count; ++i)
- stream.writeShort(indexTable[i]);
- }
-}
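For a concrete sense of what JExceptionsAttribute writes, here is a minimal standalone sketch of the attribute body for a method declaring two exceptions; the constant-pool indices are made up for illustration, and the snippet only mirrors writeContentsTo above rather than using any FJBG API.

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public class ExceptionsAttributeBody {
        public static void main(String[] args) throws IOException {
            int[] exceptionClassIndices = { 17, 42 }; // hypothetical CONSTANT_Class indices
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(bytes);
            // u2 number_of_exceptions followed by one u2 class index per
            // declared exception, as in writeContentsTo.
            out.writeShort(exceptionClassIndices.length);
            for (int idx : exceptionClassIndices)
                out.writeShort(idx);
            out.flush();
            System.out.println("attribute body length = " + bytes.size()); // 6 = 2 + 2 * count
        }
    }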
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java b/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java
deleted file mode 100644
index d82db8289f..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java
+++ /dev/null
@@ -1,667 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Extended list of instructions, providing pseudo-instructions which
- * are easier to use than the standard ones.
- *
- * @author Michel Schinz, Thomas Friedli
- * @version 1.0
- */
-
-public class JExtendedCode extends JCode {
- public final static int COND_EQ = 0;
- public final static int COND_NE = 1;
- public final static int COND_LT = 2;
- public final static int COND_GE = 3;
- public final static int COND_GT = 4;
- public final static int COND_LE = 5;
-
- private final JOpcode[] forbidden = new JOpcode[0];
- private final JOpcode[] nothingToDo = new JOpcode[0];
-
- private final JOpcode[][][] typeConversions = {
- {
- /* T_BOOLEAN -> T_BOOLEAN */ nothingToDo,
- /* T_BOOLEAN -> T_CHAR */ forbidden,
- /* T_BOOLEAN -> T_FLOAT */ forbidden,
- /* T_BOOLEAN -> T_DOUBLE */ forbidden,
- /* T_BOOLEAN -> T_BYTE */ forbidden,
- /* T_BOOLEAN -> T_SHORT */ forbidden,
- /* T_BOOLEAN -> T_INT */ forbidden,
- /* T_BOOLEAN -> T_LONG */ forbidden
- },
- {
- /* T_CHAR -> T_BOOLEAN */ forbidden,
- /* T_CHAR -> T_CHAR */ nothingToDo,
- /* T_CHAR -> T_FLOAT */ {JOpcode.I2F},
- /* T_CHAR -> T_DOUBLE */ {JOpcode.I2D},
- /* T_CHAR -> T_BYTE */ {JOpcode.I2B},
- /* T_CHAR -> T_SHORT */ {JOpcode.I2S},
- /* T_CHAR -> T_INT */ nothingToDo,
- /* T_CHAR -> T_LONG */ {JOpcode.I2L}
- },
- {
- /* T_FLOAT -> T_BOOLEAN */ forbidden,
- /* T_FLOAT -> T_CHAR */ {JOpcode.F2I, JOpcode.I2C},
- /* T_FLOAT -> T_FLOAT */ nothingToDo,
- /* T_FLOAT -> T_DOUBLE */ {JOpcode.F2D},
- /* T_FLOAT -> T_BYTE */ {JOpcode.F2I, JOpcode.I2B},
- /* T_FLOAT -> T_SHORT */ {JOpcode.F2I, JOpcode.I2S},
- /* T_FLOAT -> T_INT */ {JOpcode.F2I},
- /* T_FLOAT -> T_LONG */ {JOpcode.F2L}
- },
- {
- /* T_DOUBLE -> T_BOOLEAN */ forbidden,
- /* T_DOUBLE -> T_CHAR */ {JOpcode.D2I, JOpcode.I2C},
- /* T_DOUBLE -> T_FLOAT */ {JOpcode.D2F},
- /* T_DOUBLE -> T_DOUBLE */ nothingToDo,
- /* T_DOUBLE -> T_BYTE */ {JOpcode.D2I, JOpcode.I2B},
- /* T_DOUBLE -> T_SHORT */ {JOpcode.D2I, JOpcode.I2S},
- /* T_DOUBLE -> T_INT */ {JOpcode.D2I},
- /* T_DOUBLE -> T_LONG */ {JOpcode.D2L}
- },
- {
- /* T_BYTE -> T_BOOLEAN */ forbidden,
- /* T_BYTE -> T_CHAR */ {JOpcode.I2C},
- /* T_BYTE -> T_FLOAT */ {JOpcode.I2F},
- /* T_BYTE -> T_DOUBLE */ {JOpcode.I2D},
- /* T_BYTE -> T_BYTE */ nothingToDo,
- /* T_BYTE -> T_SHORT */ nothingToDo,
- /* T_BYTE -> T_INT */ nothingToDo,
- /* T_BYTE -> T_LONG */ {JOpcode.I2L}
- },
- {
- /* T_SHORT -> T_BOOLEAN */ forbidden,
- /* T_SHORT -> T_CHAR */ {JOpcode.I2C},
- /* T_SHORT -> T_FLOAT */ {JOpcode.I2F},
- /* T_SHORT -> T_DOUBLE */ {JOpcode.I2D},
- /* T_SHORT -> T_BYTE */ {JOpcode.I2B},
- /* T_SHORT -> T_SHORT */ nothingToDo,
- /* T_SHORT -> T_INT */ nothingToDo,
- /* T_SHORT -> T_LONG */ {JOpcode.I2L}
- },
- {
- /* T_INT -> T_BOOLEAN */ forbidden,
- /* T_INT -> T_CHAR */ {JOpcode.I2C},
- /* T_INT -> T_FLOAT */ {JOpcode.I2F},
- /* T_INT -> T_DOUBLE */ {JOpcode.I2D},
- /* T_INT -> T_BYTE */ {JOpcode.I2B},
- /* T_INT -> T_SHORT */ {JOpcode.I2S},
- /* T_INT -> T_INT */ nothingToDo,
- /* T_INT -> T_LONG */ {JOpcode.I2L}
- },
- {
- /* T_LONG -> T_BOOLEAN */ forbidden,
- /* T_LONG -> T_CHAR */ {JOpcode.L2I, JOpcode.I2C},
- /* T_LONG -> T_FLOAT */ {JOpcode.L2F},
- /* T_LONG -> T_DOUBLE */ {JOpcode.L2D},
- /* T_LONG -> T_BYTE */ {JOpcode.L2I, JOpcode.I2B},
- /* T_LONG -> T_SHORT */ {JOpcode.L2I, JOpcode.I2S},
- /* T_LONG -> T_INT */ {JOpcode.L2I},
- /* T_LONG -> T_LONG */ nothingToDo
- }
- };
-
- public JExtendedCode(FJBGContext context,
- JClass clazz,
- JMethod owner) {
- super(context, clazz, owner);
- }
-
- public void emitPUSH(boolean value) { emitPUSH(value ? 1 : 0); }
- public void emitPUSH(Boolean value) { emitPUSH(value.booleanValue()); }
-
- public void emitPUSH(byte value) {
- switch (value) {
- case -1: emitICONST_M1(); break;
- case 0: emitICONST_0(); break;
- case 1: emitICONST_1(); break;
- case 2: emitICONST_2(); break;
- case 3: emitICONST_3(); break;
- case 4: emitICONST_4(); break;
- case 5: emitICONST_5(); break;
- default:
- emitBIPUSH(value);
- }
- }
- public void emitPUSH(Byte value) { emitPUSH(value.byteValue()); }
-
- public void emitPUSH(short value) {
- switch (value) {
- case -1: emitICONST_M1(); break;
- case 0: emitICONST_0(); break;
- case 1: emitICONST_1(); break;
- case 2: emitICONST_2(); break;
- case 3: emitICONST_3(); break;
- case 4: emitICONST_4(); break;
- case 5: emitICONST_5(); break;
- default:
- if (value >= Byte.MIN_VALUE && value <= Byte.MAX_VALUE)
- emitBIPUSH((byte)value);
- else
- emitSIPUSH(value);
- }
- }
- public void emitPUSH(Short value) { emitPUSH(value.shortValue()); }
-
- // TODO check that we do the right thing here
- public void emitPUSH(char value) { emitPUSH((int)value); }
- public void emitPUSH(Character value) { emitPUSH(value.charValue()); }
-
- public void emitPUSH(int value) {
- switch (value) {
- case -1: emitICONST_M1(); break;
- case 0: emitICONST_0(); break;
- case 1: emitICONST_1(); break;
- case 2: emitICONST_2(); break;
- case 3: emitICONST_3(); break;
- case 4: emitICONST_4(); break;
- case 5: emitICONST_5(); break;
- default:
- if (value >= Byte.MIN_VALUE && value <= Byte.MAX_VALUE)
- emitBIPUSH((byte)value);
- else if (value >= Short.MIN_VALUE && value <= Short.MAX_VALUE)
- emitSIPUSH((short)value);
- else
- emitPUSH_index(pool.addInteger(value));
- break;
- }
- }
- public void emitPUSH(Integer value) { emitPUSH(value.intValue()); }
-
- public void emitPUSH(long value) {
- if (value == 0L)
- emitLCONST_0();
- else if (value == 1L)
- emitLCONST_1();
- else
- emitLDC2_W(value);
- }
- public void emitPUSH(Long value) { emitPUSH(value.longValue()); }
-
- private static final Float ZEROF = Float.valueOf(0f);
- private static final Float ONEF = Float.valueOf(1f);
- private static final Float TWOF = Float.valueOf(2f);
- public void emitPUSH(Float value) {
- if (ZEROF.equals(value))
- emitFCONST_0();
- else if (ONEF.equals(value))
- emitFCONST_1();
- else if (TWOF.equals(value))
- emitFCONST_2();
- else
- emitPUSH_index(pool.addFloat(value.floatValue()));
- }
- public void emitPUSH(float value) { emitPUSH(Float.valueOf(value)); }
-
- private static final Double ZEROD = Double.valueOf(0d);
- private static final Double ONED = Double.valueOf(1d);
- public void emitPUSH(Double value) {
- if (ZEROD.equals(value))
- emitDCONST_0();
- else if (ONED.equals(value))
- emitDCONST_1();
- else
- emitLDC2_W(value.doubleValue());
- }
- public void emitPUSH(double value) { emitPUSH(Double.valueOf(value)); }
-
- public void emitPUSH(String s) {
- emitPUSH_index(pool.addString(s));
- }
-
-    /** Pushes a class literal onto the stack. */
- public void emitPUSH(JReferenceType type) {
- assert owner.owner.major >= 49;
- emitPUSH_index(pool.addClass(type.getDescriptor()));
- }
-
- protected void emitPUSH_index(int index) {
- if (index <= 0xFF)
- emitU1(JOpcode.LDC, index);
- else
- emitU2(JOpcode.LDC_W, index);
- }
-
- public void emitLOAD(int index, JType type) {
- JOpcode opcode;
-
- switch (type.getTag()) {
- case JType.T_BOOLEAN: case JType.T_BYTE: case JType.T_CHAR:
- case JType.T_SHORT: case JType.T_INT:
- switch (index) {
- case 0: emitILOAD_0(); return;
- case 1: emitILOAD_1(); return;
- case 2: emitILOAD_2(); return;
- case 3: emitILOAD_3(); return;
- default: opcode = JOpcode.ILOAD;
- } break;
- case JType.T_FLOAT:
- switch (index) {
- case 0: emitFLOAD_0(); return;
- case 1: emitFLOAD_1(); return;
- case 2: emitFLOAD_2(); return;
- case 3: emitFLOAD_3(); return;
- default: opcode = JOpcode.FLOAD;
- } break;
- case JType.T_LONG:
- switch (index) {
- case 0: emitLLOAD_0(); return;
- case 1: emitLLOAD_1(); return;
- case 2: emitLLOAD_2(); return;
- case 3: emitLLOAD_3(); return;
- default: opcode = JOpcode.LLOAD;
- } break;
- case JType.T_DOUBLE:
- switch (index) {
- case 0: emitDLOAD_0(); return;
- case 1: emitDLOAD_1(); return;
- case 2: emitDLOAD_2(); return;
- case 3: emitDLOAD_3(); return;
- default: opcode = JOpcode.DLOAD;
- } break;
- case JType.T_ARRAY: case JType.T_OBJECT:
- switch (index) {
- case 0: emitALOAD_0(); return;
- case 1: emitALOAD_1(); return;
- case 2: emitALOAD_2(); return;
- case 3: emitALOAD_3(); return;
- default: opcode = JOpcode.ALOAD;
- } break;
- default:
- throw new IllegalArgumentException("invalid type for load "+type);
- }
-
- if (index > 0xFF)
- emitWIDE(opcode, index);
- else
- emitU1(opcode, index);
- }
- public void emitLOAD(JLocalVariable var) {
- emitLOAD(var.index, var.type);
- }
-
- public void emitSTORE(int index, JType type) {
- JOpcode opcode;
-
- switch (type.getTag()) {
- case JType.T_BOOLEAN: case JType.T_BYTE: case JType.T_CHAR:
- case JType.T_SHORT: case JType.T_INT:
- switch (index) {
- case 0: emitISTORE_0(); return;
- case 1: emitISTORE_1(); return;
- case 2: emitISTORE_2(); return;
- case 3: emitISTORE_3(); return;
- default: opcode = JOpcode.ISTORE;
- } break;
- case JType.T_FLOAT:
- switch (index) {
- case 0: emitFSTORE_0(); return;
- case 1: emitFSTORE_1(); return;
- case 2: emitFSTORE_2(); return;
- case 3: emitFSTORE_3(); return;
- default: opcode = JOpcode.FSTORE;
- } break;
- case JType.T_LONG:
- switch (index) {
- case 0: emitLSTORE_0(); return;
- case 1: emitLSTORE_1(); return;
- case 2: emitLSTORE_2(); return;
- case 3: emitLSTORE_3(); return;
- default: opcode = JOpcode.LSTORE;
- } break;
- case JType.T_DOUBLE:
- switch (index) {
- case 0: emitDSTORE_0(); return;
- case 1: emitDSTORE_1(); return;
- case 2: emitDSTORE_2(); return;
- case 3: emitDSTORE_3(); return;
- default: opcode = JOpcode.DSTORE;
- } break;
- case JType.T_ARRAY: case JType.T_OBJECT: case JType.T_ADDRESS:
- switch (index) {
- case 0: emitASTORE_0(); return;
- case 1: emitASTORE_1(); return;
- case 2: emitASTORE_2(); return;
- case 3: emitASTORE_3(); return;
- default: opcode = JOpcode.ASTORE;
- } break;
- default:
- throw new IllegalArgumentException("invalid type for store "+type);
- }
-
- if (index > 0xFF)
- emitWIDE(opcode, index);
- else
- emitU1(opcode, index);
- }
- public void emitSTORE(JLocalVariable var) {
- emitSTORE(var.index, var.type);
- }
-
- public void emitALOAD(JType type) {
- switch (type.getTag()) {
- case JType.T_BOOLEAN:
- case JType.T_BYTE:
- emitBALOAD();
- break;
- case JType.T_CHAR:
- emitCALOAD();
- break;
- case JType.T_SHORT:
- emitSALOAD();
- break;
- case JType.T_INT:
- emitIALOAD();
- break;
- case JType.T_FLOAT:
- emitFALOAD();
- break;
- case JType.T_LONG:
- emitLALOAD();
- break;
- case JType.T_DOUBLE:
- emitDALOAD();
- break;
- case JType.T_ARRAY:
- case JType.T_OBJECT:
- emitAALOAD();
- break;
- default:
- throw new IllegalArgumentException("invalid type for aload " + type);
- }
- }
-
- public void emitASTORE(JType type) {
- switch (type.getTag()) {
- case JType.T_BOOLEAN:
- case JType.T_BYTE:
- emitBASTORE();
- break;
- case JType.T_CHAR:
- emitCASTORE();
- break;
- case JType.T_SHORT:
- emitSASTORE();
- break;
- case JType.T_INT:
- emitIASTORE();
- break;
- case JType.T_FLOAT:
- emitFASTORE();
- break;
- case JType.T_LONG:
- emitLASTORE();
- break;
- case JType.T_DOUBLE:
- emitDASTORE();
- break;
- case JType.T_ARRAY:
- case JType.T_OBJECT:
- emitAASTORE();
- break;
- default:
- throw new IllegalArgumentException("invalid type for astore " + type);
- }
- }
-
- public void emitRETURN(JType type) {
- if (type.isValueType()) {
- switch (type.getTag()) {
- case JType.T_BOOLEAN:
- case JType.T_BYTE:
- case JType.T_CHAR:
- case JType.T_SHORT:
- case JType.T_INT:
- emitIRETURN();
- break;
- case JType.T_FLOAT:
- emitFRETURN();
- break;
- case JType.T_LONG:
- emitLRETURN();
- break;
- case JType.T_DOUBLE:
- emitDRETURN();
- break;
- }
- } else if (type.isArrayType() || type.isObjectType())
- emitARETURN();
- else if (type == JType.VOID)
- emitRETURN();
- else
- throw new IllegalArgumentException("invalid type for RETURN " + type);
- }
-
- public void emitADD(JType type) {
- switch (type.getTag()) {
- case JType.T_BOOLEAN: case JType.T_BYTE: case JType.T_CHAR:
- case JType.T_SHORT: case JType.T_INT:
- emitIADD(); break;
- case JType.T_FLOAT:
- emitFADD(); break;
- case JType.T_LONG:
- emitLADD(); break;
- case JType.T_DOUBLE:
- emitDADD(); break;
- }
- }
-
- /**
-     * Emits a basic type conversion instruction chosen according to the
-     * types given as parameters.
- *
- * @param fromType The type of the value to be cast into another type.
- * @param toType The type the value will be cast into.
- */
- public void emitT2T(JType fromType, JType toType) {
- assert fromType.getTag() >= JType.T_BOOLEAN
- && fromType.getTag() <= JType.T_LONG
- && toType.getTag() >= JType.T_BOOLEAN
- && toType.getTag() <= JType.T_LONG;
-
- JOpcode[] conv = typeConversions[fromType.getTag() - 4][toType.getTag() - 4];
- if (conv == forbidden) {
- throw new Error("inconvertible types : " + fromType.toString()
- + " -> " + toType.toString());
- } else if (conv != nothingToDo) {
- for (int i = 0; i < conv.length; i++) {
- emit(conv[i]);
- }
- }
- }
-
- public void emitIF(int cond, Label label) throws OffsetTooBigException {
- assert cond >= COND_EQ && cond <= COND_LE;
- emitU2(JOpcode.OPCODES[153 + cond], label.getOffset16(getPC() + 1, getPC()));
- }
- public void emitIF(int cond, int targetPC) throws OffsetTooBigException {
- int offset = targetPC - getPC();
- emitU2(JOpcode.OPCODES[153 + cond], offset);
- }
- public void emitIF(int cond) throws OffsetTooBigException {
- emitIF(cond, 0);
- }
-
- public void emitIF_ICMP(int cond, Label label) throws OffsetTooBigException {
- assert cond >= COND_EQ && cond <= COND_LE;
- emitU2(JOpcode.OPCODES[159 + cond], label.getOffset16(getPC() + 1, getPC()));
- }
- public void emitIF_ICMP(int cond, int targetPC) throws OffsetTooBigException {
- int offset = targetPC - getPC();
- emitU2(JOpcode.OPCODES[159 + cond], offset);
- }
- public void emitIF_ICMP(int cond) throws OffsetTooBigException {
- emitIF_ICMP(cond, 0);
- }
-
- public void emitIF_ACMP(int cond, Label label) throws OffsetTooBigException {
- assert cond == COND_EQ || cond == COND_NE;
- emitU2(JOpcode.OPCODES[165 + cond], label.getOffset16(getPC() + 1, getPC()));
- }
- public void emitIF_ACMP(int cond, int targetPC) throws OffsetTooBigException {
- int offset = targetPC - getPC();
- emitU2(JOpcode.OPCODES[165 + cond], offset);
- }
- public void emitIF_ACMP(int cond) throws OffsetTooBigException {
- emitIF_ACMP(cond, 0);
- }
-
- public void emitGOTO_maybe_W(Label label, boolean defaultToWide) {
- if (label.anchored)
- emitGOTO_maybe_W(label.targetPC);
- else {
- if (defaultToWide)
- emitGOTO_W(label);
- else {
- try {
- emitGOTO(label);
- } catch (OffsetTooBigException e) {
- throw new Error(e);
- }
- }
- }
- }
-
- public void emitGOTO_maybe_W(int targetPC) {
- int offset = targetPC - (getPC() + 1);
- if (offset < Short.MIN_VALUE || offset > Short.MAX_VALUE)
- emitGOTO_W(targetPC);
- else {
- try {
- emitGOTO(targetPC);
- } catch (OffsetTooBigException e) {
- throw new Error(e);
- }
- }
- }
-
- /**
-     * Emits a switch instruction chosen according to the characteristics
-     * of the given list of keys and a given minimum density.
-     *
-     * @param keySets The array of all keys that must be compared to the
-     *        value on stack.
-     * @param branches The labels representing the jump addresses linked
-     *        with the corresponding keys.
-     * @param defaultBranch The label representing the default branch
-     *        address.
-     * @param minDensity The minimum density to use for TABLESWITCH.
- */
- public void emitSWITCH(int[][] keySets,
- Label[] branches,
- Label defaultBranch,
- double minDensity) {
- assert keySets.length == branches.length;
-
- int flatSize = 0;
- for (int i = 0; i < keySets.length; ++i)
- flatSize += keySets[i].length;
-
- int[] flatKeys = new int[flatSize];
- Label[] flatBranches = new Label[flatSize];
- int flatI = 0;
- for (int i = 0; i < keySets.length; ++i) {
- Label branch = branches[i];
- int[] keys = keySets[i];
-            for (int j = 0; j < keys.length; ++j) {
-                flatKeys[flatI] = keys[j];
-                flatBranches[flatI] = branch;
-                ++flatI;
-            }
-        }
- assert flatI == flatSize;
- emitSWITCH(flatKeys, flatBranches, defaultBranch, minDensity);
- }
-
- /**
-     * Emits a switch instruction chosen according to the characteristics
-     * of the given list of keys and a given minimum density.
- *
- * @param keys The array of all keys that must be compared to the
- * value on stack.
- * @param branches The labels representing the jump addresses linked
- * with the corresponding keys.
- * @param defaultBranch The label representing the default branch
- * address.
- * @param minDensity The minimum density to use for TABLESWITCH.
- */
- public void emitSWITCH(int[] keys,
- Label[] branches,
- Label defaultBranch,
- double minDensity) {
- assert keys.length == branches.length;
-
-        // The special case for empty keys. It makes sense to allow
-        // empty keys and generate a LOOKUPSWITCH with defaultBranch
-        // only. This is exactly what javac does for a switch statement
-        // that has only a default case.
- if (keys.length == 0) {
- emitLOOKUPSWITCH(keys, branches, defaultBranch);
- return;
- }
- //the rest of the code assumes that keys.length > 0
-
- // sorting the tables
- // FIXME use quicksort
- for (int i = 1; i < keys.length; i++) {
- for (int j = 1; j <= keys.length - i; j++) {
- if (keys[j] < keys[j - 1]) {
- int tmp = keys[j];
- keys[j] = keys[j - 1];
- keys[j - 1] = tmp;
-
- Label tmp_l = branches[j];
- branches[j] = branches[j - 1];
- branches[j - 1] = tmp_l;
- }
- }
- }
-
- int keyMin = keys[0], keyMax = keys[keys.length - 1];
- /** Calculate in long to guard against overflow. */
- long keyRange = (long)keyMax - keyMin + 1;
- if ((double)keys.length / (double)keyRange >= minDensity) {
- // Keys are dense enough, use a table in which holes are
- // filled with defaultBranch.
- int[] newKeys = new int[(int)keyRange];
- Label[] newBranches = new Label[(int)keyRange];
- int oldPos = 0;
- for (int i = 0; i < keyRange; ++i) {
- int key = keyMin + i;
- newKeys[i] = key;
- if (keys[oldPos] == key) {
- newBranches[i] = branches[oldPos];
- ++oldPos;
- } else
- newBranches[i] = defaultBranch;
- }
- assert oldPos == keys.length;
- emitTABLESWITCH(newKeys, newBranches, defaultBranch);
- } else
- emitLOOKUPSWITCH(keys, branches, defaultBranch);
- }
-
- /**
-     * Emits a method invocation instruction chosen according to
-     * the characteristics of the given method.
- *
- * @param method The method to be invoked.
- */
- public void emitINVOKE(JMethod method) {
- String mName = method.getName();
- String cName = method.getOwner().getName();
- JMethodType mType = (JMethodType)method.getType();
- if (method.isStatic())
- emitINVOKESTATIC(cName, mName, mType);
- else if (method.getOwner().isInterface())
- emitINVOKEINTERFACE(cName, mName, mType);
- else
- emitINVOKEVIRTUAL(cName, mName, mType);
- }
-
-}
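The most interesting part of JExtendedCode is the density heuristic documented on emitSWITCH above: dense key sets become a TABLESWITCH whose holes point at the default branch, sparse ones a LOOKUPSWITCH. The following self-contained sketch mirrors that decision rule outside FJBG; the key sets and the 0.5 threshold are arbitrary examples.

    public class SwitchDensity {
        // Same rule as emitSWITCH(int[], Label[], Label, double) above:
        // keys are assumed sorted; density = #keys / (max - min + 1).
        static String pickSwitch(int[] sortedKeys, double minDensity) {
            if (sortedKeys.length == 0) return "LOOKUPSWITCH"; // default-only switch
            int keyMin = sortedKeys[0];
            int keyMax = sortedKeys[sortedKeys.length - 1];
            long keyRange = (long) keyMax - keyMin + 1;        // long guards against overflow
            return ((double) sortedKeys.length / (double) keyRange >= minDensity)
                ? "TABLESWITCH"   // dense enough: holes filled with the default branch
                : "LOOKUPSWITCH"; // sparse: explicit key/offset pairs
        }

        public static void main(String[] args) {
            System.out.println(pickSwitch(new int[] { 1, 2, 3, 5 }, 0.5));      // TABLESWITCH
            System.out.println(pickSwitch(new int[] { 1, 1000, 100000 }, 0.5)); // LOOKUPSWITCH
        }
    }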
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JField.java b/src/fjbg/ch/epfl/lamp/fjbg/JField.java
deleted file mode 100644
index 29d826ba99..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JField.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.IOException;
-
-/**
- * Java class field.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JField extends JFieldOrMethod {
-
- protected JField(FJBGContext context,
- JClass owner,
- int accessFlags,
- String name,
- JType type) {
- super(context, owner, accessFlags, name, type);
- }
-
- protected JField(FJBGContext context,
- JClass owner,
- DataInputStream stream)
- throws IOException {
- super(context, owner, stream);
- }
-
- // Follows javap output format for fields.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(flagsToString());
- buf.append(toExternalName(getType()));
- buf.append(" ");
- buf.append(getName());
- buf.append(";\n");
- java.util.Iterator attrsIt = attributes.iterator();
- while (attrsIt.hasNext()) {
- JAttribute attrs = (JAttribute)attrsIt.next();
- buf.append(attrs);
- }
- return buf.toString();
- }
-
- private String flagsToString() {
- StringBuffer buf = new StringBuffer();
- if (isPublic()) buf.append("public ");
- else if (isProtected()) buf.append("protected ");
- else if (isPrivate()) buf.append("private ");
- if (isStatic()) buf.append("static ");
- else if (isTransient()) buf.append("transient ");
- else if (isVolatile()) buf.append("volatile ");
- if (isAbstract()) buf.append("abstract ");
- else if (isFinal()) buf.append("final ");
- return buf.toString();
- }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java b/src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java
deleted file mode 100644
index 794c0f13b5..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * Abstract superclass for a Java field or method.
- *
- * No two methods or fields in one class file may have the same name and
- * descriptor. See sections 4.6 and 4.7 of the JVM specification.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-abstract public class JFieldOrMethod extends JMember {
-
- protected final JClass owner;
- protected final JType type;
-
- protected final int nameIndex, signatureIndex;
-
- protected JFieldOrMethod(FJBGContext context,
- JClass owner,
- int accessFlags,
- String name,
- JType type) {
- super(context, accessFlags, name);
- this.owner = owner;
- this.type = type;
-
- nameIndex = owner.pool.addUtf8(name);
- signatureIndex = owner.pool.addUtf8(type.getSignature());
- }
-
- protected JFieldOrMethod(FJBGContext context,
- JClass owner,
- DataInputStream stream)
- throws IOException {
- super(context);
- this.owner = owner;
- this.accessFlags = stream.readShort();
- this.nameIndex = stream.readShort();
- this.name = owner.pool.lookupUtf8(nameIndex);
- this.signatureIndex = stream.readShort();
- this.type = JType.parseSignature(owner.pool.lookupUtf8(signatureIndex));
- this.attributes.addAll(JAttribute.readFrom(context, owner, this, stream));
- }
-
- public void freeze() throws JCode.OffsetTooBigException {
- assert !frozen;
- frozen = true;
- }
-
- public JClass getOwner() { return owner; }
-
- public JType getType() { return type; }
-
- public JClass getJClass() { return owner; }
-
- public boolean isPublic() {
- return (accessFlags & JAccessFlags.ACC_PUBLIC) != 0;
- }
-
- public boolean isPrivate() {
- return (accessFlags & JAccessFlags.ACC_PRIVATE) != 0;
- }
-
- public boolean isProtected() {
- return (accessFlags & JAccessFlags.ACC_PROTECTED) != 0;
- }
-
- public boolean isStatic() {
- return (accessFlags & JAccessFlags.ACC_STATIC) != 0;
- }
-
- public boolean isFinal() {
- return (accessFlags & JAccessFlags.ACC_FINAL) != 0;
- }
-
- public boolean isSuper() {
- return (accessFlags & JAccessFlags.ACC_SUPER) != 0;
- }
-
- public boolean isVolatile() {
- return (accessFlags & JAccessFlags.ACC_VOLATILE) != 0;
- }
-
- public boolean isTransient() {
- return (accessFlags & JAccessFlags.ACC_TRANSIENT) != 0;
- }
-
- public boolean isNative() {
- return (accessFlags & JAccessFlags.ACC_NATIVE) != 0;
- }
-
- public boolean isInterface() {
- return (accessFlags & JAccessFlags.ACC_INTERFACE) != 0;
- }
-
- public boolean isAbstract() {
- return (accessFlags & JAccessFlags.ACC_ABSTRACT) != 0;
- }
-
- public boolean isStrict() {
- return (accessFlags & JAccessFlags.ACC_STRICT) != 0;
- }
-
- // 1.5 specifics
- public boolean isBridge() {
- return (accessFlags & JAccessFlags.ACC_BRIDGE) != 0;
- }
-
- public boolean hasVarargs() {
- return (accessFlags & JAccessFlags.ACC_VARARGS) != 0;
- }
-
- public void writeTo(DataOutputStream stream) throws IOException {
- if (! frozen) {
- try {
- freeze();
- }
- catch (JCode.OffsetTooBigException e) {
- throw new Error(e);
- }
- }
- stream.writeShort(accessFlags);
- stream.writeShort(nameIndex);
- stream.writeShort(signatureIndex);
- JAttribute.writeTo(getAttributes(), stream);
- }
-}
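The is* predicates above are plain bit tests against the accessFlags word. A tiny standalone illustration, using the standard JVM flag values directly (ACC_PUBLIC = 0x0001, ACC_PRIVATE = 0x0002, ACC_STATIC = 0x0008); the JAccessFlags constants used in the real code presumably carry the same values.

    public class FlagBits {
        public static void main(String[] args) {
            int accessFlags = 0x0001 | 0x0008;               // a public static member
            System.out.println((accessFlags & 0x0001) != 0); // true  -> isPublic()
            System.out.println((accessFlags & 0x0002) != 0); // false -> isPrivate()
            System.out.println((accessFlags & 0x0008) != 0); // true  -> isStatic()
        }
    }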
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JInnerClassesAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JInnerClassesAttribute.java
deleted file mode 100644
index 1c1ced500d..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JInnerClassesAttribute.java
+++ /dev/null
@@ -1,201 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-/**
- * InnerClasses attribute.
- *
- * The ClassFile structure of a class/interface C must have exactly one
- * InnerClasses attribute in its attributes table if the constant pool of C
- * contains a CONSTANT_Class_info entry which represents a class or interface
- * that is not a member of a package. See section 4.8.5 of the JVM Specification.
- *
- * @author Iulian Dragos, Stephane Micheloud
- * @version 1.1
- */
-public class JInnerClassesAttribute extends JAttribute {
- /** Constant pool of the current classfile. */
- private JConstantPool pool;
-
- /** InnerClass entries */
- private Map/*<String, Entry>*/ entries = new LinkedHashMap();
-
- public JInnerClassesAttribute(FJBGContext context, JClass clazz) {
- super(context, clazz);
- this.pool = clazz.pool;
- }
-
- public JInnerClassesAttribute(FJBGContext context,
- JClass clazz,
- Object owner,
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
- this.pool = clazz.pool;
-
- String inner = null;
- int count = stream.readShort();
- for (int i = 0; i < count; ++i) {
- int innerIdx = stream.readShort();
- int outerIdx = stream.readShort();
- int nameIdx = stream.readShort();
- int flags = stream.readShort();
- inner = pool.lookupClass(innerIdx);
- entries.put(inner, new Entry(innerIdx, outerIdx, nameIdx, flags));
- }
-
- assert name.equals(getName());
- }
-
- public void addEntry(String inner, String outer, String name, int flags) {
- int innerIdx = pool.addClass(inner);
- int outerIdx = 0;
- if (outer != null) outerIdx = pool.addClass(outer);
- int nameIdx = 0;
- if (name != null) nameIdx = pool.addUtf8(name);
-
- Entry e = new Entry(innerIdx, outerIdx, nameIdx, flags);
-
- if (entries.containsKey(inner)) {
- Entry other = (Entry) entries.get(inner);
- assert other.outerInfo == e.outerInfo && other.originalName == e.originalName && other.innerFlags == e.innerFlags
- : inner + " already declared as " + other;
- } else
- entries.put(inner, e);
- }
-
- public String getName() { return "InnerClasses"; }
-
- // Follows javap output format for the InnerClass attribute.
- /*@Override*/ public String toString() {
-        // Here we intentionally use "InnerClass", as javap does :-(
- StringBuffer buf = new StringBuffer(" InnerClass: ");
- for (Iterator it = entries.values().iterator(); it.hasNext(); ) {
- Entry e = (Entry)it.next();
- buf.append("\n ");
- buf.append(e.innerFlagsToString());
- buf.append("#");
- if (e.originalName != 0) {
- buf.append(e.originalName);
- buf.append("= #");
- }
- buf.append(e.innerInfo);
- if (e.outerInfo != 0) {
- buf.append(" of #");
- buf.append(e.outerInfo);
- }
- buf.append("; //");
- if (e.originalName != 0) {
- buf.append(pool.lookupUtf8(e.originalName));
- buf.append("=");
- }
- buf.append("class ");
- buf.append(pool.lookupClass(e.innerInfo));
- if (e.outerInfo != 0) {
- buf.append(" of class ");
- buf.append(pool.lookupClass(e.outerInfo));
- }
- }
- buf.append("\n");
- return buf.toString();
- }
-
- protected int getSize() {
- return 2 + entries.size() * 8;
- }
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(entries.size());
- for (Iterator it = entries.values().iterator(); it.hasNext(); ) {
- Entry e = (Entry)it.next();
- stream.writeShort(e.innerInfo);
- stream.writeShort(e.outerInfo);
- stream.writeShort(e.originalName);
- stream.writeShort(e.innerFlags);
- }
- }
-
- /** An entry in the InnerClasses attribute, as defined by the JVM Spec. */
- private class Entry {
- /** CONSTANT_Class_info index in the pool for the inner class (mangled). */
- int innerInfo;
-
- /** CONSTANT_Class_info index in the pool for the outer class (mangled). */
- int outerInfo;
-
- /** CONSTANT_Utf8_info index in the pool for the original name of the inner class. */
- int originalName;
-
- /** Short int for modifier flags. */
- int innerFlags;
-
- public Entry(int iI, int oI, int oN, int f) {
- this.innerInfo = iI;
- this.outerInfo = oI;
- this.originalName = oN;
- this.innerFlags = f;
- }
-
- public Entry(String innerClass, String outerClass, String name, int flags) {
- this(pool.addClass(innerClass), pool.addClass(outerClass), pool.addUtf8(name), flags);
- }
-
- /** Two entries are equal if they refer to the same inner class.
- * innerInfo represents a unique name (mangled).
- */
- public boolean equals(Object other) {
- if (other instanceof Entry) {
- Entry otherEntry = (Entry) other;
- return otherEntry.innerInfo == this.innerInfo;
- }
- return false;
- }
-
- public String innerFlagsToString() {
- StringBuffer buf = new StringBuffer();
- if (isPublic()) buf.append("public ");
- else if (isProtected()) buf.append("protected ");
- else if (isPrivate()) buf.append("private ");
- //if (isStatic()) buf.append("static "); // as javap
- if (isAbstract()) buf.append("abstract ");
- else if (isFinal()) buf.append("final ");
- return buf.toString();
- }
-
- private boolean isPublic() {
- return (innerFlags & JAccessFlags.ACC_PUBLIC) != 0;
- }
-
- private boolean isPrivate() {
- return (innerFlags & JAccessFlags.ACC_PRIVATE) != 0;
- }
-
- private boolean isProtected() {
- return (innerFlags & JAccessFlags.ACC_PROTECTED) != 0;
- }
-
- private boolean isStatic() {
- return (innerFlags & JAccessFlags.ACC_STATIC) != 0;
- }
-
- private boolean isFinal() {
- return (innerFlags & JAccessFlags.ACC_FINAL) != 0;
- }
-
- private boolean isAbstract() {
- return (innerFlags & JAccessFlags.ACC_ABSTRACT) != 0;
- }
- }
-}
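The LinkedHashMap above both preserves insertion order and deduplicates entries by (mangled) inner-class name, and each written entry occupies eight bytes. A compact standalone sketch of that bookkeeping; the indices and flags are placeholders, not real constant-pool values.

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class InnerClassEntries {
        public static void main(String[] args) {
            Map<String, int[]> entries = new LinkedHashMap<String, int[]>();
            // inner_class_info, outer_class_info, inner_name, access_flags
            entries.put("Outer$Inner", new int[] { 5, 6, 7, 0x0001 });
            entries.put("Outer$Inner", new int[] { 5, 6, 7, 0x0001 }); // duplicate key: no new entry
            // u2 count + 8 bytes per entry, as in getSize() above
            System.out.println(2 + entries.size() * 8); // 10
        }
    }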
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLabel.java b/src/fjbg/ch/epfl/lamp/fjbg/JLabel.java
deleted file mode 100644
index 96f3b4ebef..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JLabel.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Labels which can be attached to instructions.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JLabel {
- public final static int UNDEFINED_ANCHOR = -1;
- protected int anchor = UNDEFINED_ANCHOR;
-
- public boolean isAnchored() { return anchor != UNDEFINED_ANCHOR; }
-
- public int getAnchor() {
- assert isAnchored();
- return anchor;
- }
-
- public void setAnchor(int anchor) {
- assert !isAnchored();
- this.anchor = anchor;
- }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java
deleted file mode 100644
index f8c09b8ef8..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * Attribute storing the correspondence between instructions and source
- * line numbers.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JLineNumberTableAttribute extends JAttribute {
- protected final JCode code;
-
- public JLineNumberTableAttribute(FJBGContext context,
- JClass clazz,
- JCode owner) {
- super(context, clazz);
- this.code = owner;
-
- assert owner.getOwner().getOwner() == clazz;
- }
-
- public JLineNumberTableAttribute(FJBGContext context,
- JClass clazz,
- Object owner,
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
- this.code = (JCode)owner;
-
- int[] mapping = new int[code.getSize()];
-
- int count = stream.readShort();
- for (int i = 0; i < count; ++i) {
- int startPC = stream.readShort();
- int lineNum = stream.readShort();
- mapping[startPC] = lineNum;
- }
-
- // Avoids duplication of LineNumberTable attribute
- // (see method ensureLineNumberCapacity in class JCode).
- assert code.lineNumbers == null;
- code.lineNumbers = new int[0];
-
- int lineNum = 0;
- for (int pc = 0; pc < mapping.length; ++pc) {
- if (mapping[pc] != 0) lineNum = mapping[pc];
- if (lineNum != 0) code.setLineNumber(pc, lineNum);
- }
-
- assert name.equals(getName());
- }
-
- public String getName() { return "LineNumberTable"; }
-
- // Follows javap output format for LineNumberTable attribute.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(" LineNumberTable: ");
- int[] encoding = encode();
- for (int i = 0; i < encoding.length/2; ++i) {
- buf.append("\n line ");
- buf.append(encoding[i * 2 + 1]);
- buf.append(": ");
- buf.append(encoding[i * 2]);
- }
- buf.append("\n");
- return buf.toString();
- }
-
- protected int[] encoding;
- protected int[] encode() {
- if (encoding == null) {
- int[] lineNumbers = code.getLineNumbers();
- int[] preEncoding = new int[lineNumbers.length * 2];
- int prevLineNum = 0;
-
- int i = 0;
- for (int pc = 0; pc < lineNumbers.length; ++pc) {
- int lineNum = lineNumbers[pc];
-                if (lineNum != 0 && lineNum != prevLineNum) {
- preEncoding[i++] = pc;
- preEncoding[i++] = lineNum;
- prevLineNum = lineNum;
- }
- }
- if (i == preEncoding.length)
- encoding = preEncoding;
- else {
- encoding = new int[i];
- System.arraycopy(preEncoding, 0, encoding, 0, i);
- }
- }
- return encoding;
- }
-
- protected int getSize() {
- int[] encoding = encode();
- return 2 + encoding.length * 2;
- }
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- int[] encoding = encode();
- int entries = encoding.length / 2;
- stream.writeShort(entries);
- for (int i = 0; i < entries; ++i) {
- stream.writeShort(encoding[i * 2]);
- stream.writeShort(encoding[i * 2 + 1]);
- }
- }
-}
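encode() above compresses a per-pc line-number array into (start_pc, line_number) pairs, keeping only the pcs where the line changes. A small self-contained sketch of that compression with a made-up line-number array:

    import java.util.ArrayList;
    import java.util.List;

    public class LineNumberEncoding {
        public static void main(String[] args) {
            int[] lineNumbers = { 10, 10, 10, 11, 11, 13 }; // hypothetical: index = bytecode pc
            List<int[]> pairs = new ArrayList<int[]>();
            int prevLineNum = 0;
            for (int pc = 0; pc < lineNumbers.length; ++pc) {
                int lineNum = lineNumbers[pc];
                if (lineNum != 0 && lineNum != prevLineNum) {
                    pairs.add(new int[] { pc, lineNum });
                    prevLineNum = lineNum;
                }
            }
            for (int[] p : pairs)
                System.out.println("line " + p[1] + ": " + p[0]); // javap-style output
        }
    }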
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java b/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java
deleted file mode 100644
index af7980656f..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Representation of a local variable or method argument.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JLocalVariable {
- protected final JMethod owner;
- protected final JType type;
- protected final String name;
- protected final int index;
-
- protected JLocalVariable(FJBGContext context,
- JMethod owner,
- JType type,
- String name,
- int index) {
- this.owner = owner;
- this.type = type;
- this.name = name;
- this.index = index;
-
- assert index < 0xFFFF : "index too big for local variable: " + index;
- }
-
- public JMethod getOwner() { return owner; }
- public int getIndex() { return index; }
- public String getName() { return name; }
- public JType getType() { return type; }
-
- /*@Override*/ public String toString() {
- return "0\t"+type.getSize()+"\t"+index+"\t"+name+"\t"+type;
- }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariableTableAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariableTableAttribute.java
deleted file mode 100644
index b277cc71c0..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariableTableAttribute.java
+++ /dev/null
@@ -1,167 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.LinkedList;
-
-import ch.epfl.lamp.fjbg.JConstantPool.*;
-
-/**
- * Attribute storing local variables.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-
-public class JLocalVariableTableAttribute extends JAttribute {
- /** Constant pool of the current classfile. */
- private JConstantPool pool;
-
- protected final LinkedList/*<Entry>*/ entries = new LinkedList();
- protected int localVariableIndex = 0;
-
- public JLocalVariableTableAttribute(FJBGContext context,
- JClass clazz,
- JCode code) {
- super(context, clazz);
- this.pool = clazz.pool;
-
- assert code.getOwner().getOwner() == clazz;
- }
-
- public JLocalVariableTableAttribute(FJBGContext context,
- JClass clazz,
- Object owner,
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
- this.pool = clazz.pool;
-
- int count = stream.readShort();
- for (int i = 0; i < count; ++i) {
- int startPc = stream.readShort();
- int length = stream.readShort();
- int nameIndex = stream.readShort();
- int descIndex = stream.readShort();
- int index = stream.readShort();
- addEntry(startPc, length, nameIndex, descIndex, index);
- }
-
- assert name.equals(getName());
- }
-
- public void addEntry(int startPc, int length, int nameIndex,
- int descIndex, int index) {
- entries.add(new Entry(startPc, length, nameIndex, descIndex, index));
- }
-
- public void addEntry(int startPc, int length, String name,
- String desc, int index) {
- Entry e = new Entry(startPc, length, name, desc, index);
- Entry other = getEntry(index);
- if (other != null) {
- assert other.nameIndex == e.nameIndex && other.descIndex == e.descIndex
- : e + " already declared as " + other;
- } else
- entries.add(e);
- }
-
- public void addEntry(int startPc, int length, String name, String desc) {
- entries.add(new Entry(startPc, length, name, desc));
- }
-
- public String getName() { return "LocalVariableTable"; }
-
- // Follows javap output format for LocalVariableTable attribute.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(" LocalVariableTable: ");
- buf.append("\n Start Length Slot Name Signature");
- for (Iterator it = entries.iterator(); it.hasNext(); ) {
- buf.append("\n ");
- Entry e = (Entry)it.next();
- Utf8Entry name = (Utf8Entry)pool.lookupEntry(e.nameIndex);
- Utf8Entry sig = (Utf8Entry)pool.lookupEntry(e.descIndex);
- buf.append(e.startPc);
- buf.append(" ");
- buf.append(e.length);
- buf.append(" ");
- buf.append(e.index);
- buf.append(" ");
- buf.append(name.getValue());
- buf.append(" ");
- buf.append(sig.getValue());
- }
- buf.append("\n");
- return buf.toString();
- }
-
- public int getMaxLocals() {
- return localVariableIndex;
- }
-
- public int getSize() {
- return 2 + entries.size() * 10;
- }
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(entries.size());
- for (Iterator it = entries.iterator(); it.hasNext(); ) {
- Entry e = (Entry)it.next();
- stream.writeShort(e.startPc);
- stream.writeShort(e.length);
- stream.writeShort(e.nameIndex);
- stream.writeShort(e.descIndex);
- stream.writeShort(e.index);
- }
- }
-
-    private Entry getEntry(int index) {
-        // Look up by local variable slot index, not by list position.
-        for (Iterator it = entries.iterator(); it.hasNext(); ) {
-            Entry e = (Entry)it.next();
-            if (e.index == index) return e;
-        }
-        return null;
-    }
-
- private class Entry {
- int startPc;
- int length;
- int nameIndex;
- int descIndex;
- int index;
-
- public Entry(int startPc, int length, int nameIndex, int descIndex, int index) {
- this.startPc = startPc;
- this.length = length;
- this.nameIndex = nameIndex;
- this.descIndex = descIndex;
- this.index = index;
- localVariableIndex += length;
- }
-
- public Entry(int startPc, int length, String name, String desc, int index) {
- this(startPc, length, pool.addUtf8(name), pool.addUtf8(desc), index);
- }
-
- public Entry(int startPc, int length, String name, String desc) {
- this(startPc, length, pool.addUtf8(name), pool.addUtf8(desc), localVariableIndex);
- }
-
- /** Two entries are equal if they refer to the same index.
- */
- public boolean equals(Object other) {
- if (other instanceof Entry) {
- Entry otherEntry = (Entry) other;
- return otherEntry.index == this.index;
- }
- return false;
- }
- }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JMember.java b/src/fjbg/ch/epfl/lamp/fjbg/JMember.java
deleted file mode 100644
index 6356cc874d..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JMember.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * Abstract superclass for a Java class, field or method.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-
-abstract public class JMember {
-
- protected boolean frozen = false;
-
- protected final FJBGContext context;
-
- protected String name;
-
- protected int accessFlags;
-
- protected final List/*<JAttribute>*/ attributes = new LinkedList();
-
- protected JMember(FJBGContext context) { this.context = context; }
-
- protected JMember(FJBGContext context, int accessFlags, String name) {
- this(context);
- this.name = name;
- this.accessFlags = accessFlags;
- }
-
- /**
-     * Gets the access flags of the member.
-     * @return The int representing the access flags of the member.
- */
- public int getAccessFlags() { return accessFlags; }
-
- /**
- * Gets the name of the member.
- * @return The string representing the name of the member.
- */
- public String getName() { return name; }
-
- /**
-     * Gets the type of the member (for a class, the type of its instances).
-     * @return The type of this member.
- */
- public abstract JType getType();
-
- /**
-     * Gets the class corresponding to or owning this member.
- * @return The class owning this member or the class itself.
- */
- public abstract JClass getJClass();
-
- /**
- * Gets the constant pool of the class.
- * @return The constant pool of the class.
- */
- public JConstantPool getConstantPool() { return getJClass().getConstantPool(); }
-
- public FJBGContext getContext() { return context; }
-
- /**
-     * Adds an attribute to the member.
- * @param attr The attribute to be added.
- */
- public void addAttribute(JAttribute attr) {
- assert !frozen;
- attributes.add(attr);
- }
-
- /**
-     * Gets the list of all attributes of the member.
-     * @return The list of attributes attached to this member.
- */
- public List/*<JAttribute>*/ getAttributes() {
- return attributes;
- }
-
- /**
- * Get the attribute with the given name, or null if it doesn't
- * exist.
- */
- public JAttribute getAttribute(String name) {
- Iterator attrIt = getAttributes().iterator();
- while (attrIt.hasNext()) {
- JAttribute attr = (JAttribute)attrIt.next();
- if (attr.getName().equals(name))
- return attr;
- }
- return null;
- }
-
- protected static String toExternalName(String name) {
- return name.replace('/', '.');
- }
-
- protected static String toExternalName(JType tpe) {
- return tpe.toString().replace(':', '.');
- }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JMethod.java b/src/fjbg/ch/epfl/lamp/fjbg/JMethod.java
deleted file mode 100644
index 01d58a45c7..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JMethod.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * Representation of a Java method.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JMethod extends JFieldOrMethod {
- public final static String CLASS_CONSTRUCTOR_NAME = "<clinit>";
- public final static String INSTANCE_CONSTRUCTOR_NAME = "<init>";
-
- protected /*final*/ JCode code;
- protected final String[] argNames;
-
- protected final LinkedList/*<JLocalVariable>*/ localVariables =
- new LinkedList();
- protected int localVariableIndex = 0;
-
-
- protected JMethod(FJBGContext context,
- JClass owner,
- int accessFlags,
- String name,
- JType returnType,
- JType[] argTypes,
- String[] argNames) {
- super(context,
- owner,
- accessFlags,
- name,
- new JMethodType(returnType, argTypes));
- this.argNames = argNames;
-
- assert argTypes.length == argNames.length;
-
- if (isAbstract() || isNative()) {
- code = null;
- } else {
- code = context.JCode(owner, this);
- addAttribute(context.JCodeAttribute(owner, this));
-
- if (!isStatic())
- addNewLocalVariable(owner.getType(), "this");
-
- for (int i = 0; i < argTypes.length; ++i)
- addNewLocalVariable(argTypes[i], argNames[i]);
- }
- }
-
- protected JMethod(FJBGContext context,
- JClass owner,
- DataInputStream stream)
- throws IOException {
- super(context, owner, stream);
-
- assert isAbstract() || isNative() || code != null;
-
- int n = 0;
- if (code != null) {
- for (Iterator it = code.getAttributes().iterator(); it.hasNext(); ) {
- JAttribute attr = (JAttribute)it.next();
- if (attr instanceof JLocalVariableTableAttribute)
- n = ((JLocalVariableTableAttribute)attr).getMaxLocals();
- }
- }
- this.localVariableIndex = n;
-
-
- JType[] argTypes = ((JMethodType)getType()).getArgumentTypes();
- argNames = new String[argTypes.length]; // TODO get from attribute
- for (int i = 0; i < argNames.length; ++i)
- argNames[i] = "v"+i;
- }
-
- public void freeze() throws JCode.OffsetTooBigException {
- if (code != null) code.freeze();
- super.freeze();
- }
-
- public JType getReturnType() {
- return ((JMethodType)type).getReturnType();
- }
-
- public JType[] getArgumentTypes() {
- return ((JMethodType)type).getArgumentTypes();
- }
-
- public int getArgsSize() {
- int size = ((JMethodType)type).getArgsSize();
- if (!isStatic()) size += 1; // for this
- return size;
- }
-
- public String[] getArgumentNames() {
- return argNames;
- }
-
- public JCode getCode() {
- assert !isAbstract();
- return code;
- }
-
- // Invoked by the JCode constructor
- protected void setCode(JCode code) {
- assert null == this.code;
- this.code = code;
- }
-
- public JCodeIterator codeIterator() {
- return new JCodeIterator(code);
- }
-
- // Local variables
- // FIXME : find a better management method for local variables
- public JLocalVariable addNewLocalVariable(JType type, String name) {
- assert !frozen;
- JLocalVariable var =
- context.JLocalVariable(this, type, name, localVariableIndex);
- localVariableIndex += type.getSize();
- localVariables.add(var);
- return var;
- }
-
- public JLocalVariable getLocalVariable(int index) {
- for (int i = 0; i < localVariables.size(); i++) {
- if (((JLocalVariable)localVariables.get(i)).index == index)
- return (JLocalVariable)localVariables.get(i);
- }
- return null;
- }
-
- public JLocalVariable[] getLocalVariables() {
- return (JLocalVariable[])localVariables
- .toArray(new JLocalVariable[localVariables.size()]);
- }
-
-
- public int getMaxLocals() {
- return localVariableIndex;
- }
-
- // Follows javap output format for methods.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(flagsToString());
- String name = getName();
- if (CLASS_CONSTRUCTOR_NAME.equals(name))
- buf.append("{}");
- else {
- if (INSTANCE_CONSTRUCTOR_NAME.equals(name))
- name = getOwner().getName();
- else {
- buf.append(toExternalName(getReturnType()));
- buf.append(" ");
- }
- buf.append(toExternalName(name));
- buf.append("(");
- JType[] ts = getArgumentTypes();
- for (int i = 0; i < ts.length; ++i) {
- if (i > 0) buf.append(", ");
- buf.append(toExternalName(ts[i]));
- }
- buf.append(")");
- }
- buf.append(";\n");
- Iterator it = attributes.iterator();
- while(it.hasNext()) {
- JAttribute attr = (JAttribute)it.next();
- buf.append(attr);
- }
- return buf.toString();
- }
-
- private String flagsToString() {
- StringBuffer buf = new StringBuffer();
- if (isPublic()) buf.append("public ");
- else if (isProtected()) buf.append("protected ");
- else if (isPrivate()) buf.append("private ");
- if (isBridge()) buf.append("<bridge> ");
- if (hasVarargs()) buf.append("<varargs> ");
- if (isStatic()) buf.append("static ");
- else if (isNative()) buf.append("native ");
- if (isAbstract()) buf.append("abstract ");
- else if (isFinal()) buf.append("final ");
- return buf.toString();
- }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java b/src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java
deleted file mode 100644
index cd3d71fd9c..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Type for Java methods. These types do not really exist in Java, but
- * are provided here because they are useful in several places.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JMethodType extends JType {
- protected final JType returnType;
- protected final JType[] argTypes;
- protected String signature = null;
-
- public final static JMethodType ARGLESS_VOID_FUNCTION =
- new JMethodType(JType.VOID, JType.EMPTY_ARRAY);
-
- public JMethodType(JType returnType, JType[] argTypes) {
- this.returnType = returnType;
- this.argTypes = argTypes;
- }
-
- public JType getReturnType() { return returnType; }
- public JType[] getArgumentTypes() { return argTypes; }
-
- public int getSize() {
- throw new UnsupportedOperationException();
- }
-
- public String getSignature() {
- if (signature == null) {
- StringBuffer buf = new StringBuffer();
- buf.append('(');
- for (int i = 0; i < argTypes.length; ++i)
- buf.append(argTypes[i].getSignature());
- buf.append(')');
- buf.append(returnType.getSignature());
- signature = buf.toString();
- }
- return signature;
- }
-
- public int getTag() { return T_UNKNOWN; }
-
- public String toString() {
- StringBuffer buf = new StringBuffer();
- buf.append('(');
- for (int i = 0; i < argTypes.length; ++i)
- buf.append(argTypes[i].toString());
- buf.append(')');
- buf.append(returnType.toString());
- return buf.toString();
- }
-
- public int getArgsSize() {
- int size = 0;
- for (int i = 0; i < argTypes.length; ++i)
- size += argTypes[i].getSize();
- return size;
- }
-
- public int getProducedStack() {
- return returnType.getSize() - getArgsSize();
- }
-
- public boolean isCompatibleWith(JType other) {
- return false;
- }
- public boolean equals(Object o) {
- if (o instanceof JMethodType)
- return ((JMethodType)o).getSignature().equals(this.getSignature());
- else
- return false;
- }
- public int hashCode() {
- if (signature == null)
- return 0;
- else
- return signature.hashCode();
- }
-}
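getSignature() above assembles a standard JVM method descriptor: "(" followed by the argument type signatures, then ")" and the return type signature. A minimal standalone sketch of the same assembly, with the element signatures written out by hand rather than taken from JType objects:

    public class MethodDescriptor {
        static String methodSignature(String[] argSigs, String returnSig) {
            StringBuilder buf = new StringBuilder("(");
            for (String sig : argSigs)
                buf.append(sig);
            return buf.append(')').append(returnSig).toString();
        }

        public static void main(String[] args) {
            // e.g. a method taking a String and an int and returning void
            System.out.println(methodSignature(
                new String[] { "Ljava/lang/String;", "I" }, "V")); // (Ljava/lang/String;I)V
        }
    }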
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java b/src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java
deleted file mode 100644
index 06db5b115a..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Types for Java objects.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JObjectType extends JReferenceType {
- protected final String name;
- protected String signature = null;
-
- public final static JObjectType JAVA_LANG_OBJECT =
- new JObjectType("java.lang.Object");
- public final static JObjectType JAVA_LANG_STRING =
- new JObjectType("java.lang.String");
- public final static JObjectType CLONEABLE =
-        new JObjectType("java.lang.Cloneable");
- public final static JObjectType JAVA_IO_SERIALIZABLE =
- new JObjectType("java.io.Serializable");
-
- public JObjectType(String name) {
- this.name = name;
- }
-
- public int getSize() { return 1; }
-
- public String getName() { return name; }
-
- public String getSignature() {
- if (signature == null)
- signature = "L" + name.replace('.','/') + ";";
- return signature;
- }
-
- public String getDescriptor() {
- return name.replace('.','/');
- }
-
- public int getTag() { return T_OBJECT; }
-
- public String toString() { return name; }
-
- public boolean isObjectType() { return true; }
-
- public boolean isCompatibleWith(JType other) {
- return other instanceof JObjectType
- || other == JType.REFERENCE;
- }
- public boolean equals(Object o) {
- if (o instanceof JObjectType)
- return ((JObjectType)o).getSignature().equals(this.getSignature());
- else
- return false;
- }
- public int hashCode() {
- return name.hashCode();
- }
-}
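The difference between getSignature() and getDescriptor() above is just the "L...;" wrapping around the internal, slash-separated name. A one-screen illustration (the class name is an arbitrary example):

    public class ObjectTypeNames {
        public static void main(String[] args) {
            String name = "java.lang.String";
            String internal = name.replace('.', '/');
            System.out.println("L" + internal + ";"); // signature:  Ljava/lang/String;
            System.out.println(internal);             // descriptor: java/lang/String
        }
    }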
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java b/src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java
deleted file mode 100644
index cc68681a96..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java
+++ /dev/null
@@ -1,1267 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Definition of opcodes for the JVM.
- *
- * @author Michel Schinz, Thomas Friedli
- * @version 1.0
- */
-
-public class JOpcode {
- public final String name;
- public final int code;
-
-    // The following attributes can be (statically) unknown for some
-    // instructions, and are therefore not public. To query their values,
-    // the accessor functions have to be used (see JCodeIterator).
- protected final int size;
- protected final JType[] producedDataTypes;
- protected final JType[] consumedDataTypes;
- protected final int jumpKind;
- protected final int successorCount;
-
- protected final static int UNKNOWN = Integer.MIN_VALUE;
-
- protected final static int JMP_NONE = 0;
- protected final static int JMP_NEXT = 1;
- protected final static int JMP_ALWAYS_S2_OFFSET = 2;
- protected final static int JMP_ALWAYS_S4_OFFSET = 3;
- protected final static int JMP_MAYBE_S2_OFFSET = 4;
- protected final static int JMP_TABLE = 5;
- protected final static int JMP_LOOKUP = 6;
-
- protected final static JType[] NO_DATA = new JType[0];
-
- protected final static JType[] INT_TYPE =
- new JType[] { JType.INT };
- protected final static JType[] FLOAT_TYPE =
- new JType[] { JType.FLOAT };
- protected final static JType[] LONG_TYPE =
- new JType[] { JType.LONG };
- protected final static JType[] DOUBLE_TYPE =
- new JType[] { JType.DOUBLE };
- protected final static JType[] OBJECT_REF_TYPE =
- new JType[] { JObjectType.JAVA_LANG_OBJECT };
- protected final static JType[] ARRAY_REF_TYPE =
- new JType[] { new JArrayType(JType.VOID) };
- protected final static JType[] REFERENCE_TYPE =
- new JType[] { JType.REFERENCE };
- protected final static JType[] ADDRESS_TYPE =
- new JType[] { JType.ADDRESS };
- protected final static JType[] UNKNOWN_TYPE =
- new JType[] { JType.UNKNOWN };
-
- /// Instruction codes
- public final static int cNOP = 0;
- public final static int cACONST_NULL = 1;
- public final static int cICONST_M1 = 2;
- public final static int cICONST_0 = 3;
- public final static int cICONST_1 = 4;
- public final static int cICONST_2 = 5;
- public final static int cICONST_3 = 6;
- public final static int cICONST_4 = 7;
- public final static int cICONST_5 = 8;
- public final static int cLCONST_0 = 9;
- public final static int cLCONST_1 = 10;
- public final static int cFCONST_0 = 11;
- public final static int cFCONST_1 = 12;
- public final static int cFCONST_2 = 13;
- public final static int cDCONST_0 = 14;
- public final static int cDCONST_1 = 15;
- public final static int cBIPUSH = 16;
- public final static int cSIPUSH = 17;
- public final static int cLDC = 18;
- public final static int cLDC_W = 19;
- public final static int cLDC2_W = 20;
- public final static int cILOAD = 21;
- public final static int cLLOAD = 22;
- public final static int cFLOAD = 23;
- public final static int cDLOAD = 24;
- public final static int cALOAD = 25;
- public final static int cILOAD_0 = 26;
- public final static int cILOAD_1 = 27;
- public final static int cILOAD_2 = 28;
- public final static int cILOAD_3 = 29;
- public final static int cLLOAD_0 = 30;
- public final static int cLLOAD_1 = 31;
- public final static int cLLOAD_2 = 32;
- public final static int cLLOAD_3 = 33;
- public final static int cFLOAD_0 = 34;
- public final static int cFLOAD_1 = 35;
- public final static int cFLOAD_2 = 36;
- public final static int cFLOAD_3 = 37;
- public final static int cDLOAD_0 = 38;
- public final static int cDLOAD_1 = 39;
- public final static int cDLOAD_2 = 40;
- public final static int cDLOAD_3 = 41;
- public final static int cALOAD_0 = 42;
- public final static int cALOAD_1 = 43;
- public final static int cALOAD_2 = 44;
- public final static int cALOAD_3 = 45;
- public final static int cIALOAD = 46;
- public final static int cLALOAD = 47;
- public final static int cFALOAD = 48;
- public final static int cDALOAD = 49;
- public final static int cAALOAD = 50;
- public final static int cBALOAD = 51;
- public final static int cCALOAD = 52;
- public final static int cSALOAD = 53;
- public final static int cISTORE = 54;
- public final static int cLSTORE = 55;
- public final static int cFSTORE = 56;
- public final static int cDSTORE = 57;
- public final static int cASTORE = 58;
- public final static int cISTORE_0 = 59;
- public final static int cISTORE_1 = 60;
- public final static int cISTORE_2 = 61;
- public final static int cISTORE_3 = 62;
- public final static int cLSTORE_0 = 63;
- public final static int cLSTORE_1 = 64;
- public final static int cLSTORE_2 = 65;
- public final static int cLSTORE_3 = 66;
- public final static int cFSTORE_0 = 67;
- public final static int cFSTORE_1 = 68;
- public final static int cFSTORE_2 = 69;
- public final static int cFSTORE_3 = 70;
- public final static int cDSTORE_0 = 71;
- public final static int cDSTORE_1 = 72;
- public final static int cDSTORE_2 = 73;
- public final static int cDSTORE_3 = 74;
- public final static int cASTORE_0 = 75;
- public final static int cASTORE_1 = 76;
- public final static int cASTORE_2 = 77;
- public final static int cASTORE_3 = 78;
- public final static int cIASTORE = 79;
- public final static int cLASTORE = 80;
- public final static int cFASTORE = 81;
- public final static int cDASTORE = 82;
- public final static int cAASTORE = 83;
- public final static int cBASTORE = 84;
- public final static int cCASTORE = 85;
- public final static int cSASTORE = 86;
- public final static int cPOP = 87;
- public final static int cPOP2 = 88;
- public final static int cDUP = 89;
- public final static int cDUP_X1 = 90;
- public final static int cDUP_X2 = 91;
- public final static int cDUP2 = 92;
- public final static int cDUP2_X1 = 93;
- public final static int cDUP2_X2 = 94;
- public final static int cSWAP = 95;
- public final static int cIADD = 96;
- public final static int cLADD = 97;
- public final static int cFADD = 98;
- public final static int cDADD = 99;
- public final static int cISUB = 100;
- public final static int cLSUB = 101;
- public final static int cFSUB = 102;
- public final static int cDSUB = 103;
- public final static int cIMUL = 104;
- public final static int cLMUL = 105;
- public final static int cFMUL = 106;
- public final static int cDMUL = 107;
- public final static int cIDIV = 108;
- public final static int cLDIV = 109;
- public final static int cFDIV = 110;
- public final static int cDDIV = 111;
- public final static int cIREM = 112;
- public final static int cLREM = 113;
- public final static int cFREM = 114;
- public final static int cDREM = 115;
- public final static int cINEG = 116;
- public final static int cLNEG = 117;
- public final static int cFNEG = 118;
- public final static int cDNEG = 119;
- public final static int cISHL = 120;
- public final static int cLSHL = 121;
- public final static int cISHR = 122;
- public final static int cLSHR = 123;
- public final static int cIUSHR = 124;
- public final static int cLUSHR = 125;
- public final static int cIAND = 126;
- public final static int cLAND = 127;
- public final static int cIOR = 128;
- public final static int cLOR = 129;
- public final static int cIXOR = 130;
- public final static int cLXOR = 131;
- public final static int cIINC = 132;
- public final static int cI2L = 133;
- public final static int cI2F = 134;
- public final static int cI2D = 135;
- public final static int cL2I = 136;
- public final static int cL2F = 137;
- public final static int cL2D = 138;
- public final static int cF2I = 139;
- public final static int cF2L = 140;
- public final static int cF2D = 141;
- public final static int cD2I = 142;
- public final static int cD2L = 143;
- public final static int cD2F = 144;
- public final static int cI2B = 145;
- public final static int cI2C = 146;
- public final static int cI2S = 147;
- public final static int cLCMP = 148;
- public final static int cFCMPL = 149;
- public final static int cFCMPG = 150;
- public final static int cDCMPL = 151;
- public final static int cDCMPG = 152;
- public final static int cIFEQ = 153;
- public final static int cIFNE = 154;
- public final static int cIFLT = 155;
- public final static int cIFGE = 156;
- public final static int cIFGT = 157;
- public final static int cIFLE = 158;
- public final static int cIF_ICMPEQ = 159;
- public final static int cIF_ICMPNE = 160;
- public final static int cIF_ICMPLT = 161;
- public final static int cIF_ICMPGE = 162;
- public final static int cIF_ICMPGT = 163;
- public final static int cIF_ICMPLE = 164;
- public final static int cIF_ACMPEQ = 165;
- public final static int cIF_ACMPNE = 166;
- public final static int cGOTO = 167;
- public final static int cJSR = 168;
- public final static int cRET = 169;
- public final static int cTABLESWITCH = 170;
- public final static int cLOOKUPSWITCH = 171;
- public final static int cIRETURN = 172;
- public final static int cLRETURN = 173;
- public final static int cFRETURN = 174;
- public final static int cDRETURN = 175;
- public final static int cARETURN = 176;
- public final static int cRETURN = 177;
- public final static int cGETSTATIC = 178;
- public final static int cPUTSTATIC = 179;
- public final static int cGETFIELD = 180;
- public final static int cPUTFIELD = 181;
- public final static int cINVOKEVIRTUAL = 182;
- public final static int cINVOKESPECIAL = 183;
- public final static int cINVOKESTATIC = 184;
- public final static int cINVOKEINTERFACE = 185;
- public final static int cNEW = 187;
- public final static int cNEWARRAY = 188;
- public final static int cANEWARRAY = 189;
- public final static int cARRAYLENGTH = 190;
- public final static int cATHROW = 191;
- public final static int cCHECKCAST = 192;
- public final static int cINSTANCEOF = 193;
- public final static int cMONITORENTER = 194;
- public final static int cMONITOREXIT = 195;
- public final static int cWIDE = 196;
- public final static int cMULTIANEWARRAY = 197;
- public final static int cIFNULL = 198;
- public final static int cIFNONNULL = 199;
- public final static int cGOTO_W = 200;
- public final static int cJSR_W = 201;
-
- // Objects representing instructions
- public final static JOpcode NOP =
- new JOpcode("NOP", cNOP, 1, NO_DATA, NO_DATA, JMP_NEXT);
- public final static JOpcode ACONST_NULL = new JOpcode("ACONST_NULL",
- cACONST_NULL,
- 1,
- REFERENCE_TYPE,
- NO_DATA,
- JMP_NEXT);
- public final static JOpcode ICONST_M1 =
- new JOpcode("ICONST_M1", cICONST_M1, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ICONST_0 =
- new JOpcode("ICONST_0", cICONST_0, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ICONST_1 =
- new JOpcode("ICONST_1", cICONST_1, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ICONST_2 =
- new JOpcode("ICONST_2", cICONST_2, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ICONST_3 =
- new JOpcode("ICONST_3", cICONST_3, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ICONST_4 =
- new JOpcode("ICONST_4", cICONST_4, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ICONST_5 =
- new JOpcode("ICONST_5", cICONST_5, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode LCONST_0 =
- new JOpcode("LCONST_0", cLCONST_0, 1, LONG_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode LCONST_1 =
- new JOpcode("LCONST_1", cLCONST_1, 1, LONG_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode FCONST_0 =
- new JOpcode("FCONST_0", cFCONST_0, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode FCONST_1 =
- new JOpcode("FCONST_1", cFCONST_1, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode FCONST_2 =
- new JOpcode("FCONST_2", cFCONST_2, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode DCONST_0 =
- new JOpcode("DCONST_0", cDCONST_0, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode DCONST_1 =
- new JOpcode("DCONST_1", cDCONST_1, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode BIPUSH =
- new JOpcode("BIPUSH", cBIPUSH, 2, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode SIPUSH =
- new JOpcode("SIPUSH", cSIPUSH, 3, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode LDC =
- new JOpcode("LDC", cLDC, 2, UNKNOWN_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode LDC_W =
- new JOpcode("LDC_W", cLDC_W, 3, UNKNOWN_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode LDC2_W =
- new JOpcode("LDC2_W", cLDC2_W, 3, UNKNOWN_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ILOAD =
- new JOpcode("ILOAD", cILOAD, 2, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode LLOAD =
- new JOpcode("LLOAD", cLLOAD, 2, LONG_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode FLOAD =
- new JOpcode("FLOAD", cFLOAD, 2, FLOAT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode DLOAD =
- new JOpcode("DLOAD", cDLOAD, 2, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ALOAD =
- new JOpcode("ALOAD", cALOAD, 2, REFERENCE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ILOAD_0 =
- new JOpcode("ILOAD_0", cILOAD_0, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ILOAD_1 =
- new JOpcode("ILOAD_1", cILOAD_1, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ILOAD_2 =
- new JOpcode("ILOAD_2", cILOAD_2, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ILOAD_3 =
- new JOpcode("ILOAD_3", cILOAD_3, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode LLOAD_0 =
- new JOpcode("LLOAD_0", cLLOAD_0, 1, LONG_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode LLOAD_1 =
- new JOpcode("LLOAD_1", cLLOAD_1, 1, LONG_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode LLOAD_2 =
- new JOpcode("LLOAD_2", cLLOAD_2, 1, LONG_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode LLOAD_3 =
- new JOpcode("LLOAD_3", cLLOAD_3, 1, LONG_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode FLOAD_0 =
- new JOpcode("FLOAD_0", cFLOAD_0, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode FLOAD_1 =
- new JOpcode("FLOAD_1", cFLOAD_1, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode FLOAD_2 =
- new JOpcode("FLOAD_2", cFLOAD_2, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode FLOAD_3 =
- new JOpcode("FLOAD_3", cFLOAD_3, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode DLOAD_0 =
- new JOpcode("DLOAD_0", cDLOAD_0, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode DLOAD_1 =
- new JOpcode("DLOAD_1", cDLOAD_1, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode DLOAD_2 =
- new JOpcode("DLOAD_2", cDLOAD_2, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode DLOAD_3 =
- new JOpcode("DLOAD_3", cDLOAD_3, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ALOAD_0 =
- new JOpcode("ALOAD_0", cALOAD_0, 1, REFERENCE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ALOAD_1 =
- new JOpcode("ALOAD_1", cALOAD_1, 1, REFERENCE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ALOAD_2 =
- new JOpcode("ALOAD_2", cALOAD_2, 1, REFERENCE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ALOAD_3 =
- new JOpcode("ALOAD_3", cALOAD_3, 1, REFERENCE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode IALOAD =
- new JOpcode("IALOAD",
- cIALOAD,
- 1,
- INT_TYPE,
- new JType[] {JType.INT, JArrayType.INT},
- JMP_NEXT);
- public final static JOpcode LALOAD =
- new JOpcode("LALOAD",
- cLALOAD,
- 1,
- LONG_TYPE,
- new JType[] {JType.INT, JArrayType.LONG},
- JMP_NEXT);
- public final static JOpcode FALOAD =
- new JOpcode("FALOAD",
- cFALOAD,
- 1,
- FLOAT_TYPE,
- new JType[] {JType.INT, JArrayType.FLOAT},
- JMP_NEXT);
- public final static JOpcode DALOAD =
- new JOpcode("DALOAD",
- cDALOAD,
- 1,
- DOUBLE_TYPE,
- new JType[] {JType.INT, JArrayType.DOUBLE},
- JMP_NEXT);
- public final static JOpcode AALOAD =
- new JOpcode("AALOAD",
- cAALOAD,
- 1,
- REFERENCE_TYPE,
- new JType[] {JType.INT, JArrayType.REFERENCE},
- JMP_NEXT);
- public final static JOpcode BALOAD =
- new JOpcode("BALOAD",
- cBALOAD,
- 1,
- INT_TYPE,
- new JType[] {JType.INT, new JArrayType(JType.UNKNOWN)},
- JMP_NEXT);
- public final static JOpcode CALOAD =
- new JOpcode("CALOAD",
- cCALOAD,
- 1,
- INT_TYPE,
- new JType[] {JType.INT, JArrayType.CHAR},
- JMP_NEXT);
- public final static JOpcode SALOAD =
- new JOpcode("SALOAD",
- cSALOAD,
- 1,
- INT_TYPE,
- new JType[] {JType.INT, JArrayType.SHORT},
- JMP_NEXT);
- public final static JOpcode ISTORE =
- new JOpcode("ISTORE", cISTORE, 2, NO_DATA, INT_TYPE, JMP_NEXT);
- public final static JOpcode LSTORE =
- new JOpcode("LSTORE", cLSTORE, 2, NO_DATA, LONG_TYPE, JMP_NEXT);
- public final static JOpcode FSTORE =
- new JOpcode("FSTORE", cFSTORE, 2, NO_DATA, FLOAT_TYPE, JMP_NEXT);
- public final static JOpcode DSTORE =
- new JOpcode("DSTORE", cDSTORE, 2, NO_DATA, DOUBLE_TYPE, JMP_NEXT);
- public final static JOpcode ASTORE =
- new JOpcode("ASTORE", cASTORE, 2, NO_DATA, REFERENCE_TYPE, JMP_NEXT);
- public final static JOpcode ISTORE_0 =
- new JOpcode("ISTORE_0", cISTORE_0, 1, NO_DATA, INT_TYPE, JMP_NEXT);
- public final static JOpcode ISTORE_1 =
- new JOpcode("ISTORE_1", cISTORE_1, 1, NO_DATA, INT_TYPE, JMP_NEXT);
- public final static JOpcode ISTORE_2 =
- new JOpcode("ISTORE_2", cISTORE_2, 1, NO_DATA, INT_TYPE, JMP_NEXT);
- public final static JOpcode ISTORE_3 =
- new JOpcode("ISTORE_3", cISTORE_3, 1, NO_DATA, INT_TYPE, JMP_NEXT);
- public final static JOpcode LSTORE_0 =
- new JOpcode("LSTORE_0", cLSTORE_0, 1, NO_DATA, LONG_TYPE, JMP_NEXT);
- public final static JOpcode LSTORE_1 =
- new JOpcode("LSTORE_1", cLSTORE_1, 1, NO_DATA, LONG_TYPE, JMP_NEXT);
- public final static JOpcode LSTORE_2 =
- new JOpcode("LSTORE_2", cLSTORE_2, 1, NO_DATA, LONG_TYPE, JMP_NEXT);
- public final static JOpcode LSTORE_3 =
- new JOpcode("LSTORE_3", cLSTORE_3, 1, NO_DATA, LONG_TYPE, JMP_NEXT);
- public final static JOpcode FSTORE_0 =
- new JOpcode("FSTORE_0", cFSTORE_0, 1, NO_DATA, FLOAT_TYPE, JMP_NEXT);
- public final static JOpcode FSTORE_1 =
- new JOpcode("FSTORE_1", cFSTORE_1, 1, NO_DATA, FLOAT_TYPE, JMP_NEXT);
- public final static JOpcode FSTORE_2 =
- new JOpcode("FSTORE_2", cFSTORE_2, 1, NO_DATA, FLOAT_TYPE, JMP_NEXT);
- public final static JOpcode FSTORE_3 =
- new JOpcode("FSTORE_3", cFSTORE_3, 1, NO_DATA, FLOAT_TYPE, JMP_NEXT);
- public final static JOpcode DSTORE_0 =
- new JOpcode("DSTORE_0", cDSTORE_0, 1, NO_DATA, DOUBLE_TYPE, JMP_NEXT);
- public final static JOpcode DSTORE_1 =
- new JOpcode("DSTORE_1", cDSTORE_1, 1, NO_DATA, DOUBLE_TYPE, JMP_NEXT);
- public final static JOpcode DSTORE_2 =
- new JOpcode("DSTORE_2", cDSTORE_2, 1, NO_DATA, DOUBLE_TYPE, JMP_NEXT);
- public final static JOpcode DSTORE_3 =
- new JOpcode("DSTORE_3", cDSTORE_3, 1, NO_DATA, DOUBLE_TYPE, JMP_NEXT);
- public final static JOpcode ASTORE_0 = new JOpcode("ASTORE_0",
- cASTORE_0,
- 1,
- NO_DATA,
- REFERENCE_TYPE,
- JMP_NEXT);
- public final static JOpcode ASTORE_1 = new JOpcode("ASTORE_1",
- cASTORE_1,
- 1,
- NO_DATA,
- REFERENCE_TYPE,
- JMP_NEXT);
- public final static JOpcode ASTORE_2 = new JOpcode("ASTORE_2",
- cASTORE_2,
- 1,
- NO_DATA,
- REFERENCE_TYPE,
- JMP_NEXT);
- public final static JOpcode ASTORE_3 = new JOpcode("ASTORE_3",
- cASTORE_3,
- 1,
- NO_DATA,
- REFERENCE_TYPE,
- JMP_NEXT);
- public final static JOpcode IASTORE =
- new JOpcode("IASTORE",
- cIASTORE,
- 1,
- NO_DATA,
- new JType[] { JType.INT,
- JType.INT,
- JArrayType.INT},
- JMP_NEXT);
- public final static JOpcode LASTORE =
- new JOpcode("LASTORE",
- cLASTORE,
- 1,
- NO_DATA,
- new JType[] { JType.LONG,
- JType.INT,
- JArrayType.LONG},
- JMP_NEXT);
- public final static JOpcode FASTORE =
- new JOpcode("FASTORE",
- cFASTORE,
- 1,
- NO_DATA,
- new JType[] { JType.FLOAT,
- JType.INT,
- JArrayType.FLOAT},
- JMP_NEXT);
- public final static JOpcode DASTORE =
- new JOpcode("DASTORE",
- cDASTORE,
- 1,
- NO_DATA,
- new JType[] { JType.DOUBLE,
- JType.INT,
- JArrayType.DOUBLE},
- JMP_NEXT);
- public final static JOpcode AASTORE =
- new JOpcode("AASTORE",
- cAASTORE,
- 1,
- NO_DATA,
- new JType[] { JType.REFERENCE,
- JType.INT,
- JArrayType.REFERENCE},
- JMP_NEXT);
- public final static JOpcode BASTORE =
- new JOpcode("BASTORE",
- cBASTORE,
- 1,
- NO_DATA,
- new JType[] { JType.INT,
- JType.INT,
- new JArrayType(JType.UNKNOWN)},
- JMP_NEXT);
- public final static JOpcode CASTORE =
- new JOpcode("CASTORE",
- cCASTORE,
- 1,
- NO_DATA,
- new JType[] { JType.INT,
- JType.INT,
- JArrayType.CHAR},
- JMP_NEXT);
- public final static JOpcode SASTORE =
- new JOpcode("SASTORE",
- cSASTORE,
- 1,
- NO_DATA,
- new JType[] { JType.INT,
- JType.INT,
- JArrayType.SHORT},
- JMP_NEXT);
- public final static JOpcode POP =
- new JOpcode("POP", cPOP, 1, NO_DATA, UNKNOWN_TYPE, JMP_NEXT);
- public final static JOpcode POP2 =
- new JOpcode("POP2", cPOP2, 1, NO_DATA, UNKNOWN_TYPE, JMP_NEXT);
- public final static JOpcode DUP =
- new JOpcode("DUP", cDUP, 1, UNKNOWN_TYPE, UNKNOWN_TYPE, JMP_NEXT);
- public final static JOpcode DUP_X1 = new JOpcode("DUP_X1",
- cDUP_X1,
- 1,
- UNKNOWN_TYPE,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode DUP_X2 = new JOpcode("DUP_X2",
- cDUP_X2,
- 1,
- UNKNOWN_TYPE,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode DUP2 =
- new JOpcode("DUP2", cDUP2, 1, UNKNOWN_TYPE, UNKNOWN_TYPE, JMP_NEXT);
- public final static JOpcode DUP2_X1 = new JOpcode("DUP2_X1",
- cDUP2_X1,
- 1,
- UNKNOWN_TYPE,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode DUP2_X2 = new JOpcode("DUP2_X2",
- cDUP2_X2,
- 1,
- UNKNOWN_TYPE,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode SWAP =
- new JOpcode("SWAP", cSWAP, 1, UNKNOWN_TYPE, UNKNOWN_TYPE, JMP_NEXT);
- public final static JOpcode IADD =
- new JOpcode("IADD",
- cIADD,
- 1,
- INT_TYPE,
- new JType[] { JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LADD =
- new JOpcode("LADD",
- cLADD,
- 1,
- LONG_TYPE,
- new JType[] { JType.LONG, JType.LONG },
- JMP_NEXT);
- public final static JOpcode FADD =
- new JOpcode("FADD",
- cFADD,
- 1,
- FLOAT_TYPE,
- new JType[] { JType.FLOAT, JType.FLOAT },
- JMP_NEXT);
- public final static JOpcode DADD =
- new JOpcode("DADD",
- cDADD,
- 1,
- DOUBLE_TYPE,
- new JType[] { JType.DOUBLE, JType.DOUBLE },
- JMP_NEXT);
- public final static JOpcode ISUB =
- new JOpcode("ISUB",
- cISUB,
- 1,
- INT_TYPE,
- new JType[] { JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LSUB =
- new JOpcode("LSUB",
- cLSUB,
- 1,
- LONG_TYPE,
- new JType[] { JType.LONG, JType.LONG },
- JMP_NEXT);
- public final static JOpcode FSUB =
- new JOpcode("FSUB",
- cFSUB,
- 1,
- FLOAT_TYPE,
- new JType[] { JType.FLOAT, JType.FLOAT },
- JMP_NEXT);
- public final static JOpcode DSUB =
- new JOpcode("DSUB",
- cDSUB,
- 1,
- DOUBLE_TYPE,
- new JType[] { JType.DOUBLE, JType.DOUBLE },
- JMP_NEXT);
- public final static JOpcode IMUL =
- new JOpcode("IMUL",
- cIMUL,
- 1,
- INT_TYPE,
- new JType[] {JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LMUL =
- new JOpcode("LMUL",
- cLMUL,
- 1,
- LONG_TYPE,
- new JType[] { JType.LONG, JType.LONG },
- JMP_NEXT);
- public final static JOpcode FMUL =
- new JOpcode("FMUL",
- cFMUL,
- 1,
- FLOAT_TYPE,
- new JType[] { JType.FLOAT, JType.FLOAT },
- JMP_NEXT);
- public final static JOpcode DMUL =
- new JOpcode("DMUL",
- cDMUL,
- 1,
- DOUBLE_TYPE,
- new JType[] { JType.DOUBLE, JType.DOUBLE },
- JMP_NEXT);
- public final static JOpcode IDIV =
- new JOpcode("IDIV",
- cIDIV,
- 1,
- INT_TYPE,
- new JType[] {JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LDIV =
- new JOpcode("LDIV",
- cLDIV,
- 1,
- LONG_TYPE,
- new JType[] { JType.LONG, JType.LONG },
- JMP_NEXT);
- public final static JOpcode FDIV =
- new JOpcode("FDIV",
- cFDIV,
- 1,
- FLOAT_TYPE,
- new JType[] { JType.FLOAT, JType.FLOAT },
- JMP_NEXT);
- public final static JOpcode DDIV =
- new JOpcode("DDIV",
- cDDIV,
- 1,
- DOUBLE_TYPE,
- new JType[] { JType.DOUBLE, JType.DOUBLE },
- JMP_NEXT);
- public final static JOpcode IREM =
- new JOpcode("IREM",
- cIREM,
- 1,
- INT_TYPE,
- new JType[] {JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LREM =
- new JOpcode("LREM",
- cLREM,
- 1,
- LONG_TYPE,
- new JType[] { JType.LONG, JType.LONG },
- JMP_NEXT);
- public final static JOpcode FREM =
- new JOpcode("FREM",
- cFREM,
- 1,
- FLOAT_TYPE,
- new JType[] { JType.FLOAT, JType.FLOAT },
- JMP_NEXT);
- public final static JOpcode DREM =
- new JOpcode("DREM",
- cDREM,
- 1,
- DOUBLE_TYPE,
- new JType[] { JType.DOUBLE, JType.DOUBLE },
- JMP_NEXT);
- public final static JOpcode INEG =
- new JOpcode("INEG", cINEG, 1, INT_TYPE, INT_TYPE, JMP_NEXT);
- public final static JOpcode LNEG =
- new JOpcode("LNEG", cLNEG, 1, LONG_TYPE, LONG_TYPE, JMP_NEXT);
- public final static JOpcode FNEG =
- new JOpcode("FNEG", cFNEG, 1, FLOAT_TYPE, FLOAT_TYPE, JMP_NEXT);
- public final static JOpcode DNEG =
- new JOpcode("DNEG", cDNEG, 1, DOUBLE_TYPE, DOUBLE_TYPE, JMP_NEXT);
- public final static JOpcode ISHL =
- new JOpcode("ISHL", cISHL,
- 1,
- INT_TYPE,
- new JType[] { JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LSHL =
- new JOpcode("LSHL",
- cLSHL,
- 1,
- LONG_TYPE,
- new JType[] { JType.INT, JType.LONG },
- JMP_NEXT);
- public final static JOpcode ISHR =
- new JOpcode("ISHR",
- cISHR,
- 1,
- INT_TYPE,
- new JType[] { JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LSHR =
- new JOpcode("LSHR",
- cLSHR,
- 1,
- LONG_TYPE,
- new JType[] { JType.INT, JType.LONG },
- JMP_NEXT);
- public final static JOpcode IUSHR =
- new JOpcode("IUSHR",
- cIUSHR,
- 1,
- INT_TYPE,
- new JType[] { JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LUSHR =
- new JOpcode("LUSHR",
- cLUSHR,
- 1,
- LONG_TYPE,
- new JType[] { JType.INT, JType.LONG },
- JMP_NEXT);
- public final static JOpcode IAND =
- new JOpcode("IAND",
- cIAND,
- 1,
- INT_TYPE,
- new JType[] { JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LAND =
- new JOpcode("LAND",
- cLAND,
- 1,
- LONG_TYPE,
- new JType[] { JType.LONG, JType.LONG },
- JMP_NEXT);
- public final static JOpcode IOR =
- new JOpcode("IOR",
- cIOR,
- 1,
- INT_TYPE,
- new JType[] { JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LOR =
- new JOpcode("LOR",
- cLOR,
- 1,
- LONG_TYPE,
- new JType[] { JType.LONG, JType.LONG },
- JMP_NEXT);
- public final static JOpcode IXOR =
- new JOpcode("IXOR",
- cIXOR,
- 1,
- INT_TYPE,
- new JType[] { JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LXOR =
- new JOpcode("LXOR",
- cLXOR,
- 1,
- LONG_TYPE,
- new JType[] { JType.LONG, JType.LONG },
- JMP_NEXT);
- public final static JOpcode IINC =
- new JOpcode("IINC", cIINC, 3, NO_DATA, NO_DATA, JMP_NEXT);
- public final static JOpcode I2L =
- new JOpcode("I2L", cI2L, 1, LONG_TYPE, INT_TYPE, JMP_NEXT);
- public final static JOpcode I2F =
- new JOpcode("I2F", cI2F, 1, FLOAT_TYPE, INT_TYPE, JMP_NEXT);
- public final static JOpcode I2D =
- new JOpcode("I2D", cI2D, 1, DOUBLE_TYPE, INT_TYPE, JMP_NEXT);
- public final static JOpcode L2I =
- new JOpcode("L2I", cL2I, 1, INT_TYPE, LONG_TYPE, JMP_NEXT);
- public final static JOpcode L2F =
- new JOpcode("L2F", cL2F, 1, FLOAT_TYPE, LONG_TYPE, JMP_NEXT);
- public final static JOpcode L2D =
- new JOpcode("L2D", cL2D, 1, DOUBLE_TYPE, LONG_TYPE, JMP_NEXT);
- public final static JOpcode F2I =
- new JOpcode("F2I", cF2I, 1, INT_TYPE, FLOAT_TYPE, JMP_NEXT);
- public final static JOpcode F2L =
- new JOpcode("F2L", cF2L, 1, LONG_TYPE, FLOAT_TYPE, JMP_NEXT);
- public final static JOpcode F2D =
- new JOpcode("F2D", cF2D, 1, DOUBLE_TYPE, FLOAT_TYPE, JMP_NEXT);
- public final static JOpcode D2I =
- new JOpcode("D2I", cD2I, 1, INT_TYPE, DOUBLE_TYPE, JMP_NEXT);
- public final static JOpcode D2L =
- new JOpcode("D2L", cD2L, 1, LONG_TYPE, DOUBLE_TYPE, JMP_NEXT);
- public final static JOpcode D2F =
- new JOpcode("D2F", cD2F, 1, FLOAT_TYPE, DOUBLE_TYPE, JMP_NEXT);
- public final static JOpcode I2B =
- new JOpcode("I2B", cI2B, 1, INT_TYPE, INT_TYPE, JMP_NEXT);
- public final static JOpcode I2C =
- new JOpcode("I2C", cI2C, 1, INT_TYPE, INT_TYPE, JMP_NEXT);
- public final static JOpcode I2S =
- new JOpcode("I2S", cI2S, 1, INT_TYPE, INT_TYPE, JMP_NEXT);
- public final static JOpcode LCMP =
- new JOpcode("LCMP",
- cLCMP,
- 1,
- INT_TYPE,
- new JType[] { JType.LONG, JType.LONG },
- JMP_NEXT);
- public final static JOpcode FCMPL =
- new JOpcode("FCMPL",
- cFCMPL,
- 1,
- INT_TYPE,
- new JType[] { JType.FLOAT, JType.FLOAT },
- JMP_NEXT);
- public final static JOpcode FCMPG =
- new JOpcode("FCMPG",
- cFCMPG,
- 1,
- INT_TYPE,
- new JType[] { JType.FLOAT, JType.FLOAT },
- JMP_NEXT);
- public final static JOpcode DCMPL =
- new JOpcode("DCMPL",
- cDCMPL,
- 1,
- INT_TYPE,
- new JType[] { JType.DOUBLE, JType.DOUBLE }, // DCMPL compares two doubles
- JMP_NEXT);
- public final static JOpcode DCMPG =
- new JOpcode("DCMPG",
- cDCMPG,
- 1,
- INT_TYPE,
- new JType[] { JType.DOUBLE, JType.DOUBLE },
- JMP_NEXT);
- public final static JOpcode IFEQ =
- new JOpcode("IFEQ", cIFEQ, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IFNE =
- new JOpcode("IFNE", cIFNE, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IFLT =
- new JOpcode("IFLT", cIFLT, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IFGE =
- new JOpcode("IFGE", cIFGE, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IFGT =
- new JOpcode("IFGT", cIFGT, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IFLE =
- new JOpcode("IFLE", cIFLE, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IF_ICMPEQ =
- new JOpcode("IF_ICMPEQ",
- cIF_ICMPEQ,
- 3,
- NO_DATA,
- new JType[] { JType.INT, JType.INT },
- JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IF_ICMPNE =
- new JOpcode("IF_ICMPNE",
- cIF_ICMPNE,
- 3,
- NO_DATA,
- new JType[] { JType.INT, JType.INT },
- JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IF_ICMPLT =
- new JOpcode("IF_ICMPLT",
- cIF_ICMPLT,
- 3,
- NO_DATA,
- new JType[] { JType.INT, JType.INT },
- JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IF_ICMPGE =
- new JOpcode("IF_ICMPGE",
- cIF_ICMPGE,
- 3,
- NO_DATA,
- new JType[] { JType.INT, JType.INT },
- JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IF_ICMPGT =
- new JOpcode("IF_ICMPGT",
- cIF_ICMPGT,
- 3,
- NO_DATA,
- new JType[] { JType.INT, JType.INT },
- JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IF_ICMPLE =
- new JOpcode("IF_ICMPLE",
- cIF_ICMPLE,
- 3,
- NO_DATA,
- new JType[] { JType.INT, JType.INT },
- JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IF_ACMPEQ =
- new JOpcode("IF_ACMPEQ",
- cIF_ACMPEQ,
- 3,
- NO_DATA,
- new JType[] { JType.REFERENCE, JType.REFERENCE },
- JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IF_ACMPNE =
- new JOpcode("IF_ACMPNE",
- cIF_ACMPNE,
- 3,
- NO_DATA,
- new JType[] { JType.REFERENCE, JType.REFERENCE },
- JMP_MAYBE_S2_OFFSET);
- public final static JOpcode GOTO =
- new JOpcode("GOTO", cGOTO, 3, NO_DATA, NO_DATA, JMP_ALWAYS_S2_OFFSET);
- public final static JOpcode JSR =
- new JOpcode("JSR", cJSR, 3, ADDRESS_TYPE, NO_DATA, JMP_ALWAYS_S2_OFFSET);
- public final static JOpcode RET =
- new JOpcode("RET", cRET, 2, NO_DATA, NO_DATA, JMP_NONE);
- public final static JOpcode TABLESWITCH = new JOpcode("TABLESWITCH",
- cTABLESWITCH,
- UNKNOWN,
- NO_DATA,
- INT_TYPE,
- JMP_TABLE);
- public final static JOpcode LOOKUPSWITCH = new JOpcode("LOOKUPSWITCH",
- cLOOKUPSWITCH,
- UNKNOWN,
- NO_DATA,
- INT_TYPE,
- JMP_LOOKUP);
- public final static JOpcode IRETURN =
- new JOpcode("IRETURN", cIRETURN, 1, NO_DATA, INT_TYPE, JMP_NONE);
- public final static JOpcode LRETURN =
- new JOpcode("LRETURN", cLRETURN, 1, NO_DATA, LONG_TYPE, JMP_NONE);
- public final static JOpcode FRETURN =
- new JOpcode("FRETURN", cFRETURN, 1, NO_DATA, FLOAT_TYPE, JMP_NONE);
- public final static JOpcode DRETURN =
- new JOpcode("DRETURN", cDRETURN, 1, NO_DATA, DOUBLE_TYPE, JMP_NONE);
- public final static JOpcode ARETURN = new JOpcode("ARETURN",
- cARETURN,
- 1,
- NO_DATA,
- OBJECT_REF_TYPE,
- JMP_NONE);
- public final static JOpcode RETURN =
- new JOpcode("RETURN", cRETURN, 1, NO_DATA, NO_DATA, JMP_NONE);
- public final static JOpcode GETSTATIC = new JOpcode("GETSTATIC",
- cGETSTATIC,
- 3,
- UNKNOWN_TYPE,
- NO_DATA,
- JMP_NEXT);
- public final static JOpcode PUTSTATIC = new JOpcode("PUTSTATIC",
- cPUTSTATIC,
- 3,
- NO_DATA,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode GETFIELD = new JOpcode("GETFIELD",
- cGETFIELD,
- 3,
- UNKNOWN_TYPE,
- OBJECT_REF_TYPE,
- JMP_NEXT);
- public final static JOpcode PUTFIELD =
- new JOpcode("PUTFIELD", cPUTFIELD, 3, NO_DATA, UNKNOWN_TYPE, JMP_NEXT);
- public final static JOpcode INVOKEVIRTUAL = new JOpcode("INVOKEVIRTUAL",
- cINVOKEVIRTUAL,
- 3,
- NO_DATA,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode INVOKESPECIAL = new JOpcode("INVOKESPECIAL",
- cINVOKESPECIAL,
- 3,
- NO_DATA,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode INVOKESTATIC = new JOpcode("INVOKESTATIC",
- cINVOKESTATIC,
- 3,
- NO_DATA,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode INVOKEINTERFACE =
- new JOpcode("INVOKEINTERFACE",
- cINVOKEINTERFACE,
- 5,
- NO_DATA,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode NEW =
- new JOpcode("NEW", cNEW, 3, OBJECT_REF_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode NEWARRAY =
- new JOpcode("NEWARRAY",
- cNEWARRAY,
- 2,
- ARRAY_REF_TYPE,
- INT_TYPE,
- JMP_NEXT);
- public final static JOpcode ANEWARRAY =
- new JOpcode("ANEWARRAY",
- cANEWARRAY,
- 3,
- ARRAY_REF_TYPE,
- INT_TYPE,
- JMP_NEXT);
- public final static JOpcode ARRAYLENGTH = new JOpcode("ARRAYLENGTH",
- cARRAYLENGTH,
- 1,
- INT_TYPE,
- ARRAY_REF_TYPE,
- JMP_NEXT);
- public final static JOpcode ATHROW = new JOpcode("ATHROW",
- cATHROW,
- 1,
- OBJECT_REF_TYPE,
- OBJECT_REF_TYPE,
- JMP_NONE);
- public final static JOpcode CHECKCAST = new JOpcode("CHECKCAST",
- cCHECKCAST,
- 3,
- OBJECT_REF_TYPE,
- OBJECT_REF_TYPE,
- JMP_NEXT);
- public final static JOpcode INSTANCEOF = new JOpcode("INSTANCEOF",
- cINSTANCEOF,
- 3,
- INT_TYPE,
- OBJECT_REF_TYPE,
- JMP_NEXT);
- public final static JOpcode MONITORENTER = new JOpcode("MONITORENTER",
- cMONITORENTER,
- 1,
- NO_DATA,
- OBJECT_REF_TYPE,
- JMP_NEXT);
- public final static JOpcode MONITOREXIT = new JOpcode("MONITOREXIT",
- cMONITOREXIT,
- 1,
- NO_DATA,
- OBJECT_REF_TYPE,
- JMP_NEXT);
- public final static JOpcode WIDE = new JOpcode("WIDE",
- cWIDE,
- UNKNOWN,
- UNKNOWN_TYPE,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode MULTIANEWARRAY = new JOpcode("MULTIANEWARRAY",
- cMULTIANEWARRAY,
- 4,
- ARRAY_REF_TYPE,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode IFNULL = new JOpcode("IFNULL",
- cIFNULL,
- 3,
- NO_DATA,
- REFERENCE_TYPE,
- JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IFNONNULL = new JOpcode("IFNONNULL",
- cIFNONNULL,
- 3,
- NO_DATA,
- REFERENCE_TYPE,
- JMP_MAYBE_S2_OFFSET);
- public final static JOpcode GOTO_W = new JOpcode("GOTO_W",
- cGOTO_W,
- 5,
- NO_DATA,
- NO_DATA,
- JMP_ALWAYS_S4_OFFSET);
- public final static JOpcode JSR_W =
- new JOpcode("JSR_W", cJSR_W, 5, ADDRESS_TYPE, NO_DATA, JMP_NEXT);
-
- public final static JOpcode[] OPCODES = {
- NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1,
- ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0,
- LCONST_1, FCONST_0, FCONST_1, FCONST_2, DCONST_0,
- DCONST_1, BIPUSH, SIPUSH, LDC, LDC_W,
- LDC2_W, ILOAD, LLOAD, FLOAD, DLOAD,
- ALOAD, ILOAD_0, ILOAD_1, ILOAD_2, ILOAD_3,
- LLOAD_0, LLOAD_1, LLOAD_2, LLOAD_3, FLOAD_0,
- FLOAD_1, FLOAD_2, FLOAD_3, DLOAD_0, DLOAD_1,
- DLOAD_2, DLOAD_3, ALOAD_0, ALOAD_1, ALOAD_2,
- ALOAD_3, IALOAD, LALOAD, FALOAD, DALOAD,
- AALOAD, BALOAD, CALOAD, SALOAD, ISTORE,
- LSTORE, FSTORE, DSTORE, ASTORE, ISTORE_0,
- ISTORE_1, ISTORE_2, ISTORE_3, LSTORE_0, LSTORE_1,
- LSTORE_2, LSTORE_3, FSTORE_0, FSTORE_1, FSTORE_2,
- FSTORE_3, DSTORE_0, DSTORE_1, DSTORE_2, DSTORE_3,
- ASTORE_0, ASTORE_1, ASTORE_2, ASTORE_3, IASTORE,
- LASTORE, FASTORE, DASTORE, AASTORE, BASTORE,
- CASTORE, SASTORE, POP, POP2, DUP,
- DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2,
- SWAP, IADD, LADD, FADD, DADD,
- ISUB, LSUB, FSUB, DSUB, IMUL,
- LMUL, FMUL, DMUL, IDIV, LDIV,
- FDIV, DDIV, IREM, LREM, FREM,
- DREM, INEG, LNEG, FNEG, DNEG,
- ISHL, LSHL, ISHR, LSHR, IUSHR,
- LUSHR, IAND, LAND, IOR, LOR,
- IXOR, LXOR, IINC, I2L, I2F,
- I2D, L2I, L2F, L2D, F2I,
- F2L, F2D, D2I, D2L, D2F,
- I2B, I2C, I2S, LCMP, FCMPL,
- FCMPG, DCMPL, DCMPG, IFEQ, IFNE,
- IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
- IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE,
- IF_ACMPEQ, IF_ACMPNE, GOTO, JSR, RET,
- TABLESWITCH, LOOKUPSWITCH, IRETURN, LRETURN, FRETURN,
- DRETURN, ARETURN, RETURN, GETSTATIC, PUTSTATIC,
- GETFIELD, PUTFIELD, INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC,
- INVOKEINTERFACE, null /* 186: invokedynamic, not supported here */, NEW, NEWARRAY, ANEWARRAY,
- ARRAYLENGTH, ATHROW, CHECKCAST, INSTANCEOF, MONITORENTER,
- MONITOREXIT, WIDE, MULTIANEWARRAY, IFNULL, IFNONNULL,
- GOTO_W, JSR_W
- };
-
- protected JOpcode(String name,
- int code,
- int size,
- JType[] producedDataTypes,
- JType[] consumedDataTypes,
- int jumpKind) {
- this.name = name;
- this.code = code;
- this.size = size;
- this.producedDataTypes = producedDataTypes;
- this.consumedDataTypes = consumedDataTypes;
- this.jumpKind = jumpKind;
- switch (jumpKind) {
- case JMP_NONE: successorCount = 0; break;
- case JMP_NEXT: successorCount = 1; break;
- case JMP_ALWAYS_S2_OFFSET: successorCount = 1; break;
- case JMP_ALWAYS_S4_OFFSET: successorCount = 1; break;
- case JMP_MAYBE_S2_OFFSET: successorCount = 2; break;
- case JMP_TABLE: successorCount = UNKNOWN; break;
- case JMP_LOOKUP: successorCount = UNKNOWN; break;
- default: successorCount = UNKNOWN; break;
- }
- }
-
- public String toString() { return name; }
- protected int getSize() { return size; }
- protected JType[] getProducedDataTypes() { return producedDataTypes; }
- protected JType[] getConsumedDataTypes() { return consumedDataTypes; }
-
- protected int getProducedDataSize() {
- if (producedDataTypes != UNKNOWN_TYPE)
- return JType.getTotalSize(producedDataTypes);
- else {
- switch (code) {
- case cLDC: case cLDC_W: case cBALOAD:
- return 1;
- case cLDC2_W: case cDUP: case cSWAP:
- return 2;
- case cDUP_X1:
- return 3;
- case cDUP_X2: case cDUP2:
- return 4;
- case cDUP2_X1:
- return 5;
- case cDUP2_X2:
- return 6;
- default:
- throw new Error(this.toString());
- }
- }
- }
-
- protected int getConsumedDataSize() {
- if (consumedDataTypes != UNKNOWN_TYPE)
- return JType.getTotalSize(consumedDataTypes);
- else {
- switch (code) {
- case cPOP: case cDUP:
- return 1;
- case cPOP2: case cDUP_X1: case cDUP2: case cSWAP:
- return 2;
- case cDUP_X2: case cDUP2_X1:
- return 3;
- case cDUP2_X2:
- return 4;
- default:
- throw new Error(this.toString());
- }
- }
- }
-
- protected int getProducedDataTypesNumber() {
- if (producedDataTypes != UNKNOWN_TYPE)
- return producedDataTypes.length;
- else {
- switch (code) {
- case cLDC: case cLDC_W: case cLDC2_W: case cBALOAD:
- case cGETSTATIC: case cGETFIELD:
- return 1;
- case cDUP: case cSWAP:
- return 2;
- case cDUP_X2: case cDUP2: case cDUP2_X1: case cDUP2_X2:
- return 2;
- case cDUP_X1:
- return 3;
- default:
- throw new Error(this.toString());
- }
- }
- }
-
- protected int getConsumedDataTypesNumber() {
- if (consumedDataTypes != UNKNOWN_TYPE)
- return consumedDataTypes.length;
- else {
- switch (code) {
- case cPOP: case cDUP: case cPUTSTATIC:
- return 1;
- case cPUTFIELD: case cDUP_X1: case cDUP_X2:
- case cDUP2: case cDUP2_X1: case cPOP2: case cSWAP:
- return 2;
- default:
- throw new Error(this.toString());
- }
- }
- }
-}
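
The OPCODES table above is indexed by the raw bytecode value, with a null hole at 186 (invokedynamic, which FJBG does not support), so decoding a byte to its mnemonic is a direct array lookup. A minimal, self-contained sketch of that lookup; it assumes the pre-removal ch.epfl.lamp.fjbg package is still on the classpath, and the sample bytes are purely illustrative:

    import ch.epfl.lamp.fjbg.JOpcode;

    public class OpcodeNames {
        // Maps a raw bytecode value to its mnemonic via the public OPCODES table,
        // or to a placeholder for values the table does not cover (e.g. 186).
        static String mnemonic(int raw) {
            int op = raw & 0xFF;                      // bytecode values are unsigned
            if (op >= JOpcode.OPCODES.length) return "<out of range>";
            JOpcode opcode = JOpcode.OPCODES[op];
            return (opcode == null) ? "<unsupported>" : opcode.name;
        }

        public static void main(String[] args) {
            byte[] sample = { 0x2A, (byte) 0xB6, (byte) 0xB1 }; // aload_0, invokevirtual, return
            for (byte b : sample)
                System.out.println(mnemonic(b));
        }
    }
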
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java
deleted file mode 100644
index 50aa9d3636..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * Attributes which are unknown to the JVM (or at least to this library).
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JOtherAttribute extends JAttribute {
- protected final String name;
- protected final byte[] contents;
- protected final int length;
-
- public JOtherAttribute(FJBGContext context,
- JClass clazz,
- Object owner,
- String name,
- byte[] contents,
- int length) {
- super(context, clazz, name);
- this.name = name;
- this.contents = contents;
- this.length = length;
- }
-
- public JOtherAttribute(FJBGContext context,
- JClass clazz,
- Object owner,
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
- this.name = name;
- this.contents = new byte[size];
- this.length = size;
-
- stream.readFully(contents, 0, length); // read() may return fewer bytes than requested
- }
-
- public String getName() { return name; }
-
- // Follows javap output format for user-defined attributes.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(" ");
- buf.append(name);
- buf.append(": length = 0x");
- buf.append(Integer.toHexString(length).toUpperCase());
- for (int i = 0; i < length; ++i) {
- if (i % 16 == 0) buf.append("\n ");
- buf.append(hexString(contents[i] & 0xff)); // mask: bytes >= 0x80 would widen to a negative int
- buf.append(" ");
- }
- buf.append("\n");
- return buf.toString();
- }
-
- protected int getSize() { return length; }
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.write(contents, 0, length);
- }
-
- private static final String hexString(int i) {
- return ((0 <= i && i < 16) ? "0" : "")+Integer.toHexString(i).toUpperCase();
- }
-}
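
The javap-style dump produced by toString above boils down to printing each byte as exactly two upper-case hex digits, which requires masking the byte to an unsigned value first (the reason for the 0xff mask at the call site). A standalone sketch of that formatting, with invented contents:

    public class HexDump {
        // Formats one byte as two upper-case hex digits; the mask keeps negative
        // byte values from widening into an eight-digit hex string.
        static String hex(byte b) {
            int v = b & 0xFF;
            return (v < 16 ? "0" : "") + Integer.toHexString(v).toUpperCase();
        }

        public static void main(String[] args) {
            byte[] contents = { 0x00, 0x7F, (byte) 0x80, (byte) 0xFF };
            StringBuilder buf = new StringBuilder();
            for (int i = 0; i < contents.length; i++) {
                if (i % 16 == 0) buf.append("\n  ");
                buf.append(hex(contents[i])).append(' ');
            }
            System.out.println(buf);                  // 00 7F 80 FF
        }
    }
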
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java b/src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java
deleted file mode 100644
index 73d1026c04..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java
+++ /dev/null
@@ -1,19 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Types for Java references, i.e. arrays and objects.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-abstract public class JReferenceType extends JType {
- public boolean isReferenceType() { return true; }
-
- abstract public String getDescriptor();
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java
deleted file mode 100644
index 3a17cb2c44..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * SourceFile attribute, which can be attached to class files to
- * associate them with their source file.
- *
- * There can be no more than one SourceFile attribute in the attributes table
- * of a given ClassFile structure. See section 4.8.9 of the JVM specification.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JSourceFileAttribute extends JAttribute {
- protected final String sourceFileName;
- protected final int sourceFileIndex;
-
- public JSourceFileAttribute(FJBGContext context,
- JClass clazz,
- String sourceFileName) {
- super(context, clazz);
- this.sourceFileName = sourceFileName;
- this.sourceFileIndex = clazz.getConstantPool().addUtf8(sourceFileName);
- }
-
- public JSourceFileAttribute(FJBGContext context,
- JClass clazz,
- Object owner,
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
-
- this.sourceFileIndex = stream.readShort();
- this.sourceFileName = clazz.getConstantPool().lookupUtf8(sourceFileIndex);
-
- assert name.equals(getName());
- }
-
- public String getName() { return "SourceFile"; }
-
- public String getFileName() { return sourceFileName; }
-
- // Follows javap output format for SourceFile attribute.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(" SourceFile: \"");
- buf.append(sourceFileName);
- buf.append("\"\n");
- return buf.toString();
- }
-
- protected int getSize() {
- return 2; // Short.SIZE
- }
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(sourceFileIndex);
- }
-}
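
The body of this attribute is nothing more than a two-byte constant-pool index, which is why getSize() returns 2. A small self-contained round trip over DataOutputStream/DataInputStream, using an invented index value:

    import java.io.*;

    public class SourceFileIndexRoundTrip {
        public static void main(String[] args) throws IOException {
            int sourceFileIndex = 42;                 // hypothetical Utf8 constant-pool index

            // The attribute body is a single u2: write it ...
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            new DataOutputStream(bytes).writeShort(sourceFileIndex);

            // ... and read it back, as the stream-based constructor above does.
            int readBack = new DataInputStream(
                    new ByteArrayInputStream(bytes.toByteArray())).readShort();

            System.out.println(bytes.size() + " byte(s), index " + readBack); // 2 byte(s), index 42
        }
    }
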
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JStackMapTableAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JStackMapTableAttribute.java
deleted file mode 100644
index 72a5484d40..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JStackMapTableAttribute.java
+++ /dev/null
@@ -1,282 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * StackMapTable attribute, used by the JVM's type-checking bytecode verifier.
- * @author Stephane Micheloud
- * @version 1.0
- */
-
-public class JStackMapTableAttribute extends JAttribute {
- /** Constant pool of the current classfile. */
- private JConstantPool pool;
-
- /** StackMapTable entries */
- protected final List/*<Frame>*/ entries = new ArrayList();
- protected int entriesSize = 0;
- protected boolean usesU2;
-
- public JStackMapTableAttribute(FJBGContext context,
- JClass clazz,
- JCode code) {
- super(context, clazz);
- this.pool = clazz.pool;
-
- assert code.getOwner().getOwner() == clazz;
- }
-
- public JStackMapTableAttribute(FJBGContext context,
- JClass clazz,
- Object owner,
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
- this.pool = clazz.pool;
-
- int count = stream.readShort();
- this.usesU2 = count < 65536;
- for (int i = 0; i < count; ++i)
- this.entries.add(new Frame(stream));
- this.entriesSize = computeSize();
-
- assert name.equals(getName());
- }
-
- public String getName() { return "StackMapTable"; }
-
- // Follows javap output format for StackMapTable attribute.
- /*@Override*/ public String toString() {
- Frame frame = null;
- StringBuffer buf = new StringBuffer(" StackMapTable: number_of_entries = ");
- buf.append(entries.size());
- Iterator it = entries.iterator();
- while (it.hasNext()) {
- frame = (Frame)it.next();
- buf.append("\n frame_type = ");
- buf.append(frame.tag);
- buf.append(" /* ");
- buf.append(getFrameType(frame.tag));
- buf.append(" */");
- if (frame.offsetDelta != -1)
- buf.append("\n offset_delta = "+frame.offsetDelta);
- if (frame.locals != null)
- appendTypeInfoArray(buf, "locals", frame.locals);
- if (frame.stackItems != null)
- appendTypeInfoArray(buf, "stack", frame.stackItems);
- }
- buf.append("\n");
- return buf.toString();
- }
-
- protected int getSize() {
- return entriesSize;
- }
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(entries.size()); // number_of_entries is the frame count, not the byte size
- Iterator it = entries.iterator();
- while (it.hasNext()) {
- Frame frame = (Frame)it.next();
- frame.writeContentsTo(stream);
- }
- }
-
- private class TypeInfo {
- final int tag;
- final int poolIndexOrOffset; // tag == 7 => poolIndex, tag == 8 => offset
- private int bytes;
- TypeInfo(DataInputStream stream) throws IOException {
- int size = 1;
- this.tag = stream.readByte();
- if (tag == 7) { // ITEM_Object; // 7
- poolIndexOrOffset = stream.readShort();
- size += 2;
- } else if (tag == 8) { // ITEM_Uninitialized // 8
- poolIndexOrOffset = (usesU2) ? stream.readShort() : stream.readInt();
- size += (usesU2) ? 2 : 4;
- } else
- poolIndexOrOffset = -1;
- this.bytes += size;
- }
- int getSize() { return bytes; }
- void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeByte(tag);
- if (tag == 7) { // ITEM_Object; // 7
- stream.writeShort(poolIndexOrOffset);
- } else if (tag == 8) { // ITEM_Uninitialized // 8
- if (usesU2) stream.writeShort(poolIndexOrOffset);
- else stream.writeInt(poolIndexOrOffset);
- }
- }
- /*@Override*/ public String toString() {
- switch (tag) {
- case 0: // ITEM_Top
- return "<top>";
- case 1: // ITEM_Integer
- return "int";
- case 2: // ITEM_Float
- return "float";
- case 3: // ITEM_Double
- return "double";
- case 4: // ITEM_Long
- return "long";
- case 5: // ITEM_Null
- return "null";
- case 6: // ITEM_UninitializedThis
- return "this";
- case 7: // ITEM_Object
- String name = pool.lookupClass(poolIndexOrOffset);
- if (name.startsWith("[")) name = "\""+name+"\"";
- return "class "+name;
- case 8: // ITEM_Uninitialized
- return "<uninitialized>";
- default:
- return String.valueOf(tag);
- }
- }
- }
-
- private class Frame {
- final int tag;
- int offsetDelta = -1;
- TypeInfo[] stackItems = null;
- TypeInfo[] locals = null;
- private int bytes;
- Frame(DataInputStream stream) throws IOException {
- // The stack_map_frame structure consists of a one-byte tag
- // followed by zero or more bytes.
- this.tag = stream.readUnsignedByte();
- if (tag < 64) { // SAME; // 0-63
- //done
- } else if (tag < 128) { // SAME_LOCALS_1_STACK_ITEM; // 64-127
- this.offsetDelta = tag - 64;
- readStackItems(stream, 1);
- } else if (tag < 248) { // reserved for future use.
- assert false : "Tags in the range [128-247] are reserved for future use.";
- } else if (tag < 251) { // CHOP; // 248-250
- // a CHOP frame removes (251 - tag) locals; only the offset delta follows
- readOffsetDelta(stream);
- } else if (tag == 251) { // SAME_FRAME_EXTENDED
- readOffsetDelta(stream);
- } else if (tag < 255) { // APPEND; // 252-254
- readOffsetDelta(stream);
- readLocals(stream, tag - 251);
- } else { // FULL_FRAME; // 255
- readOffsetDelta(stream);
- readLocals(stream);
- readStackItems(stream);
- }
- }
- int getSize() { return bytes; }
- void readOffsetDelta(DataInputStream stream) throws IOException {
- this.offsetDelta = (usesU2) ? stream.readShort() : stream.readInt();
- this.bytes += (usesU2) ? 2 : 4;
- }
- int getOffsetDelta() { return offsetDelta; }
- void readStackItems(DataInputStream stream, int k) throws IOException {
- this.stackItems = new TypeInfo[k];
- for (int i = 0; i < k; ++i) {
- stackItems[i] = new TypeInfo(stream);
- this.bytes += stackItems[i].getSize();
- }
- }
- void readStackItems(DataInputStream stream) throws IOException {
- int k = (usesU2) ? stream.readShort() : stream.readInt();
- this.bytes += (usesU2) ? 2 : 4;
- readStackItems(stream, k);
- }
- void readLocals(DataInputStream stream, int k) throws IOException {
- this.locals = new TypeInfo[k];
- for (int i = 0; i < k; ++i) {
- locals[i] = new TypeInfo(stream);
- this.bytes += locals[i].getSize();
- }
- }
- void readLocals(DataInputStream stream) throws IOException {
- int k = (usesU2) ? stream.readShort() : stream.readInt();
- this.bytes += (usesU2) ? 2 : 4;
- readLocals(stream, k);
- }
- void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeByte(tag);
- if (tag < 64) {
- //done
- } else if (tag < 128) { // SAME_LOCALS_1_STACK_ITEM; // 64-127
- assert stackItems.length == 1;
- stackItems[0].writeContentsTo(stream);
- } else if (tag < 248) {
- assert false : "Tags in the range [128-247] are reserved for future use.";
- } else if (tag < 251) {
- if (usesU2) stream.writeShort(offsetDelta);
- else stream.writeInt(offsetDelta);
- } else if (tag == 251) {
- if (usesU2) stream.writeShort(offsetDelta);
- else stream.writeInt(offsetDelta);
- } else if (tag < 255) { // APPEND; // 252-254
- if (usesU2) stream.writeShort(offsetDelta);
- else stream.writeInt(offsetDelta);
- for (int i = 0; i < locals.length; ++i)
- locals[i].writeContentsTo(stream);
- } else {
- if (usesU2) stream.writeShort(offsetDelta);
- else stream.writeInt(offsetDelta);
- for (int i = 0; i < locals.length; ++i)
- locals[i].writeContentsTo(stream);
- for (int i = 0; i < stackItems.length; ++i)
- stackItems[i].writeContentsTo(stream);
- }
- }
- }
-
- private int computeSize() {
- int size = (usesU2) ? 2 : 4; // number of frames
- Iterator it = entries.iterator();
- while (it.hasNext()) {
- Frame frame = (Frame)it.next();
- size += frame.getSize();
- }
- return size;
- }
-
- private static final String getFrameType(int tag) {
- if (tag < 64) return "same";
- else if (tag < 128) return "same locals 1 stack item";
- else if (tag < 248) return "<reserved>";
- else if (tag < 251) return "chop";
- else if (tag == 251) return "same frame extended";
- else if (tag < 255) return "append";
- else return "full frame";
- }
-
- private static StringBuffer appendTypeInfoArray(StringBuffer buf,
- String s, TypeInfo[] a) {
- buf.append("\n ");
- buf.append(s);
- buf.append(" = ");
- if (a.length > 0) {
- buf.append("[ ");
- for (int i = 0; i < a.length; ++i) {
- if (i > 0) buf.append(", ");
- buf.append(a[i]);
- }
- buf.append(" ]");
- }
- else
- buf.append("[]");
- return buf;
- }
-
-}
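
The frame decoding above is driven entirely by the one-byte tag of each stack_map_frame. A standalone sketch of that classification, mirroring the ranges used by Frame and getFrameType (note that the JVMS additionally defines tag 247, same_locals_1_stack_item_extended, which this library treats as reserved):

    public class FrameTags {
        // Classifies a StackMapTable frame by its tag, following the ranges above.
        static String frameKind(int tag) {
            if (tag < 64)   return "same";
            if (tag < 128)  return "same locals 1 stack item (offset_delta = " + (tag - 64) + ")";
            if (tag < 248)  return "reserved";
            if (tag < 251)  return "chop (" + (251 - tag) + " locals removed)";
            if (tag == 251) return "same frame extended";
            if (tag < 255)  return "append (" + (tag - 251) + " locals added)";
            return "full frame";
        }

        public static void main(String[] args) {
            for (int tag : new int[] { 0, 70, 250, 253, 255 })
                System.out.println(tag + " -> " + frameKind(tag));
        }
    }
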
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JType.java b/src/fjbg/ch/epfl/lamp/fjbg/JType.java
deleted file mode 100644
index 298a2b0565..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JType.java
+++ /dev/null
@@ -1,316 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.IOException;
-import java.io.StringReader;
-import java.util.ArrayList;
-
-/**
- * Representation of Java types.
- *
- * @version 1.0
- * @author Michel Schinz
- */
-
-abstract public class JType {
- abstract public int getSize();
- abstract public String getSignature();
- abstract public int getTag();
- abstract public String toString();
- abstract public boolean isCompatibleWith(JType other);
-
- public boolean isValueType() { return false; }
- public boolean isObjectType() { return false; }
- public boolean isArrayType() { return false; }
- public boolean isReferenceType() { return false; }
-
- // Tags for types. Taken from BCEL.
- public static final int T_BOOLEAN = 4;
- public static final int T_CHAR = 5;
- public static final int T_FLOAT = 6;
- public static final int T_DOUBLE = 7;
- public static final int T_BYTE = 8;
- public static final int T_SHORT = 9;
- public static final int T_INT = 10;
- public static final int T_LONG = 11;
- public static final int T_VOID = 12; // Non-standard
- public static final int T_ARRAY = 13;
- public static final int T_OBJECT = 14;
- public static final int T_UNKNOWN = 15;
- public static final int T_ADDRESS = 16;
-
- public static final int T_REFERENCE = 17; // type compatible with references
-
- public static final JType[] EMPTY_ARRAY = new JType[0];
-
- protected static JType parseSig(StringReader s) throws IOException {
- int nextChar = s.read();
- if (nextChar == -1) throw new IllegalArgumentException();
-
- switch ((char)nextChar) {
- case 'V' : return VOID;
- case 'Z' : return BOOLEAN;
- case 'B' : return BYTE;
- case 'C' : return CHAR;
- case 'S' : return SHORT;
- case 'I' : return INT;
- case 'F' : return FLOAT;
- case 'J' : return LONG;
- case 'D' : return DOUBLE;
- case 'L': {
- StringBuffer className = new StringBuffer();
- for (;;) {
- nextChar = s.read();
- if (nextChar == -1 || nextChar == ';') break;
- className.append(nextChar == '/' ? ':' : ((char)nextChar));
- }
- if (nextChar != ';') throw new IllegalArgumentException();
- return new JObjectType(className.toString());
- }
- case '[': {
- JType elemType = parseSig(s);
- return new JArrayType(elemType);
- }
- case '(': {
- ArrayList argTps = new ArrayList();
- for (;;) {
- s.mark(1);
- nextChar = s.read();
- if (nextChar == -1 || nextChar == ')') break;
- s.reset();
- argTps.add(parseSig(s));
- }
- if (nextChar != ')') throw new IllegalArgumentException("unterminated argument list");
- JType[] argTpsA = (JType[])argTps.toArray(new JType[argTps.size()]);
- JType returnType = parseSig(s);
- return new JMethodType(returnType, argTpsA);
- }
- default:
- throw new IllegalArgumentException();
- }
- }
-
- /**
- * A signature is a string representing the generic type of a field or
- * method, or generic type information for a class declaration.
- * See section 4.4.4 of the JVM specification.
- */
- public static JType parseSignature(String signature) {
- try {
- StringReader sigReader = new StringReader(signature);
- JType parsed = parseSig(sigReader);
- if (sigReader.read() != -1)
- throw new IllegalArgumentException();
- return parsed;
- } catch (IllegalArgumentException e) {
- throw new IllegalArgumentException("invalid signature " + signature);
- } catch (IOException e) {
- throw new Error(e);
- }
- }
-
- public static int getTotalSize(JType[] types) {
- int size = 0;
- for (int i = 0; i < types.length; ++i)
- size += types[i].getSize();
- return size;
- }
-
- protected JType() {}
-
- public static JType VOID = new JType() {
- public int getSize() { return 0; }
- public String getSignature() { return "V"; }
- public int getTag() { return T_VOID; }
- public String toString() { return "void"; }
- public boolean isCompatibleWith(JType other) {
- throw new UnsupportedOperationException("type VOID is no real "
- + "data type therefore "
- + "cannot be assigned to "
- + other.toString());
- }
- };
-
- public static JType BOOLEAN = new JType() {
- public int getSize() { return 1; }
- public String getSignature() { return "Z"; }
- public int getTag() { return T_BOOLEAN; }
- public String toString() { return "boolean"; }
- public boolean isValueType() { return true; }
- public boolean isCompatibleWith(JType other) {
- return other == BOOLEAN
- || other == INT
- || other == BYTE
- || other == CHAR
- || other == SHORT;
- }
- };
-
- public static JType BYTE = new JType() {
- public int getSize() { return 1; }
- public String getSignature() { return "B"; }
- public int getTag() { return T_BYTE; }
- public String toString() { return "byte"; }
- public boolean isValueType() { return true; }
- public boolean isCompatibleWith(JType other) {
- return other == BOOLEAN
- || other == INT
- || other == BYTE
- || other == CHAR
- || other == SHORT;
- }
- };
-
- public static JType CHAR = new JType() {
- public int getSize() { return 1; }
- public String getSignature() { return "C"; }
- public int getTag() { return T_CHAR; }
- public String toString() { return "char"; }
- public boolean isValueType() { return true; }
- public boolean isCompatibleWith(JType other) {
- return other == BOOLEAN
- || other == INT
- || other == BYTE
- || other == CHAR
- || other == SHORT;
- }
- };
-
- public static JType SHORT = new JType() {
- public int getSize() { return 1; }
- public String getSignature() { return "S"; }
- public int getTag() { return T_SHORT; }
- public String toString() { return "short"; }
- public boolean isValueType() { return true; }
- public boolean isCompatibleWith(JType other) {
- return other == BOOLEAN
- || other == INT
- || other == BYTE
- || other == CHAR
- || other == SHORT;
- }
- };
-
- public static JType INT = new JType() {
- public int getSize() { return 1; }
- public String getSignature() { return "I"; }
- public int getTag() { return T_INT; }
- public String toString() { return "int"; }
- public boolean isValueType() { return true; }
- public boolean isCompatibleWith(JType other) {
- return other == BOOLEAN
- || other == INT
- || other == BYTE
- || other == CHAR
- || other == SHORT;
- }
- };
-
- public static JType FLOAT = new JType() {
- public int getSize() { return 1; }
- public String getSignature() { return "F"; }
- public int getTag() { return T_FLOAT; }
- public String toString() { return "float"; }
- public boolean isValueType() { return true; }
- public boolean isCompatibleWith(JType other) {
- return other == FLOAT;
- }
- };
-
- public static JType LONG = new JType() {
- public int getSize() { return 2; }
- public String getSignature() { return "J"; }
- public int getTag() { return T_LONG; }
- public String toString() { return "long"; }
- public boolean isValueType() { return true; }
- public boolean isCompatibleWith(JType other) {
- return other == LONG;
- }
- };
-
- public static JType DOUBLE = new JType() {
- public int getSize() { return 2; }
- public String getSignature() { return "D"; }
- public int getTag() { return T_DOUBLE; }
- public String toString() { return "double"; }
- public boolean isValueType() { return true; }
- public boolean isCompatibleWith(JType other) {
- return other == DOUBLE;
- }
- };
-
- public static JType REFERENCE = new JType() {
- public int getSize() { return 1; }
- public String getSignature() {
- throw new UnsupportedOperationException("type REFERENCE is no real "
- + "data type and therefore "
- + "has no signature");
- }
- public int getTag() { return T_REFERENCE; }
- public String toString() { return "<reference>"; }
- public boolean isCompatibleWith(JType other) {
- throw new UnsupportedOperationException("type REFERENCE is no real "
- + "data type and therefore "
- + "cannot be assigned to "
- + other.toString());
- }
- };
-
- public static JType ADDRESS = new JType() {
- public int getSize() { return 1; }
- public String getSignature() {
- throw new UnsupportedOperationException("type ADDRESS is no usable "
- + "data type and therefore "
- + "has no signature");
- }
- public int getTag() { return T_ADDRESS; }
- public String toString() { return "<address>"; }
- public boolean isCompatibleWith(JType other) {
- return other == ADDRESS;
- }
- };
-
- public static JType UNKNOWN = new JType() {
- public int getSize() {
- throw new UnsupportedOperationException("type UNKNOWN is no real "
- + "data type and therefore "
- + "has no size");
- }
- public String getSignature() {
- throw new UnsupportedOperationException("type UNKNOWN is no real "
- + "data type and therefore "
- + "has no signature");
- }
- public int getTag() { return T_UNKNOWN; }
- public String toString() { return "<unknown>"; }
- public boolean isCompatibleWith(JType other) {
- throw new UnsupportedOperationException("type UNKNOWN is no real "
- + "data type and therefore "
- + "cannot be assigned to "
- + other.toString());
- }
- };
-
- protected static String tagToString(int tag) {
- switch (tag) {
- case T_BOOLEAN : return "boolean";
- case T_CHAR : return "char";
- case T_FLOAT : return "float";
- case T_DOUBLE : return "double";
- case T_BYTE : return "byte";
- case T_SHORT : return "short";
- case T_INT : return "int";
- case T_LONG : return "long";
- case T_VOID : return "void"; // Non-standard
- case T_ARRAY : return "[]";
- case T_OBJECT : return "Object";
- case T_UNKNOWN : return "<unknown>";
- case T_ADDRESS : return "<address>";
- default: return String.valueOf(tag);
- }
- }
-}
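
The JType value-type singletons deleted above all encode one widening rule: boolean, byte, char, short and int occupy an int-sized JVM slot and are mutually assignable, while float, long and double only match themselves. A minimal standalone Scala sketch of that rule follows; PrimitiveCompat and its members are hypothetical names, not FJBG API, and the REFERENCE/ADDRESS/UNKNOWN pseudo-types are deliberately left out.

import scala.Predef.assert

object PrimitiveCompat {
  sealed trait Prim
  case object BOOLEAN extends Prim; case object BYTE   extends Prim
  case object CHAR    extends Prim; case object SHORT  extends Prim
  case object INT     extends Prim; case object FLOAT  extends Prim
  case object LONG    extends Prim; case object DOUBLE extends Prim

  // boolean, byte, char, short and int share an int-sized slot, so any of them
  // may be stored where another is expected
  private val intLike: Set[Prim] = Set(BOOLEAN, BYTE, CHAR, SHORT, INT)

  def isCompatibleWith(a: Prim, b: Prim): Boolean =
    if (intLike(a)) intLike(b)   // the rule the five int-like singletons above spell out by hand
    else a == b                  // float, long and double only match themselves

  def main(args: Array[String]): Unit = {
    assert(isCompatibleWith(BYTE, INT))      // widening among int-like types is allowed
    assert(!isCompatibleWith(FLOAT, DOUBLE)) // but float and double stay distinct
  }
}
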
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/Main.java b/src/fjbg/ch/epfl/lamp/fjbg/Main.java
deleted file mode 100644
index 810ee7c400..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/Main.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.InputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.jar.JarFile;
-import java.util.zip.ZipEntry;
-
-/**
- * Main program entry to execute the FJBG reader from the command line.
- *
- * The reader prints out the decoded data in the same output format as
- * javap, the Java bytecode disassembler of the Sun J2SE SDK.
- *
- * @author Stephane Micheloud
- * @version 1.1
- */
-
-public class Main {
- private static final String PRODUCT_STRING = "Fast Java Bytecode Generator";
- private static final String VERSION_STRING = "version 1.1";
-
- private static final int ACTION_USAGE = 0;
- private static final int ACTION_DONE = 1;
- private static final int ACTION_PROCEED = 2;
-
- private static String classPath = ".";
- private static String[] classNames = null;
-
- public static void main(String[] args) {
- switch (parseArgs(args)) {
- case ACTION_USAGE: printUsage(); break;
- case ACTION_PROCEED: processClasses(); break;
- default:
- }
- }
-
- private static void processClasses() {
- FJBGContext fjbgContext = new FJBGContext(49, 0);
- if (classNames.length > 0)
- try {
- for (int i = 0; i < classNames.length; ++i)
- processClass(fjbgContext, classNames[i]);
- } catch (IOException e) {
- System.err.println(e.getMessage());
- }
- else
- System.err.println(
- "No classes were specified on the command line. Try -help.");
- }
-
- private static void processClass(FJBGContext fjbgContext, String className)
- throws IOException {
- InputStream in = getInputStream(className);
- JClass jclass = fjbgContext.JClass(new DataInputStream(in));
- System.out.println(jclass);
- in.close();
- }
-
- private static InputStream getInputStream(String className) throws IOException {
- String name = null;
- String[] paths = classPath.split(File.pathSeparator);
- for (int i = 0; i < paths.length; ++i) {
- File parent = new File(paths[i]);
- if (parent.isDirectory()) {
- name = className.replace('.', File.separatorChar)+".class";
- File f = new File(parent, name);
- if (f.isFile()) return new FileInputStream(f);
- } else if (paths[i].endsWith(".jar")) {
- JarFile f = new JarFile(parent);
- name = className.replace('.', '/')+".class";
- ZipEntry e = f.getEntry(name);
- if (e != null) return f.getInputStream(e);
- }
- }
- throw new IOException("ERROR:Could not find "+className);
- }
-
- private static int parseArgs(String[] args) {
- ArrayList/*<String>*/ classes = new ArrayList();
- String arg = null;
- int action = ACTION_USAGE;
- int i = 0, n = args.length;
- while (i < n) {
- arg = args[i];
- if (arg.equals("-classpath") && (i+1) < n) {
- classPath = args[i+1]; i += 2;
- } else if (arg.equals("-cp") && (i+1) < n) {
- classPath = args[i+1]; i += 2;
- } else if (arg.equals("-help")) {
- i = n+1;
- //} else if (arg.equals("-v")) {
- // verbose = true; i += 1;
- } else if (arg.equals("-version")) {
- System.err.println(PRODUCT_STRING+" "+VERSION_STRING);
- action = ACTION_DONE; i = n+1;
- } else if (arg.startsWith("-")) {
- System.err.println("invalid flag: "+arg);
- i = n+1;
- } else {
- classes.add(arg); i += 1;
- }
- }
- if (i == n && i > 0) {
- classNames = (String[])classes.toArray(new String[classes.size()]);
- action = ACTION_PROCEED;
- }
- return action;
- }
-
- private static void printUsage() {
- System.out.println("Usage: fjbg <options> <classes>");
- System.out.println();
- System.out.println("where possible options include:");
- System.out.println(" -cp <path> Specify where to find user class files");
- System.out.println(" -classpath <path> Specify where to find user class files");
- System.out.println(" -help Print a synopsis of standard options");
- System.out.println(" -version Version information");
- System.out.println();
- System.exit(1);
- }
-}
-
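
The deleted Main resolves each class name against a classpath mixing directories and jars; the one subtle point is that directory lookups turn dots into File.separatorChar while jar entries always use '/'. A hedged standalone Scala sketch of the same lookup, assuming a hypothetical ClassfileLookup helper that is not part of this patch:

import java.io.{ File, FileInputStream, IOException, InputStream }
import java.util.jar.JarFile

object ClassfileLookup {
  def open(classPath: String, className: String): InputStream = {
    for (entry <- classPath.split(File.pathSeparator)) {
      val parent = new File(entry)
      if (parent.isDirectory) {
        // directories: dots become the platform separator
        val f = new File(parent, className.replace('.', File.separatorChar) + ".class")
        if (f.isFile) return new FileInputStream(f)
      } else if (entry.endsWith(".jar")) {
        // jars: zip entry names always use '/'
        val jar = new JarFile(parent)
        val e = jar.getEntry(className.replace('.', '/') + ".class")
        if (e != null) return jar.getInputStream(e)
      }
    }
    throw new IOException("Could not find " + className)
  }
}
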
diff --git a/src/fjbg/ch/epfl/lamp/util/ByteArray.java b/src/fjbg/ch/epfl/lamp/util/ByteArray.java
deleted file mode 100644
index b852e1ac1f..0000000000
--- a/src/fjbg/ch/epfl/lamp/util/ByteArray.java
+++ /dev/null
@@ -1,145 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.util;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
-/**
- * Array of bytes.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class ByteArray {
- protected final static int BYTE_BLOCK_BITS = 8;
- protected final static int BYTE_BLOCK_SIZE = 1 << BYTE_BLOCK_BITS;
- protected final static int BYTE_BLOCK_MASK = BYTE_BLOCK_SIZE - 1;
-
- protected byte[][] data = new byte[][] { new byte[BYTE_BLOCK_SIZE] };
- protected int pos = 0; // The next free position.
-
- protected boolean frozen = false;
-
- public ByteArray() { }
-
- public ByteArray(InputStream stream, int size) throws IOException {
- pos = size;
- for (int i = 0; size > 0; ++i) {
- int sizeToRead = Math.min(BYTE_BLOCK_SIZE, size);
- stream.read(data[i], 0, sizeToRead);
-
- size -= sizeToRead;
- if (size > 0) addNewBlock();
- }
- }
-
- public void freeze() { frozen = true; }
-
- public int nextBytePosition() {
- return pos;
- }
-
- public int getSize() {
- return pos;
- }
-
- protected void addNewBlock() {
- int nextBlockPos = pos >>> BYTE_BLOCK_BITS;
- if (nextBlockPos == data.length) {
- byte[][] newData = new byte[data.length * 2][];
- System.arraycopy(data, 0, newData, 0, data.length);
- data = newData;
- }
- assert data[nextBlockPos] == null : pos + " " + nextBlockPos;
- data[nextBlockPos] = new byte[BYTE_BLOCK_SIZE];
- }
-
- protected void addByte(int b) {
- assert !frozen;
-
- if ((pos & BYTE_BLOCK_MASK) == 0 && pos > 0)
- addNewBlock();
- int currPos = pos++;
- data[currPos >>> BYTE_BLOCK_BITS][currPos & BYTE_BLOCK_MASK] = (byte)b;
- }
-
- public void addU1(int i) {
- assert i <= 0xFF : i;
- addByte(i);
- }
-
- public void addU2(int i) {
- assert i <= 0xFFFF : i;
-
- addByte(i >>> 8);
- addByte(i & 0xFF);
- }
-
- public void addU4(int i) {
- addByte(i >>> 24);
- addByte((i >>> 16) & 0xFF);
- addByte((i >>> 8) & 0xFF);
- addByte(i & 0xFF);
- }
-
- public void putByte(int targetPos, int b) {
- assert !frozen;
- assert targetPos < pos : targetPos + " >= " + pos;
-
- data[targetPos >>> BYTE_BLOCK_BITS][targetPos & BYTE_BLOCK_MASK] = (byte)b;
- }
-
- public void putU2(int targetPos, int i) {
- assert i < 0xFFFF : i;
- putByte(targetPos, i >>> 8);
- putByte(targetPos + 1, i & 0xFF);
- }
-
- public void putU4(int targetPos, int i) {
- putByte(targetPos, i >>> 24);
- putByte(targetPos + 1, (i >>> 16) & 0xFF);
- putByte(targetPos + 2, (i >>> 8) & 0xFF);
- putByte(targetPos + 3, i & 0xFF);
- }
-
- public int getU1(int sourcePos) {
- assert sourcePos < pos : sourcePos + " >= " + pos;
- return data[sourcePos >>> BYTE_BLOCK_BITS][sourcePos & BYTE_BLOCK_MASK] & 0xFF;
- }
-
- public int getU2(int sourcePos) {
- return (getU1(sourcePos) << 8) | getU1(sourcePos + 1);
- }
-
- public int getU4(int sourcePos) {
- return (getU2(sourcePos) << 16) | getU2(sourcePos + 2);
- }
-
- public int getS1(int sourcePos) {
- assert sourcePos < pos : sourcePos + " >= " + pos;
- return data[sourcePos >>> BYTE_BLOCK_BITS][sourcePos & BYTE_BLOCK_MASK];
- }
-
- public int getS2(int sourcePos) {
- return (getS1(sourcePos) << 8) | getU1(sourcePos + 1);
- }
-
- public int getS4(int sourcePos) {
- return (getS2(sourcePos) << 16) | getU2(sourcePos + 2);
- }
-
- public void writeTo(OutputStream stream) throws IOException {
- if (!frozen) freeze();
-
- for (int i = 0; i < data.length && data[i] != null; ++i) {
- int len = Math.min(BYTE_BLOCK_SIZE, pos - (i << BYTE_BLOCK_BITS));
- stream.write(data[i], 0, len);
- }
- }
-}
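
The ByteArray removed above stores its contents in fixed 256-byte blocks addressed with shifts and masks instead of repeatedly resizing one flat array, and it writes multi-byte values big-endian as the class file format requires. A small Scala sketch of that addressing scheme under the same constants; BlockBuffer is a hypothetical name and only the core addByte/addU2/getU1/getU2 paths are reproduced.

class BlockBuffer {
  private val BlockBits = 8
  private val BlockSize = 1 << BlockBits          // 256 bytes per block
  private val BlockMask = BlockSize - 1

  private var data: Array[Array[Byte]] = Array(new Array[Byte](BlockSize))
  private var pos = 0                             // next free position

  private def addByte(b: Int): Unit = {
    if ((pos & BlockMask) == 0 && pos > 0) {      // crossed a block boundary: allocate the next block
      val block = pos >>> BlockBits
      if (block == data.length)                   // outer table full: double it
        data = java.util.Arrays.copyOf(data, data.length * 2)
      data(block) = new Array[Byte](BlockSize)
    }
    data(pos >>> BlockBits)(pos & BlockMask) = b.toByte
    pos += 1
  }

  // big-endian, as in the class file format
  def addU2(i: Int): Unit = { addByte(i >>> 8); addByte(i & 0xFF) }

  def getU1(at: Int): Int = data(at >>> BlockBits)(at & BlockMask) & 0xFF
  def getU2(at: Int): Int = (getU1(at) << 8) | getU1(at + 1)
}
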
diff --git a/src/intellij/compiler.iml.SAMPLE b/src/intellij/compiler.iml.SAMPLE
index 696c347b7b..f8b1f31327 100644
--- a/src/intellij/compiler.iml.SAMPLE
+++ b/src/intellij/compiler.iml.SAMPLE
@@ -20,8 +20,6 @@
<orderEntry type="module" module-name="library" />
<orderEntry type="module" module-name="reflect" />
<orderEntry type="module" module-name="asm" />
- <orderEntry type="module" module-name="fjbg" />
- <orderEntry type="module" module-name="msil" />
<orderEntry type="library" name="ant" level="application" />
<orderEntry type="library" name="jline" level="project" />
</component>
diff --git a/src/intellij/msil.iml.SAMPLE b/src/intellij/continuations-library.iml.SAMPLE
index 56f794785f..364cc3dcdb 100644
--- a/src/intellij/msil.iml.SAMPLE
+++ b/src/intellij/continuations-library.iml.SAMPLE
@@ -12,9 +12,8 @@
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/../msil">
- <sourceFolder url="file://$MODULE_DIR$/../msil" isTestSource="false" />
- <excludeFolder url="file://$MODULE_DIR$/../msil/ch/epfl/lamp/compiler/msil/tests" />
+ <content url="file://$MODULE_DIR$/../continuations/library">
+ <sourceFolder url="file://$MODULE_DIR$/../continuations/library" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
diff --git a/src/intellij/continuations-plugin.iml.SAMPLE b/src/intellij/continuations-plugin.iml.SAMPLE
new file mode 100644
index 0000000000..27213374b3
--- /dev/null
+++ b/src/intellij/continuations-plugin.iml.SAMPLE
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="FacetManager">
+ <facet type="scala" name="Scala">
+ <configuration>
+ <option name="compilerLibraryLevel" value="Project" />
+ <option name="compilerLibraryName" value="compiler-locker" />
+ <option name="maximumHeapSize" value="1536" />
+ <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
+ </configuration>
+ </facet>
+ </component>
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../continuations/plugin">
+ <sourceFolder url="file://$MODULE_DIR$/../continuations/plugin" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="reflect" />
+ </component>
+</module>
+
diff --git a/src/intellij/fjbg.iml.SAMPLE b/src/intellij/fjbg.iml.SAMPLE
deleted file mode 100644
index 03eca69246..0000000000
--- a/src/intellij/fjbg.iml.SAMPLE
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<module type="JAVA_MODULE" version="4">
- <component name="NewModuleRootManager" inherit-compiler-output="true">
- <exclude-output />
- <content url="file://$MODULE_DIR$/../fjbg">
- <sourceFolder url="file://$MODULE_DIR$/../fjbg" isTestSource="false" />
- </content>
- <orderEntry type="inheritedJdk" />
- <orderEntry type="sourceFolder" forTests="false" />
- </component>
-</module>
-
diff --git a/src/intellij/interactive.iml.SAMPLE b/src/intellij/interactive.iml.SAMPLE
new file mode 100644
index 0000000000..c6c8ebb606
--- /dev/null
+++ b/src/intellij/interactive.iml.SAMPLE
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="FacetManager">
+ <facet type="scala" name="Scala">
+ <configuration>
+ <option name="compilerLibraryLevel" value="Project" />
+ <option name="compilerLibraryName" value="compiler-locker" />
+ <option name="maximumHeapSize" value="1536" />
+ <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
+ </configuration>
+ </facet>
+ </component>
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../interactive">
+ <sourceFolder url="file://$MODULE_DIR$/../interactive" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="scaladoc" />
+ </component>
+</module>
diff --git a/src/intellij/scala-lang.ipr.SAMPLE b/src/intellij/scala-lang.ipr.SAMPLE
index 37307c2029..e470e019c9 100644
--- a/src/intellij/scala-lang.ipr.SAMPLE
+++ b/src/intellij/scala-lang.ipr.SAMPLE
@@ -198,14 +198,16 @@
<module fileurl="file://$PROJECT_DIR$/actors.iml" filepath="$PROJECT_DIR$/actors.iml" />
<module fileurl="file://$PROJECT_DIR$/asm.iml" filepath="$PROJECT_DIR$/asm.iml" />
<module fileurl="file://$PROJECT_DIR$/compiler.iml" filepath="$PROJECT_DIR$/compiler.iml" />
- <module fileurl="file://$PROJECT_DIR$/fjbg.iml" filepath="$PROJECT_DIR$/fjbg.iml" />
+ <module fileurl="file://$PROJECT_DIR$/continuations-library.iml" filepath="$PROJECT_DIR$/continuations-library.iml" />
+ <module fileurl="file://$PROJECT_DIR$/continuations-plugin.iml" filepath="$PROJECT_DIR$/continuations-plugin.iml" />
<module fileurl="file://$PROJECT_DIR$/forkjoin.iml" filepath="$PROJECT_DIR$/forkjoin.iml" />
+ <module fileurl="file://$PROJECT_DIR$/interactive.iml" filepath="$PROJECT_DIR$/interactive.iml" />
<module fileurl="file://$PROJECT_DIR$/library.iml" filepath="$PROJECT_DIR$/library.iml" />
<module fileurl="file://$PROJECT_DIR$/manual.iml" filepath="$PROJECT_DIR$/manual.iml" />
- <module fileurl="file://$PROJECT_DIR$/msil.iml" filepath="$PROJECT_DIR$/msil.iml" />
<module fileurl="file://$PROJECT_DIR$/partest.iml" filepath="$PROJECT_DIR$/partest.iml" />
<module fileurl="file://$PROJECT_DIR$/reflect.iml" filepath="$PROJECT_DIR$/reflect.iml" />
<module fileurl="file://$PROJECT_DIR$/scala.iml" filepath="$PROJECT_DIR$/scala.iml" />
+ <module fileurl="file://$PROJECT_DIR$/scaladoc.iml" filepath="$PROJECT_DIR$/scaladoc.iml" />
<module fileurl="file://$PROJECT_DIR$/scalap.iml" filepath="$PROJECT_DIR$/scalap.iml" />
<module fileurl="file://$PROJECT_DIR$/swing.iml" filepath="$PROJECT_DIR$/swing.iml" />
<module fileurl="file://$PROJECT_DIR$/test.iml" filepath="$PROJECT_DIR$/test.iml" />
@@ -230,7 +232,6 @@
<root url="file://$PROJECT_DIR$/../../build/locker/classes/library" />
<root url="file://$PROJECT_DIR$/../../build/locker/classes/compiler" />
<root url="file://$PROJECT_DIR$/../../build/locker/classes/reflect" />
- <root url="file://$PROJECT_DIR$/../../build/libs/classes/fjbg" />
<root url="file://$PROJECT_DIR$/../../build/asm/classes" />
</CLASSES>
<JAVADOC />
diff --git a/src/intellij/scaladoc.iml.SAMPLE b/src/intellij/scaladoc.iml.SAMPLE
new file mode 100644
index 0000000000..6cc609919c
--- /dev/null
+++ b/src/intellij/scaladoc.iml.SAMPLE
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="FacetManager">
+ <facet type="scala" name="Scala">
+ <configuration>
+ <option name="compilerLibraryLevel" value="Project" />
+ <option name="compilerLibraryName" value="compiler-locker" />
+ <option name="maximumHeapSize" value="1536" />
+ <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
+ </configuration>
+ </facet>
+ </component>
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../scaladoc">
+ <sourceFolder url="file://$MODULE_DIR$/../scaladoc" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="module" module-name="compiler" />
+ </component>
+</module>
diff --git a/src/intellij/test.iml.SAMPLE b/src/intellij/test.iml.SAMPLE
index 112fec428f..3ce369be05 100644
--- a/src/intellij/test.iml.SAMPLE
+++ b/src/intellij/test.iml.SAMPLE
@@ -12,9 +12,7 @@
<orderEntry type="module" module-name="swing" />
<orderEntry type="module" module-name="partest" />
<orderEntry type="module" module-name="asm" />
- <orderEntry type="module" module-name="fjbg" />
<orderEntry type="module" module-name="forkjoin" />
- <orderEntry type="module" module-name="msil" />
</component>
</module>
diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
index af82957a2e..f84fa161c0 100644
--- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
@@ -7,8 +7,6 @@ package interactive
import scala.util.control.ControlThrowable
import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.symtab._
-import scala.tools.nsc.ast._
import scala.tools.nsc.util.FailedInterrupt
import scala.tools.nsc.util.EmptyAction
import scala.tools.nsc.util.WorkScheduler
@@ -139,7 +137,6 @@ trait CompilerControl { self: Global =>
/** Sets sync var `response` to the fully attributed & typechecked tree contained in `source`.
* @pre `source` needs to be loaded.
- *
* @note Deprecated because of race conditions in the typechecker when the background compiler
* is interrupted while typing the same `source`.
* @see SI-6578
@@ -241,9 +238,6 @@ trait CompilerControl { self: Global =>
* prints its output and all defined values in a comment column.
*
* @param source The source file to be analyzed
- * @param keepLoaded If set to `true`, source file will be kept as a loaded unit afterwards.
- * If keepLoaded is `false` the operation is run at low priority, only after
- * everything is brought up to date in a regular type checker run.
* @param response The response.
*/
@deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
@@ -269,7 +263,7 @@ trait CompilerControl { self: Global =>
* compiler thread.
*/
def parseTree(source: SourceFile): Tree = {
- new UnitParser(new CompilationUnit(source)).parse()
+ newUnitParser(new CompilationUnit(source)).parse()
}
/** Asks for a computation to be done quickly on the presentation compiler thread */
@@ -345,7 +339,7 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
- case class AskTypeAtItem(val pos: Position, response: Response[Tree]) extends WorkItem {
+ case class AskTypeAtItem(pos: Position, response: Response[Tree]) extends WorkItem {
def apply() = self.getTypedTreeAt(pos, response)
override def toString = "typeat "+pos.source+" "+pos.show
@@ -353,7 +347,7 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
- case class AskTypeItem(val source: SourceFile, val forceReload: Boolean, response: Response[Tree]) extends WorkItem {
+ case class AskTypeItem(source: SourceFile, forceReload: Boolean, response: Response[Tree]) extends WorkItem {
def apply() = self.getTypedTree(source, forceReload, response)
override def toString = "typecheck"
@@ -361,7 +355,7 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
- case class AskTypeCompletionItem(val pos: Position, response: Response[List[Member]]) extends WorkItem {
+ case class AskTypeCompletionItem(pos: Position, response: Response[List[Member]]) extends WorkItem {
def apply() = self.getTypeCompletion(pos, response)
override def toString = "type completion "+pos.source+" "+pos.show
@@ -369,7 +363,7 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
- case class AskScopeCompletionItem(val pos: Position, response: Response[List[Member]]) extends WorkItem {
+ case class AskScopeCompletionItem(pos: Position, response: Response[List[Member]]) extends WorkItem {
def apply() = self.getScopeCompletion(pos, response)
override def toString = "scope completion "+pos.source+" "+pos.show
@@ -387,7 +381,7 @@ trait CompilerControl { self: Global =>
def raiseMissing() = ()
}
- case class AskLinkPosItem(val sym: Symbol, val source: SourceFile, response: Response[Position]) extends WorkItem {
+ case class AskLinkPosItem(sym: Symbol, source: SourceFile, response: Response[Position]) extends WorkItem {
def apply() = self.getLinkPos(sym, source, response)
override def toString = "linkpos "+sym+" in "+source
@@ -395,7 +389,7 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
- case class AskDocCommentItem(val sym: Symbol, val source: SourceFile, val site: Symbol, val fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]) extends WorkItem {
+ case class AskDocCommentItem(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]) extends WorkItem {
def apply() = self.getDocComment(sym, source, site, fragments, response)
override def toString = "doc comment "+sym+" in "+source+" with fragments:"+fragments.mkString("(", ",", ")")
@@ -403,7 +397,7 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
- case class AskLoadedTypedItem(val source: SourceFile, response: Response[Tree]) extends WorkItem {
+ case class AskLoadedTypedItem(source: SourceFile, response: Response[Tree]) extends WorkItem {
def apply() = self.waitLoadedTyped(source, response, this.onCompilerThread)
override def toString = "wait loaded & typed "+source
@@ -411,7 +405,7 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
- case class AskParsedEnteredItem(val source: SourceFile, val keepLoaded: Boolean, response: Response[Tree]) extends WorkItem {
+ case class AskParsedEnteredItem(source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) extends WorkItem {
def apply() = self.getParsedEntered(source, keepLoaded, response, this.onCompilerThread)
override def toString = "getParsedEntered "+source+", keepLoaded = "+keepLoaded
@@ -420,7 +414,7 @@ trait CompilerControl { self: Global =>
}
@deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
- case class AskInstrumentedItem(val source: SourceFile, line: Int, response: Response[(String, Array[Char])]) extends WorkItem {
+ case class AskInstrumentedItem(source: SourceFile, line: Int, response: Response[(String, Array[Char])]) extends WorkItem {
def apply() = self.getInstrumented(source, line, response)
override def toString = "getInstrumented "+source
diff --git a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala
index b2568e34bd..93ef4c4d6c 100644
--- a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala
+++ b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala
@@ -6,7 +6,6 @@ package scala.tools.nsc
package interactive
import scala.collection.mutable.ArrayBuffer
-import scala.reflect.internal.util.Position
trait ContextTrees { self: Global =>
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala
index 1f2245abb5..099a882f10 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Global.scala
@@ -8,31 +8,94 @@ package interactive
import java.io.{ PrintWriter, StringWriter, FileReader, FileWriter }
import scala.collection.mutable
import mutable.{LinkedHashMap, SynchronizedMap, HashSet, SynchronizedSet}
-import scala.concurrent.SyncVar
import scala.util.control.ControlThrowable
import scala.tools.nsc.io.{ AbstractFile, LogReplay, Logger, NullLogger, Replayer }
-import scala.tools.nsc.util.{ WorkScheduler, MultiHashMap }
-import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, RangePosition, NoPosition }
+import scala.tools.nsc.util.MultiHashMap
+import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, NoPosition }
import scala.tools.nsc.reporters._
import scala.tools.nsc.symtab._
-import scala.tools.nsc.ast._
-import scala.tools.nsc.io.Pickler._
-import scala.tools.nsc.typechecker.DivergentImplicit
-import scala.annotation.tailrec
+import scala.tools.nsc.doc.ScaladocAnalyzer
+import scala.tools.nsc.typechecker.{ Analyzer, DivergentImplicit }
import symtab.Flags.{ACCESSOR, PARAMACCESSOR}
-import scala.annotation.elidable
+import scala.annotation.{ elidable, tailrec }
import scala.language.implicitConversions
+trait InteractiveScaladocAnalyzer extends InteractiveAnalyzer with ScaladocAnalyzer {
+ val global : Global
+ import global._
+ override def newTyper(context: Context) = new Typer(context) with InteractiveTyper with ScaladocTyper {
+ override def canAdaptConstantTypeToLiteral = false
+ }
+}
+
+trait InteractiveAnalyzer extends Analyzer {
+ val global : Global
+ import global._
+
+ override def newTyper(context: Context): InteractiveTyper = new Typer(context) with InteractiveTyper
+ override def newNamer(context: Context): InteractiveNamer = new Namer(context) with InteractiveNamer
+ override protected def newPatternMatching = false
+
+ trait InteractiveTyper extends Typer {
+ override def canAdaptConstantTypeToLiteral = false
+ override def canTranslateEmptyListToNil = false
+ override def missingSelectErrorTree(tree: Tree, qual: Tree, name: Name): Tree = tree match {
+ case Select(_, _) => treeCopy.Select(tree, qual, name)
+ case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
+ }
+ }
+
+ trait InteractiveNamer extends Namer {
+ override def saveDefaultGetter(meth: Symbol, default: Symbol) {
+ // save the default getters as attachments in the method symbol. if compiling the
+ // same local block several times (which can happen in interactive mode) we might
+ // otherwise not find the default symbol, because the second time it the method
+ // symbol will be re-entered in the scope but the default parameter will not.
+ meth.attachments.get[DefaultsOfLocalMethodAttachment] match {
+ case Some(att) => att.defaultGetters += default
+ case None => meth.updateAttachment(new DefaultsOfLocalMethodAttachment(default))
+ }
+ }
+ // this logic is needed in case typer was interrupted half
+ // way through and then comes back to do the tree again. In
+ // that case the definitions that were already attributed as
+ // well as any default parameters of such methods need to be
+ // re-entered in the current scope.
+ override def enterExistingSym(sym: Symbol): Context = {
+ if (sym != null && sym.owner.isTerm) {
+ enterIfNotThere(sym)
+ if (sym.isLazy)
+ sym.lazyAccessor andAlso enterIfNotThere
+
+ for (defAtt <- sym.attachments.get[DefaultsOfLocalMethodAttachment])
+ defAtt.defaultGetters foreach enterIfNotThere
+ }
+ super.enterExistingSym(sym)
+ }
+ override def enterIfNotThere(sym: Symbol) {
+ val scope = context.scope
+ @tailrec def search(e: ScopeEntry) {
+ if ((e eq null) || (e.owner ne scope))
+ scope enter sym
+ else if (e.sym ne sym) // otherwise, aborts since we found sym
+ search(e.tail)
+ }
+ search(scope lookupEntry sym.name)
+ }
+ }
+}
+
+
/** The main class of the presentation compiler in an interactive environment such as an IDE
*/
-class Global(settings: Settings, _reporter: Reporter, projectName: String = "") extends {
+class Global(settings: Settings, _reporter: Reporter, projectName: String = "") extends {
/* Is the compiler initializing? Early def, so that the field is true during the
* execution of the super constructor.
*/
private var initializing = true
+ override val useOffsetPositions = false
} with scala.tools.nsc.Global(settings, _reporter)
with CompilerControl
- with RangePositions
with ContextTrees
with RichCompilationUnits
with ScratchPadMaker
@@ -72,8 +135,26 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
@inline final def informIDE(msg: => String) =
if (verboseIDE) println("[%s][%s]".format(projectName, msg))
+ // don't keep the original owner in presentation compiler runs
+ // (the map will grow indefinitely, and the only use case is the backend)
+ override protected def saveOriginalOwner(sym: Symbol) { }
+ override protected def originalEnclosingMethod(sym: Symbol) =
+ abort("originalOwner is not kept in presentation compiler runs.")
+
override def forInteractive = true
+ override def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap =
+ new InteractiveAsSeenFromMap(pre, clazz)
+
+ class InteractiveAsSeenFromMap(pre: Type, clazz: Symbol) extends AsSeenFromMap(pre, clazz) {
+ /** The method formerly known as 'instParamsRelaxed' goes here if it's still necessary,
+ * which it is currently supposed not to be.
+ *
+ * If it is, change AsSeenFromMap method correspondingTypeArgument to call an overridable
+ * method rather than aborting in the failure case.
+ */
+ }
+
/** A map of all loaded files to the rich compilation units that correspond to them.
*/
val unitOfFile = new LinkedHashMap[AbstractFile, RichCompilationUnit] with
@@ -131,6 +212,10 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
}
+ override lazy val analyzer = new {
+ val global: Global.this.type = Global.this
+ } with InteractiveAnalyzer
+
private def cleanAllResponses() {
cleanResponses(waitLoadedTypeResponses)
cleanResponses(getParsedEnteredResponses)
@@ -285,7 +370,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
* top-level idents. Therefore, we can detect top-level symbols that have a name
* different from their source file
*/
- override lazy val loaders = new BrowsingLoaders {
+ override lazy val loaders: SymbolLoaders { val global: Global.this.type } = new BrowsingLoaders {
val global: Global.this.type = Global.this
}
@@ -363,14 +448,14 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
// don't forget to service interrupt requests
- val iqs = scheduler.dequeueAllInterrupts(_.execute())
+ scheduler.dequeueAllInterrupts(_.execute())
debugLog("ShutdownReq: cleaning work queue (%d items)".format(units.size))
debugLog("Cleanup up responses (%d loadedType pending, %d parsedEntered pending)"
.format(waitLoadedTypeResponses.size, getParsedEnteredResponses.size))
checkNoResponsesOutstanding()
- log.flush();
+ log.flush()
scheduler = new NoWorkScheduler
throw ShutdownReq
}
@@ -401,41 +486,6 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
if (typerRun != currentTyperRun) demandNewCompilerRun()
}
- def debugInfo(source : SourceFile, start : Int, length : Int): String = {
- println("DEBUG INFO "+source+"/"+start+"/"+length)
- val end = start+length
- val pos = rangePos(source, start, start, end)
-
- val tree = locateTree(pos)
- val sw = new StringWriter
- val pw = new PrintWriter(sw)
- newTreePrinter(pw).print(tree)
- pw.flush
-
- val typed = new Response[Tree]
- askTypeAt(pos, typed)
- val typ = typed.get.left.toOption match {
- case Some(tree) =>
- val sw = new StringWriter
- val pw = new PrintWriter(sw)
- newTreePrinter(pw).print(tree)
- pw.flush
- sw.toString
- case None => "<None>"
- }
-
- val completionResponse = new Response[List[Member]]
- askTypeCompletion(pos, completionResponse)
- val completion = completionResponse.get.left.toOption match {
- case Some(members) =>
- members mkString "\n"
- case None => "<None>"
- }
-
- source.content.view.drop(start).take(length).mkString+" : "+source.path+" ("+start+", "+end+
- ")\n\nlocateTree:\n"+sw.toString+"\n\naskTypeAt:\n"+typ+"\n\ncompletion:\n"+completion
- }
-
// ----------------- The Background Runner Thread -----------------------
private var threadId = 0
@@ -794,10 +844,10 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
try {
val tp1 = pre.memberType(alt) onTypeError NoType
val tp2 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, sym.owner.typeParams)
- matchesType(tp1, tp2, false) || {
+ matchesType(tp1, tp2, alwaysMatchSimple = false) || {
debugLog(s"findMirrorSymbol matchesType($tp1, $tp2) failed")
val tp3 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, alt.owner.typeParams)
- matchesType(tp1, tp3, false) || {
+ matchesType(tp1, tp3, alwaysMatchSimple = false) || {
debugLog(s"findMirrorSymbol fallback matchesType($tp1, $tp3) failed")
false
}
@@ -898,8 +948,6 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
respond(response) { scopeMembers(pos) }
}
- private val Dollar = newTermName("$")
-
private class Members[M <: Member] extends LinkedHashMap[Name, Set[M]] {
override def default(key: Name) = Set()
@@ -915,7 +963,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
def add(sym: Symbol, pre: Type, implicitlyAdded: Boolean)(toMember: (Symbol, Type) => M) {
if ((sym.isGetter || sym.isSetter) && sym.accessed != NoSymbol) {
add(sym.accessed, pre, implicitlyAdded)(toMember)
- } else if (!sym.name.decodedName.containsName(Dollar) && !sym.isSynthetic && sym.hasRawInfo) {
+ } else if (!sym.name.decodedName.containsName("$") && !sym.isSynthetic && sym.hasRawInfo) {
val symtpe = pre.memberType(sym) onTypeError ErrorType
matching(sym, symtpe, this(sym.name)) match {
case Some(m) =>
@@ -945,8 +993,8 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
val locals = new Members[ScopeMember]
val enclosing = new Members[ScopeMember]
def addScopeMember(sym: Symbol, pre: Type, viaImport: Tree) =
- locals.add(sym, pre, false) { (s, st) =>
- new ScopeMember(s, st, context.isAccessible(s, pre, false), viaImport)
+ locals.add(sym, pre, implicitlyAdded = false) { (s, st) =>
+ new ScopeMember(s, st, context.isAccessible(s, pre, superAccess = false), viaImport)
}
def localsToEnclosing() = {
enclosing.addNonShadowed(locals)
@@ -1038,7 +1086,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
for (sym <- ownerTpe.members)
addTypeMember(sym, pre, sym.owner != ownerTpe.typeSymbol, NoSymbol)
members.allMembers #:: {
- //print("\nadd pimped")
+ //print("\nadd enrichment")
val applicableViews: List[SearchResult] =
if (ownerTpe.isErroneous) List()
else new ImplicitSearch(
@@ -1048,7 +1096,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
val vtree = viewApply(view)
val vpre = stabilizedType(vtree)
for (sym <- vtree.tpe.members) {
- addTypeMember(sym, vpre, false, view.tree.symbol)
+ addTypeMember(sym, vpre, inherited = false, view.tree.symbol)
}
}
//println()
@@ -1061,7 +1109,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
getUnit(source) match {
case Some(unit) =>
if (unit.isUpToDate) {
- debugLog("already typed");
+ debugLog("already typed")
response set unit.body
} else if (ignoredFiles(source.file)) {
response.raise(lastException.getOrElse(CancelException))
@@ -1108,7 +1156,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
@deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
- def getInstrumented(source: SourceFile, line: Int, response: Response[(String, Array[Char])]) =
+ def getInstrumented(source: SourceFile, line: Int, response: Response[(String, Array[Char])]) {
try {
interruptsEnabled = false
respond(response) {
@@ -1117,18 +1165,10 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
} finally {
interruptsEnabled = true
}
+ }
// ---------------- Helper classes ---------------------------
- /** A transformer that replaces tree `from` with tree `to` in a given tree */
- class TreeReplacer(from: Tree, to: Tree) extends Transformer {
- override def transform(t: Tree): Tree = {
- if (t == from) to
- else if ((t.pos includes from.pos) || t.pos.isTransparent) super.transform(t)
- else t
- }
- }
-
/** The typer run */
class TyperRun extends Run {
// units is always empty
@@ -1148,7 +1188,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
* @return true iff typechecked correctly
*/
private def applyPhase(phase: Phase, unit: CompilationUnit) {
- atPhase(phase) { phase.asInstanceOf[GlobalPhase] applyPhase unit }
+ enteringPhase(phase) { phase.asInstanceOf[GlobalPhase] applyPhase unit }
}
}
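
In the Global.scala hunks above, InteractiveNamer.enterIfNotThere enters a symbol only when no entry owned by the current scope already holds it, walking the entry chain tail-recursively and stopping as soon as it reaches the end of the chain or an outer scope's entries. A hedged sketch of that search over a deliberately simplified model; TinyScope, Entry and enter are hypothetical stand-ins, not the compiler's Scope API.

import scala.annotation.tailrec

// Simplified entry: each entry remembers its owning scope and the next entry in the chain.
final class Entry(val sym: String, val owner: AnyRef, val tail: Entry)

class TinyScope {
  private var entries: Entry = null

  private def enter(sym: String): Unit =
    entries = new Entry(sym, this, entries)

  private def lookupEntry(sym: String): Entry = {
    var e = entries
    while (e != null && e.sym != sym) e = e.tail
    e                                              // first matching entry, or null
  }

  /** Enter `sym` unless an entry owned by this scope already holds it. */
  def enterIfNotThere(sym: String): Unit = {
    @tailrec def search(e: Entry): Unit =
      if ((e eq null) || (e.owner ne this)) enter(sym) // ran off the chain or into another scope's entries
      else if (e.sym != sym) search(e.tail)            // keep looking; if found, do nothing
    search(lookupEntry(sym))
  }
}
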
diff --git a/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala b/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala
index 013b152e96..013b152e96 100644
--- a/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala
+++ b/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala
diff --git a/src/interactive/scala/tools/nsc/interactive/Main.scala b/src/interactive/scala/tools/nsc/interactive/Main.scala
new file mode 100644
index 0000000000..3b4a36f62d
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/Main.scala
@@ -0,0 +1,34 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools
+package nsc
+package interactive
+
+/** The main class for NSC, a compiler for the programming
+ * language Scala.
+ */
+object Main extends nsc.MainClass {
+ override def processSettingsHook(): Boolean = {
+ if (this.settings.Yidedebug.value) {
+ this.settings.Xprintpos.value = true
+ this.settings.Yrangepos.value = true
+ val compiler = new interactive.Global(this.settings, this.reporter)
+ import compiler.{ reporter => _, _ }
+
+ val sfs = command.files map getSourceFile
+ val reloaded = new interactive.Response[Unit]
+ askReload(sfs, reloaded)
+
+ reloaded.get.right.toOption match {
+ case Some(ex) => reporter.cancelled = true // Causes exit code to be non-0
+ case None => reporter.reset() // Causes other compiler errors to be ignored
+ }
+ askShutdown
+ false
+ }
+ else true
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/Picklers.scala b/src/interactive/scala/tools/nsc/interactive/Picklers.scala
index 2b389158c3..b184afd0f5 100644
--- a/src/compiler/scala/tools/nsc/interactive/Picklers.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Picklers.scala
@@ -6,12 +6,10 @@ package scala.tools.nsc
package interactive
import util.InterruptReq
-import scala.reflect.internal.util.{SourceFile, BatchSourceFile}
-import io.{AbstractFile, PlainFile}
-
+import scala.reflect.internal.util.{ SourceFile, BatchSourceFile }
+import io.{ AbstractFile, PlainFile, Pickler, CondPickler }
import util.EmptyAction
-import scala.reflect.internal.util.{Position, RangePosition, NoPosition, OffsetPosition, TransparentPosition}
-import io.{Pickler, CondPickler}
+import scala.reflect.internal.util.{ RangePosition, OffsetPosition, TransparentPosition }
import io.Pickler._
import scala.collection.mutable
import mutable.ListBuffer
diff --git a/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala b/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala
index a2d8e5d49a..a2d8e5d49a 100644
--- a/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala
+++ b/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/interactive/scala/tools/nsc/interactive/REPL.scala
index 7b89d5b0aa..04c06b9357 100644
--- a/src/compiler/scala/tools/nsc/interactive/REPL.scala
+++ b/src/interactive/scala/tools/nsc/interactive/REPL.scala
@@ -5,15 +5,11 @@
package scala.tools.nsc
package interactive
-import scala.concurrent.SyncVar
import scala.reflect.internal.util._
-import scala.tools.nsc.symtab._
-import scala.tools.nsc.ast._
import scala.tools.nsc.reporters._
import scala.tools.nsc.io._
import scala.tools.nsc.scratchpad.SourceInserter
-import scala.tools.nsc.interpreter.AbstractFileClassLoader
-import java.io.{File, FileWriter}
+import java.io.FileWriter
/** Interface of interactive compiler to a client such as an IDE
*/
@@ -55,21 +51,21 @@ object REPL {
} catch {
case ex @ FatalError(msg) =>
if (true || command.settings.debug.value) // !!!
- ex.printStackTrace();
- reporter.error(null, "fatal error: " + msg)
+ ex.printStackTrace()
+ reporter.error(null, "fatal error: " + msg)
}
}
}
def main(args: Array[String]) {
process(args)
- /*sys.*/exit(if (reporter.hasErrors) 1 else 0)// Don't use sys yet as this has to run on 2.8.2 also.
+ sys.exit(if (reporter.hasErrors) 1 else 0)
}
def loop(action: (String) => Unit) {
Console.print(prompt)
try {
- val line = Console.readLine
+ val line = Console.readLine()
if (line.length() > 0) {
action(line)
}
@@ -111,7 +107,7 @@ object REPL {
}
def doStructure(file: String) {
- comp.askParsedEntered(toSourceFile(file), false, structureResult)
+ comp.askParsedEntered(toSourceFile(file), keepLoaded = false, structureResult)
show(structureResult)
}
@@ -186,7 +182,7 @@ object REPL {
println(instrument(arguments, line.toInt))
case List("quit") =>
comp.askShutdown()
- exit(1) // Don't use sys yet as this has to run on 2.8.2 also.
+ sys.exit(1)
case List("structure", file) =>
doStructure(file)
case _ =>
diff --git a/src/interactive/scala/tools/nsc/interactive/RangePositions.scala b/src/interactive/scala/tools/nsc/interactive/RangePositions.scala
new file mode 100644
index 0000000000..c57e1da184
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/RangePositions.scala
@@ -0,0 +1,14 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package interactive
+
+@deprecated("Use scala.reflect.internal.Positions", "2.11.0")
+trait RangePositions extends scala.reflect.internal.Positions with ast.Trees with ast.Positions {
+ self: scala.tools.nsc.Global =>
+
+ override def useOffsetPositions = false
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/Response.scala b/src/interactive/scala/tools/nsc/interactive/Response.scala
index f36f769ec9..f36f769ec9 100644
--- a/src/compiler/scala/tools/nsc/interactive/Response.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Response.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala b/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala
index b83c2cd095..b83c2cd095 100644
--- a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
+++ b/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala b/src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala
index 7f0265bf4f..7af9174704 100644
--- a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala
+++ b/src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala
@@ -163,7 +163,7 @@ trait ScratchPadMaker { self: Global =>
while (scanner.token != EOF) {
startOffset += scanner.offset
token += scanner.token
- scanner.nextToken
+ scanner.nextToken()
endOffset += scanner.lastOffset
}
@@ -191,7 +191,7 @@ trait ScratchPadMaker { self: Global =>
* prints its output and all defined values in a comment column.
*/
protected def instrument(source: SourceFile, line: Int): (String, Array[Char]) = {
- val tree = typedTree(source, true)
+ val tree = typedTree(source, forceReload = true)
val endOffset = if (line < 0) source.length else source.lineToOffset(line + 1)
val patcher = new Patcher(source.content, new LexicalStructure(source), endOffset)
patcher.traverse(tree)
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala
index 597b9012ce..a4a2de9b51 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala
@@ -7,14 +7,6 @@ package interactive
package tests
import core._
-
-import java.io.File.pathSeparatorChar
-import java.io.File.separatorChar
-
-import scala.annotation.migration
-import scala.reflect.internal.util.Position
-import scala.reflect.internal.util.SourceFile
-
import scala.collection.mutable.ListBuffer
/** A base class for writing interactive compiler tests.
@@ -109,6 +101,7 @@ abstract class InteractiveTest
}
/** Perform n random tests with random changes. */
+ /****
private def randomTests(n: Int, files: Array[SourceFile]) {
val tester = new Tester(n, files, settings) {
override val compiler = self.compiler
@@ -116,6 +109,7 @@ abstract class InteractiveTest
}
tester.run()
}
+ ****/
/** shutdown the presentation compiler. */
protected def shutdown() {
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
index 4d85ab9d88..ad5c61b2b0 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
@@ -25,7 +25,6 @@ trait InteractiveTestSettings extends TestSettings with PresentationCompilerInst
* test.
*/
override protected def prepareSettings(settings: Settings) {
- import java.io.File._
def adjustPaths(paths: settings.PathSetting*) {
for (p <- paths if argsString.contains(p.name)) p.value = p.value.map {
case '/' => separatorChar
@@ -45,10 +44,10 @@ trait InteractiveTestSettings extends TestSettings with PresentationCompilerInst
case _ => ()
}
- // Make the --sourcepath path provided in the .flags file (if any) relative to the test's base directory
+ // Make the --sourcepath path provided in the .flags file (if any) relative to the test's base directory
if(settings.sourcepath.isSetByUser)
settings.sourcepath.value = (baseDir / Path(settings.sourcepath.value)).path
-
+
adjustPaths(settings.bootclasspath, settings.classpath, settings.javabootclasspath, settings.sourcepath)
}
@@ -67,4 +66,4 @@ trait InteractiveTestSettings extends TestSettings with PresentationCompilerInst
reporter.println("\targsString: %s".format(argsString))
super.printClassPath(reporter)
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala b/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala
index 26aabbd3e6..9382d5890f 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala
@@ -199,7 +199,7 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) {
object Tester {
def main(args: Array[String]) {
val settings = new Settings()
- val (_, filenames) = settings.processArguments(args.toList.tail, true)
+ val (_, filenames) = settings.processArguments(args.toList.tail, processAll = true)
println("filenames = "+filenames)
val files = filenames.toArray map (str => new BatchSourceFile(AbstractFile.getFile(str)): SourceFile)
new Tester(args(0).toInt, files, settings).run()
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala
index 8d446cbbf8..8d446cbbf8 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
index c8e6b6ccce..9085eb56e6 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
@@ -3,7 +3,6 @@ package interactive
package tests.core
import scala.reflect.internal.util.Position
-import scala.tools.nsc.interactive.tests.core._
/** Set of core test definitions that are executed for each test run. */
private[tests] trait CoreTestDefs
@@ -77,7 +76,8 @@ private[tests] trait CoreTestDefs
// askHyperlinkPos for `Int` at (73,19) pi.scala --> class Int in package scala has null sourceFile!
val treePath = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.path else null
val treeName = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.name else null
- val sourceFile = sourceFiles.find(_.path == treePath) match {
+
+ sourceFiles.find(_.path == treePath) match {
case Some(source) =>
compiler.askLinkPos(tree.symbol, source, r)
r.get match {
@@ -97,4 +97,4 @@ private[tests] trait CoreTestDefs
}
}
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
index f304eda753..9a2abd5139 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
@@ -3,7 +3,6 @@ package interactive
package tests.core
import reporters.{Reporter => CompilerReporter}
-import scala.reflect.internal.util.Position
/** Trait encapsulating the creation of a presentation compiler's instance.*/
private[tests] trait PresentationCompilerInstance extends TestSettings {
@@ -14,11 +13,16 @@ private[tests] trait PresentationCompilerInstance extends TestSettings {
override def compiler = PresentationCompilerInstance.this.compiler
}
+ private class ScaladocEnabledGlobal extends Global(settings, compilerReporter) {
+ override lazy val analyzer = new {
+ val global: ScaladocEnabledGlobal.this.type = ScaladocEnabledGlobal.this
+ } with InteractiveScaladocAnalyzer
+ }
+
protected lazy val compiler: Global = {
prepareSettings(settings)
- new Global(settings, compilerReporter) {
- override def forScaladoc = withDocComments
- }
+ if (withDocComments) new ScaladocEnabledGlobal
+ else new Global(settings, compilerReporter)
}
/**
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
index b5ae5f2d75..b5ae5f2d75 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
index 9cf2aa4fe4..4d5b4e1129 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
@@ -1,6 +1,5 @@
package scala.tools.nsc.interactive.tests.core
-import scala.tools.nsc.interactive.Global
import scala.reflect.internal.util.Position
trait PresentationCompilerTestDef {
@@ -16,4 +15,4 @@ trait PresentationCompilerTestDef {
protected def format(pos: Position): String =
(if(pos.isDefined) "(%d,%d)".format(pos.line, pos.column) else "<no position>")
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala
index 631504cda5..631504cda5 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
index e80b741a8d..676feeba8a 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
@@ -4,7 +4,6 @@ import scala.reflect.internal.util.{SourceFile,BatchSourceFile}
import scala.tools.nsc.io.{AbstractFile,Path}
private[tests] object SourcesCollector {
- import Path._
type SourceFilter = Path => Boolean
/**
@@ -17,6 +16,5 @@ private[tests] object SourcesCollector {
}
private def source(file: Path): SourceFile = source(AbstractFile.getFile(file.toFile))
- private def source(filename: String): SourceFile = source(AbstractFile.getFile(filename))
private def source(file: AbstractFile): SourceFile = new BatchSourceFile(file)
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala
index ba1722382b..a5c228a549 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala
@@ -16,7 +16,7 @@ object TestMarker {
}
}
-abstract case class TestMarker(val marker: String) {
+abstract case class TestMarker(marker: String) {
TestMarker.checkForDuplicate(this)
}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala
index 887c3cf29b..887c3cf29b 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala
index 681204172b..681204172b 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala
diff --git a/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java b/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java
index 9df42708bb..a375b84a5c 100644
--- a/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java
+++ b/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java
@@ -1712,7 +1712,7 @@ public class ConsoleReader
}
/**
- * Output a platform-dependant newline.
+ * Output a platform-dependent newline.
*/
public final void println() throws IOException {
print(CR);
diff --git a/src/library-aux/scala/Any.scala b/src/library-aux/scala/Any.scala
index 07a9ffa8d5..1c25989c30 100644
--- a/src/library-aux/scala/Any.scala
+++ b/src/library-aux/scala/Any.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library-aux/scala/AnyRef.scala b/src/library-aux/scala/AnyRef.scala
index 7d8b9f9e76..362fbcf0f5 100644
--- a/src/library-aux/scala/AnyRef.scala
+++ b/src/library-aux/scala/AnyRef.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library-aux/scala/Nothing.scala b/src/library-aux/scala/Nothing.scala
index eed6066039..57f6fac3f9 100644
--- a/src/library-aux/scala/Nothing.scala
+++ b/src/library-aux/scala/Nothing.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library-aux/scala/Null.scala b/src/library-aux/scala/Null.scala
index 7455e78ae7..931beb2d1a 100644
--- a/src/library-aux/scala/Null.scala
+++ b/src/library-aux/scala/Null.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Application.scala b/src/library/scala/Application.scala
deleted file mode 100644
index e7db0d2db8..0000000000
--- a/src/library/scala/Application.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-
-import scala.compat.Platform.currentTime
-
-/** The `Application` trait can be used to quickly turn objects
- * into executable programs, but is ''not recommended''.
- * Here is an example:
- * {{{
- * object Main extends Application {
- * Console.println("Hello World!")
- * }
- * }}}
- * Here, object `Main` inherits the `main` method of `Application`.
- * The body of the `Main` object defines the main program. This technique
- * does not work if the main program depends on command-line arguments
- * (which are not accessible with the technique presented here).
- *
- * It is possible to time the execution of objects that inherit from class
- * `Application` by setting the global `scala.time`
- * property. Here is an example for benchmarking object `Main`:
- * {{{
- * java -Dscala.time Main
- * }}}
- * In practice the `Application` trait has a number of serious pitfalls:
- *
- * - Threaded code that references the object will block until static
- * initialization is complete. However, because the entire execution
- * of an `object` extending `Application` takes place during
- * static initialization, concurrent code will ''always'' deadlock if
- * it must synchronize with the enclosing object.
- * - As described above, there is no way to obtain the
- * command-line arguments because all code in body of an `object`
- * extending `Application` is run as part of the static initialization
- * which occurs before `Application`'s `main` method
- * even begins execution.
- * - Static initializers are run only once during program execution, and
- * JVM authors usually assume their execution to be relatively short.
- * Therefore, certain JVM configurations may become confused, or simply
- * fail to optimize or JIT the code in the body of an `object` extending
- * `Application`. This can lead to a significant performance degradation.
- *
- * It is recommended to use the [[scala.App]] trait instead.
- * {{{
- * object Main {
- * def main(args: Array[String]) {
- * //..
- * }
- * }
- * }}}
- *
- * @author Matthias Zenger
- * @version 1.0, 10/09/2003
- */
-@deprecated("use App instead", "2.9.0")
-trait Application {
-
- /** The time when the execution of this program started,
- * in milliseconds since 1 January 1970 UTC. */
- val executionStart: Long = currentTime
-
- /** The default main method.
- *
- * @param args the arguments passed to the main method
- */
- def main(args: Array[String]) {
- if (util.Properties propIsSet "scala.time") {
- val total = currentTime - executionStart
- Console.println("[total " + total + "ms]")
- }
- }
-}
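
The deleted `Application` trait's own documentation already points users at `scala.App`. As a minimal migration sketch (the object name `Main` is illustrative, not part of this patch), an `App`-based program also gets the command-line arguments that `Application` could never see:

object Main extends App {
  // App supplies main() and the args array; the body runs inside it
  println("Hello World!")
  args.foreach(println)
}
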
diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala
index b9f51803ec..6ab82d998e 100644
--- a/src/library/scala/Array.scala
+++ b/src/library/scala/Array.scala
@@ -240,9 +240,9 @@ object Array extends FallbackArrayBuilding {
*/
def concat[T: ClassTag](xss: Array[T]*): Array[T] = {
val b = newBuilder[T]
- b.sizeHint(xss.map(_.size).sum)
+ b.sizeHint(xss.map(_.length).sum)
for (xs <- xss) b ++= xs
- b.result
+ b.result()
}
/** Returns an array that contains the results of some element computation a number
@@ -267,7 +267,7 @@ object Array extends FallbackArrayBuilding {
b += elem
i += 1
}
- b.result
+ b.result()
}
/** Returns a two-dimensional array that contains the results of some element
@@ -331,7 +331,7 @@ object Array extends FallbackArrayBuilding {
b += f(i)
i += 1
}
- b.result
+ b.result()
}
/** Returns a two-dimensional array containing values of a given function
@@ -399,14 +399,14 @@ object Array extends FallbackArrayBuilding {
def range(start: Int, end: Int, step: Int): Array[Int] = {
if (step == 0) throw new IllegalArgumentException("zero step")
val b = newBuilder[Int]
- b.sizeHint(immutable.Range.count(start, end, step, false))
+ b.sizeHint(immutable.Range.count(start, end, step, isInclusive = false))
var i = start
while (if (step < 0) end < i else i < end) {
b += i
i += step
}
- b.result
+ b.result()
}
/** Returns an array containing repeated applications of a function to a start value.
@@ -431,7 +431,7 @@ object Array extends FallbackArrayBuilding {
b += acc
}
}
- b.result
+ b.result()
}
/** Called in a pattern match like `{ case Array(x,y,z) => println('3 elements')}`.
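
The builder-based factories touched above (`concat`, `fill`, `range`) keep their observable behaviour; a short usage sketch, assuming nothing beyond the standard library:

object ArrayFactoriesDemo {
  def main(args: Array[String]): Unit = {
    val joined = Array.concat(Array(1, 2), Array(3, 4)) // Array(1, 2, 3, 4)
    val zeros  = Array.fill(3)(0)                       // Array(0, 0, 0)
    val evens  = Array.range(0, 10, 2)                  // Array(0, 2, 4, 6, 8)
    println(joined.mkString(","))
    println(zeros.mkString(","))
    println(evens.mkString(","))
  }
}
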
diff --git a/src/library/scala/Boolean.scala b/src/library/scala/Boolean.scala
index 440e546f19..e43b7d0a82 100644
--- a/src/library/scala/Boolean.scala
+++ b/src/library/scala/Boolean.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,8 +10,6 @@
package scala
-import scala.language.implicitConversions
-
/** `Boolean` (equivalent to Java's `boolean` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Boolean` are not
* represented by an object in the underlying runtime system.
diff --git a/src/library/scala/Byte.scala b/src/library/scala/Byte.scala
index df0d2c73b1..d1979236d3 100644
--- a/src/library/scala/Byte.scala
+++ b/src/library/scala/Byte.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Char.scala b/src/library/scala/Char.scala
index 1fa0c0d9e8..00ddff5b3b 100644
--- a/src/library/scala/Char.scala
+++ b/src/library/scala/Char.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Console.scala b/src/library/scala/Console.scala
index 5b015502ea..275d7629ee 100644
--- a/src/library/scala/Console.scala
+++ b/src/library/scala/Console.scala
@@ -6,16 +6,12 @@
** |/ **
\* */
-
-
package scala
-import java.io.{BufferedReader, InputStream, InputStreamReader,
- IOException, OutputStream, PrintStream, Reader}
-import java.text.MessageFormat
+import java.io.{ BufferedReader, InputStream, InputStreamReader, OutputStream, PrintStream, Reader }
+import scala.io.{ AnsiColor, ReadStdin }
import scala.util.DynamicVariable
-
/** Implements functionality for
* printing Scala values on the terminal as well as reading specific values.
* Also defines constants for marking up text on ANSI terminals.
@@ -23,60 +19,16 @@ import scala.util.DynamicVariable
* @author Matthias Zenger
* @version 1.0, 03/09/2003
*/
-object Console {
-
- /** Foreground color for ANSI black */
- final val BLACK = "\033[30m"
- /** Foreground color for ANSI red */
- final val RED = "\033[31m"
- /** Foreground color for ANSI green */
- final val GREEN = "\033[32m"
- /** Foreground color for ANSI yellow */
- final val YELLOW = "\033[33m"
- /** Foreground color for ANSI blue */
- final val BLUE = "\033[34m"
- /** Foreground color for ANSI magenta */
- final val MAGENTA = "\033[35m"
- /** Foreground color for ANSI cyan */
- final val CYAN = "\033[36m"
- /** Foreground color for ANSI white */
- final val WHITE = "\033[37m"
-
- /** Background color for ANSI black */
- final val BLACK_B = "\033[40m"
- /** Background color for ANSI red */
- final val RED_B = "\033[41m"
- /** Background color for ANSI green */
- final val GREEN_B = "\033[42m"
- /** Background color for ANSI yellow */
- final val YELLOW_B = "\033[43m"
- /** Background color for ANSI blue */
- final val BLUE_B = "\033[44m"
- /** Background color for ANSI magenta */
- final val MAGENTA_B = "\033[45m"
- /** Background color for ANSI cyan */
- final val CYAN_B = "\033[46m"
- /** Background color for ANSI white */
- final val WHITE_B = "\033[47m"
-
- /** Reset ANSI styles */
- final val RESET = "\033[0m"
- /** ANSI bold */
- final val BOLD = "\033[1m"
- /** ANSI underlines */
- final val UNDERLINED = "\033[4m"
- /** ANSI blink */
- final val BLINK = "\033[5m"
- /** ANSI reversed */
- final val REVERSED = "\033[7m"
- /** ANSI invisible */
- final val INVISIBLE = "\033[8m"
-
+object Console extends DeprecatedConsole with AnsiColor {
private val outVar = new DynamicVariable[PrintStream](java.lang.System.out)
private val errVar = new DynamicVariable[PrintStream](java.lang.System.err)
- private val inVar = new DynamicVariable[BufferedReader](
+ private val inVar = new DynamicVariable[BufferedReader](
new BufferedReader(new InputStreamReader(java.lang.System.in)))
+ protected def setOutDirect(out: PrintStream): Unit = outVar.value = out
+ protected def setErrDirect(err: PrintStream): Unit = errVar.value = err
+ protected def setInDirect(in: BufferedReader): Unit = inVar.value = in
+
/** The default output, can be overridden by `setOut` */
def out = outVar.value
/** The default error, can be overridden by `setErr` */
@@ -84,12 +36,6 @@ object Console {
/** The default input, can be overridden by `setIn` */
def in = inVar.value
- /** Sets the default output stream.
- *
- * @param out the new output stream.
- */
- def setOut(out: PrintStream) { outVar.value = out }
-
/** Sets the default output stream for the duration
* of execution of one thunk.
*
@@ -106,13 +52,6 @@ object Console {
def withOut[T](out: PrintStream)(thunk: =>T): T =
outVar.withValue(out)(thunk)
- /** Sets the default output stream.
- *
- * @param out the new output stream.
- */
- def setOut(out: OutputStream): Unit =
- setOut(new PrintStream(out))
-
/** Sets the default output stream for the duration
* of execution of one thunk.
*
@@ -125,13 +64,6 @@ object Console {
def withOut[T](out: OutputStream)(thunk: =>T): T =
withOut(new PrintStream(out))(thunk)
-
- /** Sets the default error stream.
- *
- * @param err the new error stream.
- */
- def setErr(err: PrintStream) { errVar.value = err }
-
/** Set the default error stream for the duration
* of execution of one thunk.
* @example {{{
@@ -147,13 +79,6 @@ object Console {
def withErr[T](err: PrintStream)(thunk: =>T): T =
errVar.withValue(err)(thunk)
- /** Sets the default error stream.
- *
- * @param err the new error stream.
- */
- def setErr(err: OutputStream): Unit =
- setErr(new PrintStream(err))
-
/** Sets the default error stream for the duration
* of execution of one thunk.
*
@@ -166,15 +91,6 @@ object Console {
def withErr[T](err: OutputStream)(thunk: =>T): T =
withErr(new PrintStream(err))(thunk)
-
- /** Sets the default input stream.
- *
- * @param reader specifies the new input stream.
- */
- def setIn(reader: Reader) {
- inVar.value = new BufferedReader(reader)
- }
-
/** Sets the default input stream for the duration
* of execution of one thunk.
*
@@ -195,14 +111,6 @@ object Console {
def withIn[T](reader: Reader)(thunk: =>T): T =
inVar.withValue(new BufferedReader(reader))(thunk)
- /** Sets the default input stream.
- *
- * @param in the new input stream.
- */
- def setIn(in: InputStream) {
- setIn(new InputStreamReader(in))
- }
-
/** Sets the default input stream for the duration
* of execution of one thunk.
*
@@ -251,218 +159,64 @@ object Console {
* @throws java.lang.IllegalArgumentException if there was a problem with the format string or arguments
*/
def printf(text: String, args: Any*) { out.print(text format (args : _*)) }
+}
- /** Read a full line from the default input. Returns `null` if the end of the
- * input stream has been reached.
- *
- * @return the string read from the terminal or null if the end of stream was reached.
- */
- def readLine(): String = in.readLine()
-
- /** Print formatted text to the default output and read a full line from the default input.
- * Returns `null` if the end of the input stream has been reached.
- *
- * @param text the format of the text to print out, as in `printf`.
- * @param args the parameters used to instantiate the format, as in `printf`.
- * @return the string read from the default input
- */
- def readLine(text: String, args: Any*): String = {
- printf(text, args: _*)
- readLine()
- }
-
- /** Reads a boolean value from an entire line of the default input.
- * Has a fairly liberal interpretation of the input.
- *
- * @return the boolean value read, or false if it couldn't be converted to a boolean
- * @throws java.io.EOFException if the end of the input stream has been reached.
- */
- def readBoolean(): Boolean = {
- val s = readLine()
- if (s == null)
- throw new java.io.EOFException("Console has reached end of input")
- else
- s.toLowerCase() match {
- case "true" => true
- case "t" => true
- case "yes" => true
- case "y" => true
- case _ => false
- }
- }
-
- /** Reads a byte value from an entire line of the default input.
- *
- * @return the Byte that was read
- * @throws java.io.EOFException if the end of the
- * input stream has been reached.
- * @throws java.lang.NumberFormatException if the value couldn't be converted to a Byte
- */
- def readByte(): Byte = {
- val s = readLine()
- if (s == null)
- throw new java.io.EOFException("Console has reached end of input")
- else
- s.toByte
- }
-
- /** Reads a short value from an entire line of the default input.
- *
- * @return the short that was read
- * @throws java.io.EOFException if the end of the
- * input stream has been reached.
- * @throws java.lang.NumberFormatException if the value couldn't be converted to a Short
- */
- def readShort(): Short = {
- val s = readLine()
- if (s == null)
- throw new java.io.EOFException("Console has reached end of input")
- else
- s.toShort
- }
-
- /** Reads a char value from an entire line of the default input.
- *
- * @return the Char that was read
- * @throws java.io.EOFException if the end of the
- * input stream has been reached.
- * @throws java.lang.StringIndexOutOfBoundsException if the line read from default input was empty
- */
- def readChar(): Char = {
- val s = readLine()
- if (s == null)
- throw new java.io.EOFException("Console has reached end of input")
- else
- s charAt 0
- }
-
- /** Reads an int value from an entire line of the default input.
- *
- * @return the Int that was read
- * @throws java.io.EOFException if the end of the
- * input stream has been reached.
- * @throws java.lang.NumberFormatException if the value couldn't be converted to an Int
- */
- def readInt(): Int = {
- val s = readLine()
- if (s == null)
- throw new java.io.EOFException("Console has reached end of input")
- else
- s.toInt
- }
-
- /** Reads an long value from an entire line of the default input.
- *
- * @return the Long that was read
- * @throws java.io.EOFException if the end of the
- * input stream has been reached.
- * @throws java.lang.NumberFormatException if the value couldn't be converted to a Long
- */
- def readLong(): Long = {
- val s = readLine()
- if (s == null)
- throw new java.io.EOFException("Console has reached end of input")
- else
- s.toLong
- }
+private[scala] abstract class DeprecatedConsole {
+ self: Console.type =>
+
+ /** Internal usage only. */
+ protected def setOutDirect(out: PrintStream): Unit
+ protected def setErrDirect(err: PrintStream): Unit
+ protected def setInDirect(in: BufferedReader): Unit
+
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readBoolean(): Boolean = ReadStdin.readBoolean()
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readByte(): Byte = ReadStdin.readByte()
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readChar(): Char = ReadStdin.readChar()
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readDouble(): Double = ReadStdin.readDouble()
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readFloat(): Float = ReadStdin.readFloat()
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readInt(): Int = ReadStdin.readInt()
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readLine(): String = ReadStdin.readLine()
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readLine(text: String, args: Any*): String = ReadStdin.readLine(text, args: _*)
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readLong(): Long = ReadStdin.readLong()
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readShort(): Short = ReadStdin.readShort()
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readf(format: String): List[Any] = ReadStdin.readf(format)
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readf1(format: String): Any = ReadStdin.readf1(format)
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readf2(format: String): (Any, Any) = ReadStdin.readf2(format)
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readf3(format: String): (Any, Any, Any) = ReadStdin.readf3(format)
- /** Reads a float value from an entire line of the default input.
- * @return the Float that was read.
- * @throws java.io.EOFException if the end of the
- * input stream has been reached.
- * @throws java.lang.NumberFormatException if the value couldn't be converted to a Float
+ /** Sets the default output stream.
*
+ * @param out the new output stream.
*/
- def readFloat(): Float = {
- val s = readLine()
- if (s == null)
- throw new java.io.EOFException("Console has reached end of input")
- else
- s.toFloat
- }
+ @deprecated("Use withOut", "2.11.0") def setOut(out: PrintStream): Unit = setOutDirect(out)
- /** Reads a double value from an entire line of the default input.
+ /** Sets the default output stream.
*
- * @return the Double that was read.
- * @throws java.io.EOFException if the end of the
- * input stream has been reached.
- * @throws java.lang.NumberFormatException if the value couldn't be converted to a Float
+ * @param out the new output stream.
*/
- def readDouble(): Double = {
- val s = readLine()
- if (s == null)
- throw new java.io.EOFException("Console has reached end of input")
- else
- s.toDouble
- }
+ @deprecated("Use withOut", "2.11.0") def setOut(out: OutputStream): Unit = setOutDirect(new PrintStream(out))
- /** Reads in some structured input (from the default input), specified by
- * a format specifier. See class `java.text.MessageFormat` for details of
- * the format specification.
+ /** Sets the default error stream.
*
- * @param format the format of the input.
- * @return a list of all extracted values.
- * @throws java.io.EOFException if the end of the input stream has been
- * reached.
+ * @param err the new error stream.
*/
- def readf(format: String): List[Any] = {
- val s = readLine()
- if (s == null)
- throw new java.io.EOFException("Console has reached end of input")
- else
- textComponents(new MessageFormat(format).parse(s))
- }
+ @deprecated("Use withErr", "2.11.0") def setErr(err: PrintStream): Unit = setErrDirect(err)
- /** Reads in some structured input (from the default input), specified by
- * a format specifier, returning only the first value extracted, according
- * to the format specification.
+ /** Sets the default error stream.
*
- * @param format format string, as accepted by `readf`.
- * @return The first value that was extracted from the input
+ * @param err the new error stream.
*/
- def readf1(format: String): Any = readf(format).head
+ @deprecated("Use withErr", "2.11.0") def setErr(err: OutputStream): Unit = setErrDirect(new PrintStream(err))
- /** Reads in some structured input (from the default input), specified
- * by a format specifier, returning only the first two values extracted,
- * according to the format specification.
+ /** Sets the default input stream.
*
- * @param format format string, as accepted by `readf`.
- * @return A [[scala.Tuple2]] containing the first two values extracted
+ * @param reader specifies the new input stream.
*/
- def readf2(format: String): (Any, Any) = {
- val res = readf(format)
- (res.head, res.tail.head)
- }
+ @deprecated("Use withIn", "2.11.0") def setIn(reader: Reader): Unit = setInDirect(new BufferedReader(reader))
- /** Reads in some structured input (from the default input), specified
- * by a format specifier, returning only the first three values extracted,
- * according to the format specification.
+ /** Sets the default input stream.
*
- * @param format format string, as accepted by `readf`.
- * @return A [[scala.Tuple3]] containing the first three values extracted
+ * @param in the new input stream.
*/
- def readf3(format: String): (Any, Any, Any) = {
- val res = readf(format)
- (res.head, res.tail.head, res.tail.tail.head)
- }
-
- private def textComponents(a: Array[AnyRef]): List[Any] = {
- var i: Int = a.length - 1
- var res: List[Any] = Nil
- while (i >= 0) {
- res = (a(i) match {
- case x: java.lang.Boolean => x.booleanValue()
- case x: java.lang.Byte => x.byteValue()
- case x: java.lang.Short => x.shortValue()
- case x: java.lang.Character => x.charValue()
- case x: java.lang.Integer => x.intValue()
- case x: java.lang.Long => x.longValue()
- case x: java.lang.Float => x.floatValue()
- case x: java.lang.Double => x.doubleValue()
- case x => x
- }) :: res;
- i -= 1
- }
- res
- }
+ @deprecated("Use withIn", "2.11.0") def setIn(in: InputStream): Unit = setInDirect(new BufferedReader(new InputStreamReader(in)))
}
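
With this change the `read*` methods become deprecation forwarders and the `setOut`/`setErr`/`setIn` mutators are deprecated in favour of the scoped `withOut`/`withErr`/`withIn` variants (the ANSI constants now come from the `AnsiColor` mixin). A small sketch of the preferred scoped style, with illustrative names only:

import java.io.{ ByteArrayOutputStream, PrintStream }

object ConsoleRedirectDemo {
  def main(args: Array[String]): Unit = {
    val buffer = new ByteArrayOutputStream()
    // Scoped redirection instead of the deprecated Console.setOut
    Console.withOut(new PrintStream(buffer)) {
      println("captured") // goes to the buffer, not to stdout
    }
    Console.err.println("captured bytes: " + buffer.size)
  }
}
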
diff --git a/src/library/scala/Double.scala b/src/library/scala/Double.scala
index f058d7c26b..85bf9fe5c5 100644
--- a/src/library/scala/Double.scala
+++ b/src/library/scala/Double.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,8 +10,6 @@
package scala
-import scala.language.implicitConversions
-
/** `Double`, a 64-bit IEEE-754 floating point number (equivalent to Java's `double` primitive type) is a
* subtype of [[scala.AnyVal]]. Instances of `Double` are not
* represented by an object in the underlying runtime system.
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index 47d7840e27..59be0cdfa3 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -56,14 +56,6 @@ abstract class Enumeration (initial: Int) extends Serializable {
def this() = this(0)
- @deprecated("Names should be specified individually or discovered via reflection", "2.10.0")
- def this(initial: Int, names: String*) = {
- this(initial)
- this.nextName = names.iterator
- }
- @deprecated("Names should be specified individually or discovered via reflection", "2.10.0")
- def this(names: String*) = this(0, names: _*)
-
/* Note that `readResolve` cannot be private, since otherwise
the JVM does not invoke it when deserializing subclasses. */
protected def readResolve(): AnyRef = thisenum.getClass.getField(MODULE_INSTANCE_NAME).get(null)
@@ -71,7 +63,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
/** The name of this enumeration.
*/
override def toString =
- ((getClass.getName stripSuffix MODULE_SUFFIX_STRING split '.').last split
+ ((getClass.getName stripSuffix MODULE_SUFFIX_STRING split '.').last split
Pattern.quote(NAME_JOIN_STRING)).last
/** The mapping from the integer used to identify values to the actual
@@ -103,7 +95,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
protected var nextName: Iterator[String] = _
private def nextNameOrNull =
- if (nextName != null && nextName.hasNext) nextName.next else null
+ if (nextName != null && nextName.hasNext) nextName.next() else null
/** The highest integer amongst those used to identify values in this
* enumeration. */
@@ -126,7 +118,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
*
* @param s an `Enumeration` name
* @return the `Value` of this `Enumeration` if its name matches `s`
- * @throws java.util.NoSuchElementException if no `Value` with a matching
+ * @throws NoSuchElementException if no `Value` with a matching
* name is in this `Enumeration`
*/
final def withName(s: String): Value = values.find(_.toString == s).get
@@ -262,7 +254,8 @@ abstract class Enumeration (initial: Int) extends Serializable {
def contains(v: Value) = nnIds contains (v.id - bottomId)
def + (value: Value) = new ValueSet(nnIds + (value.id - bottomId))
def - (value: Value) = new ValueSet(nnIds - (value.id - bottomId))
- def iterator = nnIds.iterator map (id => thisenum.apply(id + bottomId))
+ def iterator = nnIds.iterator map (id => thisenum.apply(bottomId + id))
+ override def keysIteratorFrom(start: Value) = nnIds keysIteratorFrom start.id map (id => thisenum.apply(bottomId + id))
override def stringPrefix = thisenum + ".ValueSet"
/** Creates a bit mask for the zero-adjusted ids in this set as a
* new array of longs */
@@ -284,7 +277,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
def newBuilder: mutable.Builder[Value, ValueSet] = new mutable.Builder[Value, ValueSet] {
private[this] val b = new mutable.BitSet
def += (x: Value) = { b += (x.id - bottomId); this }
- def clear() = b.clear
+ def clear() = b.clear()
def result() = new ValueSet(b.toImmutable)
}
/** The implicit builder for value sets */
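
The deprecated name-list constructors are gone, but the usual definition style and `withName` lookup (which, per the updated doc, throws `NoSuchElementException` on an unknown name) are unchanged; a brief sketch with an illustrative `Color` enumeration:

object Color extends Enumeration {
  val Red, Green, Blue = Value
}

object EnumerationDemo {
  def main(args: Array[String]): Unit = {
    println(Color.withName("Green"))     // Green
    println(Color.values.mkString(", ")) // Red, Green, Blue
    // Color.withName("Purple") would throw NoSuchElementException
  }
}
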
diff --git a/src/library/scala/Float.scala b/src/library/scala/Float.scala
index d942acec23..f67f45897f 100644
--- a/src/library/scala/Float.scala
+++ b/src/library/scala/Float.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function0.scala b/src/library/scala/Function0.scala
index 5f87b38057..2223091eb3 100644
--- a/src/library/scala/Function0.scala
+++ b/src/library/scala/Function0.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function1.scala b/src/library/scala/Function1.scala
index 22393c65dd..2e3de54c5a 100644
--- a/src/library/scala/Function1.scala
+++ b/src/library/scala/Function1.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function10.scala b/src/library/scala/Function10.scala
index f7e5d414f2..7789970a44 100644
--- a/src/library/scala/Function10.scala
+++ b/src/library/scala/Function10.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function11.scala b/src/library/scala/Function11.scala
index 53742bf733..d4276f3fd1 100644
--- a/src/library/scala/Function11.scala
+++ b/src/library/scala/Function11.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function12.scala b/src/library/scala/Function12.scala
index e349d9017d..dfa8bcfce6 100644
--- a/src/library/scala/Function12.scala
+++ b/src/library/scala/Function12.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function13.scala b/src/library/scala/Function13.scala
index 10ec64b87a..5404c208bf 100644
--- a/src/library/scala/Function13.scala
+++ b/src/library/scala/Function13.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function14.scala b/src/library/scala/Function14.scala
index 82dd409223..3145290bcf 100644
--- a/src/library/scala/Function14.scala
+++ b/src/library/scala/Function14.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function15.scala b/src/library/scala/Function15.scala
index be5fbeeca1..309ef53e71 100644
--- a/src/library/scala/Function15.scala
+++ b/src/library/scala/Function15.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function16.scala b/src/library/scala/Function16.scala
index 7a185b369c..c4cb107e87 100644
--- a/src/library/scala/Function16.scala
+++ b/src/library/scala/Function16.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function17.scala b/src/library/scala/Function17.scala
index 94e0000802..005ae2ab79 100644
--- a/src/library/scala/Function17.scala
+++ b/src/library/scala/Function17.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function18.scala b/src/library/scala/Function18.scala
index a3ee6776ba..371630dae3 100644
--- a/src/library/scala/Function18.scala
+++ b/src/library/scala/Function18.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function19.scala b/src/library/scala/Function19.scala
index 038dcbb778..95c60a467e 100644
--- a/src/library/scala/Function19.scala
+++ b/src/library/scala/Function19.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function2.scala b/src/library/scala/Function2.scala
index 0794a4048a..5690adb56a 100644
--- a/src/library/scala/Function2.scala
+++ b/src/library/scala/Function2.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function20.scala b/src/library/scala/Function20.scala
index 727684d6d5..a93f999d44 100644
--- a/src/library/scala/Function20.scala
+++ b/src/library/scala/Function20.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function21.scala b/src/library/scala/Function21.scala
index 2441278be8..7ebbb06798 100644
--- a/src/library/scala/Function21.scala
+++ b/src/library/scala/Function21.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function22.scala b/src/library/scala/Function22.scala
index 1f70b190a6..e5a3d83fb9 100644
--- a/src/library/scala/Function22.scala
+++ b/src/library/scala/Function22.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function3.scala b/src/library/scala/Function3.scala
index bbbde82056..850290d244 100644
--- a/src/library/scala/Function3.scala
+++ b/src/library/scala/Function3.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function4.scala b/src/library/scala/Function4.scala
index f100860a97..c9ac6df32e 100644
--- a/src/library/scala/Function4.scala
+++ b/src/library/scala/Function4.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function5.scala b/src/library/scala/Function5.scala
index cba9b6ce52..360a460306 100644
--- a/src/library/scala/Function5.scala
+++ b/src/library/scala/Function5.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function6.scala b/src/library/scala/Function6.scala
index 0b8addf7de..d30877e765 100644
--- a/src/library/scala/Function6.scala
+++ b/src/library/scala/Function6.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function7.scala b/src/library/scala/Function7.scala
index 2098658fa9..b19caf2b50 100644
--- a/src/library/scala/Function7.scala
+++ b/src/library/scala/Function7.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function8.scala b/src/library/scala/Function8.scala
index 08a480dce5..3aff0b034c 100644
--- a/src/library/scala/Function8.scala
+++ b/src/library/scala/Function8.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function9.scala b/src/library/scala/Function9.scala
index 2e35f7949c..f80ccf48f9 100644
--- a/src/library/scala/Function9.scala
+++ b/src/library/scala/Function9.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Int.scala b/src/library/scala/Int.scala
index ae36413469..1bacdbcee9 100644
--- a/src/library/scala/Int.scala
+++ b/src/library/scala/Int.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Long.scala b/src/library/scala/Long.scala
index 4ee9383c2a..83adcda819 100644
--- a/src/library/scala/Long.scala
+++ b/src/library/scala/Long.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/LowPriorityImplicits.scala b/src/library/scala/LowPriorityImplicits.scala
index bf6e494c11..535f1ac699 100644
--- a/src/library/scala/LowPriorityImplicits.scala
+++ b/src/library/scala/LowPriorityImplicits.scala
@@ -22,7 +22,7 @@ import scala.language.implicitConversions
* @author Martin Odersky
* @since 2.8
*/
-class LowPriorityImplicits {
+private[scala] abstract class LowPriorityImplicits {
/** We prefer the java.lang.* boxed types to these wrappers in
* any potential conflicts. Conflicts do exist because the wrappers
* need to implement ScalaNumber in order to have a symmetric equals
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index 3873df99e9..4b071166c7 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -209,6 +209,15 @@ sealed abstract class Option[+A] extends Product with Serializable {
def withFilter(q: A => Boolean): WithFilter = new WithFilter(x => p(x) && q(x))
}
+ /** Tests whether the option contains a given value as an element.
+ *
+ * @param elem the element to test.
+ * @return `true` if the option has an element that is equal (as
+ * determined by `==`) to `elem`, `false` otherwise.
+ */
+ final def contains[A1 >: A](elem: A1): Boolean =
+ !isEmpty && this.get == elem
+
/** Returns true if this option is nonempty '''and''' the predicate
* $p returns true when applied to this $option's value.
* Otherwise, returns false.
@@ -247,7 +256,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
* value (if possible), or $none.
*/
@inline final def collect[B](pf: PartialFunction[A, B]): Option[B] =
- if (!isEmpty && pf.isDefinedAt(this.get)) Some(pf(this.get)) else None
+ if (!isEmpty) pf.lift(this.get) else None
/** Returns this $option if it is nonempty,
* otherwise return the result of evaluating `alternative`.
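
The newly added `Option.contains` mirrors the collection method of the same name, and `collect` now routes through `PartialFunction#lift` with the same observable result; a short sketch:

object OptionContainsDemo {
  def main(args: Array[String]): Unit = {
    val some: Option[Int] = Some(3)
    val none: Option[Int] = None
    println(some.contains(3)) // true
    println(some.contains(4)) // false
    println(none.contains(3)) // false
    println(some.collect { case n if n % 2 == 1 => n * 2 }) // Some(6)
  }
}
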
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index 9bb57877d9..9a468489a2 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -15,6 +15,7 @@ import generic.CanBuildFrom
import scala.annotation.{ elidable, implicitNotFound }
import scala.annotation.elidable.ASSERTION
import scala.language.{implicitConversions, existentials}
+import scala.io.ReadStdin
/** The `Predef` object provides definitions that are accessible in all Scala
* compilation units without explicit qualification.
@@ -68,7 +69,7 @@ import scala.language.{implicitConversions, existentials}
* Short value to a Long value as required, and to add additional higher-order
* functions to Array values. These are described in more detail in the documentation of [[scala.Array]].
*/
-object Predef extends LowPriorityImplicits {
+object Predef extends LowPriorityImplicits with DeprecatedPredef {
/**
* Retrieve the runtime representation of a class type. `classOf[T]` is equivalent to
* the class literal `T.class` in Java.
@@ -101,19 +102,19 @@ object Predef extends LowPriorityImplicits {
// Manifest types, companions, and incantations for summoning
@annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.")
- @deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
+ @deprecated("Use `scala.reflect.ClassTag` instead", "2.10.0")
type ClassManifest[T] = scala.reflect.ClassManifest[T]
// TODO undeprecated until Scala reflection becomes non-experimental
// @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
type OptManifest[T] = scala.reflect.OptManifest[T]
@annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
// TODO undeprecated until Scala reflection becomes non-experimental
- // @deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
+ // @deprecated("Use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
type Manifest[T] = scala.reflect.Manifest[T]
- @deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
+ @deprecated("Use `scala.reflect.ClassTag` instead", "2.10.0")
val ClassManifest = scala.reflect.ClassManifest
// TODO undeprecated until Scala reflection becomes non-experimental
- // @deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
+ // @deprecated("Use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
val Manifest = scala.reflect.Manifest
// TODO undeprecated until Scala reflection becomes non-experimental
// @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
@@ -136,22 +137,14 @@ object Predef extends LowPriorityImplicits {
// Apparently needed for the xml library
val $scope = scala.xml.TopScope
- // Deprecated
+ // errors and asserts -------------------------------------------------
+ // !!! Remove this when possible - ideally for 2.11.
+ // We are stuck with it a while longer because sbt's compiler interface
+ // still calls it as of 0.12.2.
@deprecated("Use `sys.error(message)` instead", "2.9.0")
def error(message: String): Nothing = sys.error(message)
- @deprecated("Use `sys.exit()` instead", "2.9.0")
- def exit(): Nothing = sys.exit()
-
- @deprecated("Use `sys.exit(status)` instead", "2.9.0")
- def exit(status: Int): Nothing = sys.exit(status)
-
- @deprecated("Use `formatString.format(args: _*)` or `arg.formatted(formatString)` instead", "2.9.0")
- def format(text: String, xs: Any*) = augmentString(text).format(xs: _*)
-
- // errors and asserts -------------------------------------------------
-
/** Tests an expression, throwing an `AssertionError` if false.
* Calls to this method will not be generated if `-Xelide-below`
* is at least `ASSERTION`.
@@ -233,19 +226,6 @@ object Predef extends LowPriorityImplicits {
throw new IllegalArgumentException("requirement failed: "+ message)
}
- final class Ensuring[A](val __resultOfEnsuring: A) extends AnyVal {
- // `__resultOfEnsuring` must be a public val to allow inlining.
- // See comments in ArrowAssoc for more.
- @deprecated("Use `__resultOfEnsuring` instead", "2.10.0")
- def x = __resultOfEnsuring
-
- def ensuring(cond: Boolean): A = { assert(cond); __resultOfEnsuring }
- def ensuring(cond: Boolean, msg: => Any): A = { assert(cond, msg); __resultOfEnsuring }
- def ensuring(cond: A => Boolean): A = { assert(cond(__resultOfEnsuring)); __resultOfEnsuring }
- def ensuring(cond: A => Boolean, msg: => Any): A = { assert(cond(__resultOfEnsuring), msg); __resultOfEnsuring }
- }
- @inline implicit def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x)
-
/** `???` can be used for marking methods that remain to be implemented.
* @throws A `NotImplementedError`
*/
@@ -265,20 +245,58 @@ object Predef extends LowPriorityImplicits {
def unapply[A, B, C](x: Tuple3[A, B, C]): Option[Tuple3[A, B, C]] = Some(x)
}
- final class ArrowAssoc[A](val __leftOfArrow: A) extends AnyVal {
- // `__leftOfArrow` must be a public val to allow inlining. The val
- // used to be called `x`, but now goes by `__leftOfArrow`, as that
- // reduces the chances of a user's writing `foo.__leftOfArrow` and
- // being confused why they get an ambiguous implicit conversion
- // error. (`foo.x` used to produce this error since both
- // any2Ensuring and any2ArrowAssoc pimped an `x` onto everything)
- @deprecated("Use `__leftOfArrow` instead", "2.10.0")
- def x = __leftOfArrow
+ // implicit classes -----------------------------------------------------
+ implicit final class ArrowAssoc[A](val __leftOfArrow: A) extends AnyVal {
@inline def -> [B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y)
def →[B](y: B): Tuple2[A, B] = ->(y)
}
- @inline implicit def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x)
+
+ implicit final class Ensuring[A](val __resultOfEnsuring: A) extends AnyVal {
+ def ensuring(cond: Boolean): A = { assert(cond); __resultOfEnsuring }
+ def ensuring(cond: Boolean, msg: => Any): A = { assert(cond, msg); __resultOfEnsuring }
+ def ensuring(cond: A => Boolean): A = { assert(cond(__resultOfEnsuring)); __resultOfEnsuring }
+ def ensuring(cond: A => Boolean, msg: => Any): A = { assert(cond(__resultOfEnsuring), msg); __resultOfEnsuring }
+ }
+
+ implicit final class StringFormat[A](val __stringToFormat: A) extends AnyVal {
+ /** Returns string formatted according to given `format` string.
+ * Format strings are as for `String.format`
+ * (@see java.lang.String.format).
+ */
+ @inline def formatted(fmtstr: String): String = fmtstr format __stringToFormat
+ }
+
+ implicit final class StringAdd[A](val __thingToAdd: A) extends AnyVal {
+ def +(other: String) = String.valueOf(__thingToAdd) + other
+ }
+
+ implicit final class RichException(val __throwableToEnrich: Throwable) extends AnyVal {
+ import scala.compat.Platform.EOL
+ @deprecated("Use Throwable#getStackTrace", "2.11.0") def getStackTraceString = __throwableToEnrich.getStackTrace().mkString("", EOL, EOL)
+ }
+
+ implicit final class SeqCharSequence(val __sequenceOfChars: scala.collection.IndexedSeq[Char]) extends CharSequence {
+ def length: Int = __sequenceOfChars.length
+ def charAt(index: Int): Char = __sequenceOfChars(index)
+ def subSequence(start: Int, end: Int): CharSequence = new SeqCharSequence(__sequenceOfChars.slice(start, end))
+ override def toString = __sequenceOfChars mkString ""
+ }
+
+ implicit final class ArrayCharSequence(val __arrayOfChars: Array[Char]) extends CharSequence {
+ def length: Int = __arrayOfChars.length
+ def charAt(index: Int): Char = __arrayOfChars(index)
+ def subSequence(start: Int, end: Int): CharSequence = new runtime.ArrayCharSequence(__arrayOfChars, start, end)
+ override def toString = __arrayOfChars mkString ""
+ }
+
+ implicit val StringCanBuildFrom: CanBuildFrom[String, Char, String] = new CanBuildFrom[String, Char, String] {
+ def apply(from: String) = apply()
+ def apply() = mutable.StringBuilder.newBuilder
+ }
+
+ @inline implicit def augmentString(x: String): StringOps = new StringOps(x)
+ @inline implicit def unaugmentString(x: StringOps): String = x.repr
// printing and reading -----------------------------------------------
@@ -287,28 +305,10 @@ object Predef extends LowPriorityImplicits {
def println(x: Any) = Console.println(x)
def printf(text: String, xs: Any*) = Console.print(text.format(xs: _*))
- def readLine(): String = Console.readLine()
- def readLine(text: String, args: Any*) = Console.readLine(text, args: _*)
- def readBoolean() = Console.readBoolean()
- def readByte() = Console.readByte()
- def readShort() = Console.readShort()
- def readChar() = Console.readChar()
- def readInt() = Console.readInt()
- def readLong() = Console.readLong()
- def readFloat() = Console.readFloat()
- def readDouble() = Console.readDouble()
- def readf(format: String) = Console.readf(format)
- def readf1(format: String) = Console.readf1(format)
- def readf2(format: String) = Console.readf2(format)
- def readf3(format: String) = Console.readf3(format)
-
// views --------------------------------------------------------------
- implicit def exceptionWrapper(exc: Throwable) = new runtime.RichException(exc)
implicit def tuple2ToZippedOps[T1, T2](x: (T1, T2)) = new runtime.Tuple2Zipped.Ops(x)
implicit def tuple3ToZippedOps[T1, T2, T3](x: (T1, T2, T3)) = new runtime.Tuple3Zipped.Ops(x)
- implicit def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new runtime.SeqCharSequence(xs)
- implicit def arrayToCharSequence(xs: Array[Char]): CharSequence = new runtime.ArrayCharSequence(xs, 0, xs.length)
implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = (xs match {
case x: Array[AnyRef] => refArrayOps[AnyRef](x)
@@ -335,33 +335,6 @@ object Predef extends LowPriorityImplicits {
implicit def shortArrayOps(xs: Array[Short]): ArrayOps[Short] = new ArrayOps.ofShort(xs)
implicit def unitArrayOps(xs: Array[Unit]): ArrayOps[Unit] = new ArrayOps.ofUnit(xs)
- // Primitive Widenings --------------------------------------------------------------
-
- @deprecated("Use `.toShort` for explicit conversion and `Byte.byte2short` for implicit conversion", "2.10.0") def byte2short(x: Byte): Short = x.toShort
- @deprecated("Use `.toInt` for explicit conversion and `Byte.byte2int` for implicit conversion", "2.10.0") def byte2int(x: Byte): Int = x.toInt
- @deprecated("Use `.toLong` for explicit conversion and `Byte.byte2long for implicit conversion", "2.10.0") def byte2long(x: Byte): Long = x.toLong
- @deprecated("Use `.toFloat` for explicit conversion and `Byte.byte2float` for implicit conversion", "2.10.0") def byte2float(x: Byte): Float = x.toFloat
- @deprecated("Use `.toDouble` for explicit conversion and `Byte.byte2double` for implicit conversion", "2.10.0") def byte2double(x: Byte): Double = x.toDouble
-
- @deprecated("Use `.toInt` for explicit conversion and `Short.short2int` for implicit conversion", "2.10.0") def short2int(x: Short): Int = x.toInt
- @deprecated("Use `.toLong` for explicit conversion and `Short.short2long` for implicit conversion", "2.10.0") def short2long(x: Short): Long = x.toLong
- @deprecated("Use `.toFloat` for explicit conversion and `Short.short2float` for implicit conversion", "2.10.0") def short2float(x: Short): Float = x.toFloat
- @deprecated("Use `.toDouble` for explicit conversion and `Short.short2double` for implicit conversion", "2.10.0") def short2double(x: Short): Double = x.toDouble
-
- @deprecated("Use `.toInt` for explicit conversion and `Char.char2int` for implicit conversion", "2.10.0") def char2int(x: Char): Int = x.toInt
- @deprecated("Use `.toLong` for explicit conversion and `Char.char2long` for implicit conversion", "2.10.0") def char2long(x: Char): Long = x.toLong
- @deprecated("Use `.toFloat` for explicit conversion and `Char.char2float` for implicit conversion", "2.10.0") def char2float(x: Char): Float = x.toFloat
- @deprecated("Use `.toDouble` for explicit conversion and `Char.char2double` for implicit conversion", "2.10.0") def char2double(x: Char): Double = x.toDouble
-
- @deprecated("Use `.toLong` for explicit conversion and `Int.int2long` for implicit conversion", "2.10.0") def int2long(x: Int): Long = x.toLong
- @deprecated("Use `.toFloat` for explicit conversion and `Int.int2float` for implicit conversion", "2.10.0") def int2float(x: Int): Float = x.toFloat
- @deprecated("Use `.toDouble` for explicit conversion and `Int.int2double` for implicit conversion", "2.10.0") def int2double(x: Int): Double = x.toDouble
-
- @deprecated("Use `.toFloat` for explicit conversion and `Long.long2float` for implicit conversion", "2.10.0") def long2float(x: Long): Float = x.toFloat
- @deprecated("Use `.toDouble` for explicit conversion and `Long.long2double` for implicit conversion", "2.10.0") def long2double(x: Long): Double = x.toDouble
-
- @deprecated("Use `.toDouble` for explicit conversion and `Float.float2double` for implicit conversion", "2.10.0") def float2double(x: Float): Double = x.toDouble
-
// "Autoboxing" and "Autounboxing" ---------------------------------------------------
implicit def byte2Byte(x: Byte) = java.lang.Byte.valueOf(x)
@@ -395,21 +368,6 @@ object Predef extends LowPriorityImplicits {
implicit def Double2double(x: java.lang.Double): Double = x.doubleValue
implicit def Boolean2boolean(x: java.lang.Boolean): Boolean = x.booleanValue
- // Strings and CharSequences --------------------------------------------------------------
-
- @inline implicit def any2stringfmt(x: Any) = new runtime.StringFormat(x)
- @inline implicit def augmentString(x: String): StringOps = new StringOps(x)
- implicit def any2stringadd(x: Any) = new runtime.StringAdd(x)
- implicit def unaugmentString(x: StringOps): String = x.repr
-
- @deprecated("Use `StringCanBuildFrom`", "2.10.0")
- def stringCanBuildFrom: CanBuildFrom[String, Char, String] = StringCanBuildFrom
-
- implicit val StringCanBuildFrom: CanBuildFrom[String, Char, String] = new CanBuildFrom[String, Char, String] {
- def apply(from: String) = apply()
- def apply() = mutable.StringBuilder.newBuilder
- }
-
// Type Constraints --------------------------------------------------------------
/**
@@ -460,3 +418,31 @@ object Predef extends LowPriorityImplicits {
implicit def dummyImplicit: DummyImplicit = new DummyImplicit
}
}
+
+private[scala] trait DeprecatedPredef {
+ self: Predef.type =>
+
+ // Deprecated stubs for any who may have been calling these methods directly.
+ @deprecated("Use `ArrowAssoc`", "2.11.0") def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x)
+ @deprecated("Use `Ensuring`", "2.11.0") def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x)
+ @deprecated("Use `StringFormat`", "2.11.0") def any2stringfmt(x: Any): StringFormat[Any] = new StringFormat(x)
+ @deprecated("Use String interpolation", "2.11.0") def any2stringadd(x: Any): StringAdd[Any] = new StringAdd(x)
+ @deprecated("Use `Throwable` directly", "2.11.0") def exceptionWrapper(exc: Throwable) = new RichException(exc)
+ @deprecated("Use `SeqCharSequence`", "2.11.0") def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new SeqCharSequence(xs)
+ @deprecated("Use `ArrayCharSequence`", "2.11.0") def arrayToCharSequence(xs: Array[Char]): CharSequence = new ArrayCharSequence(xs)
+
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readLine(): String = ReadStdin.readLine()
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readLine(text: String, args: Any*) = ReadStdin.readLine(text, args: _*)
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readBoolean() = ReadStdin.readBoolean()
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readByte() = ReadStdin.readByte()
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readShort() = ReadStdin.readShort()
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readChar() = ReadStdin.readChar()
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readInt() = ReadStdin.readInt()
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readLong() = ReadStdin.readLong()
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readFloat() = ReadStdin.readFloat()
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readDouble() = ReadStdin.readDouble()
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readf(format: String) = ReadStdin.readf(format)
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readf1(format: String) = ReadStdin.readf1(format)
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readf2(format: String) = ReadStdin.readf2(format)
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readf3(format: String) = ReadStdin.readf3(format)
+}
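
A migration sketch for call sites that hit the deprecated stubs above (2.11-era library assumed; the object name below is illustrative and not part of the diff):

    object PredefMigration {
      val pair: (Int, String) = 1 -> "one"     // now supplied by the implicit class ArrowAssoc
      val msg: String = s"pair is $pair"       // string interpolation instead of any2stringadd
      // Console input moved out of Predef; per the deprecation messages it now lives in
      // scala.io.ReadStdin (which may not be publicly accessible in every build):
      // def ask(): String = scala.io.ReadStdin.readLine("name? ")
    }
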
diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala
index 2c6838f6b3..0798587772 100644
--- a/src/library/scala/Product.scala
+++ b/src/library/scala/Product.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product1.scala b/src/library/scala/Product1.scala
index edd095c5c6..dbc34ba66a 100644
--- a/src/library/scala/Product1.scala
+++ b/src/library/scala/Product1.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product10.scala b/src/library/scala/Product10.scala
index 8daefde699..70de79d49a 100644
--- a/src/library/scala/Product10.scala
+++ b/src/library/scala/Product10.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product11.scala b/src/library/scala/Product11.scala
index 90b4e8013e..1bb79ac017 100644
--- a/src/library/scala/Product11.scala
+++ b/src/library/scala/Product11.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product12.scala b/src/library/scala/Product12.scala
index d5997ea05a..d7e1e1b05c 100644
--- a/src/library/scala/Product12.scala
+++ b/src/library/scala/Product12.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product13.scala b/src/library/scala/Product13.scala
index db8e0f3722..8571b45a40 100644
--- a/src/library/scala/Product13.scala
+++ b/src/library/scala/Product13.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product14.scala b/src/library/scala/Product14.scala
index 113c07e8c4..a2f5140370 100644
--- a/src/library/scala/Product14.scala
+++ b/src/library/scala/Product14.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product15.scala b/src/library/scala/Product15.scala
index a6ad9c7594..1c6ad0011c 100644
--- a/src/library/scala/Product15.scala
+++ b/src/library/scala/Product15.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product16.scala b/src/library/scala/Product16.scala
index cbf47ece94..f03b0b34a2 100644
--- a/src/library/scala/Product16.scala
+++ b/src/library/scala/Product16.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product17.scala b/src/library/scala/Product17.scala
index f56836bfd8..72df1b496a 100644
--- a/src/library/scala/Product17.scala
+++ b/src/library/scala/Product17.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product18.scala b/src/library/scala/Product18.scala
index 5b86bcff65..0402f90a01 100644
--- a/src/library/scala/Product18.scala
+++ b/src/library/scala/Product18.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product19.scala b/src/library/scala/Product19.scala
index ed4bf36c93..b9770db47b 100644
--- a/src/library/scala/Product19.scala
+++ b/src/library/scala/Product19.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product2.scala b/src/library/scala/Product2.scala
index e27e54eff9..a43a4a285c 100644
--- a/src/library/scala/Product2.scala
+++ b/src/library/scala/Product2.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product20.scala b/src/library/scala/Product20.scala
index 47437a20af..7b0df201ec 100644
--- a/src/library/scala/Product20.scala
+++ b/src/library/scala/Product20.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product21.scala b/src/library/scala/Product21.scala
index 319d2725c0..f81347aac0 100644
--- a/src/library/scala/Product21.scala
+++ b/src/library/scala/Product21.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product22.scala b/src/library/scala/Product22.scala
index 6ab3737acd..7a25891c6e 100644
--- a/src/library/scala/Product22.scala
+++ b/src/library/scala/Product22.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product3.scala b/src/library/scala/Product3.scala
index 1cfbd7956b..9976240935 100644
--- a/src/library/scala/Product3.scala
+++ b/src/library/scala/Product3.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product4.scala b/src/library/scala/Product4.scala
index 843571fd60..d6c1543390 100644
--- a/src/library/scala/Product4.scala
+++ b/src/library/scala/Product4.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product5.scala b/src/library/scala/Product5.scala
index df73bba3dd..5f1b11a30d 100644
--- a/src/library/scala/Product5.scala
+++ b/src/library/scala/Product5.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product6.scala b/src/library/scala/Product6.scala
index 36906ca54e..efd9408d73 100644
--- a/src/library/scala/Product6.scala
+++ b/src/library/scala/Product6.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product7.scala b/src/library/scala/Product7.scala
index e7b2c13ad1..fab0a997a1 100644
--- a/src/library/scala/Product7.scala
+++ b/src/library/scala/Product7.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product8.scala b/src/library/scala/Product8.scala
index 916e57ec39..41391f7050 100644
--- a/src/library/scala/Product8.scala
+++ b/src/library/scala/Product8.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product9.scala b/src/library/scala/Product9.scala
index d5e72edc0b..e22538e1ee 100644
--- a/src/library/scala/Product9.scala
+++ b/src/library/scala/Product9.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/SerialVersionUID.scala b/src/library/scala/SerialVersionUID.scala
index 1f7d047060..77094f0bbf 100644
--- a/src/library/scala/SerialVersionUID.scala
+++ b/src/library/scala/SerialVersionUID.scala
@@ -12,4 +12,4 @@ package scala
* Annotation for specifying the `static SerialVersionUID` field
* of a serializable class.
*/
-class SerialVersionUID(uid: Long) extends scala.annotation.StaticAnnotation
+class SerialVersionUID(value: Long) extends scala.annotation.ClassfileAnnotation
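
A usage sketch for the annotation after the change above: as a ClassfileAnnotation its argument must be a compile-time constant, and the single parameter is now named `value` (the class below is illustrative, not part of the diff):

    @SerialVersionUID(13L)
    class Account(val id: Long) extends Serializable
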
diff --git a/src/library/scala/Short.scala b/src/library/scala/Short.scala
index 35c5fe3ff0..cdd298e542 100644
--- a/src/library/scala/Short.scala
+++ b/src/library/scala/Short.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Specializable.scala b/src/library/scala/Specializable.scala
index c7a6091a65..137598c28d 100644
--- a/src/library/scala/Specializable.scala
+++ b/src/library/scala/Specializable.scala
@@ -11,7 +11,7 @@ package scala
/** A common supertype for companions of specializable types.
* Should not be extended in user code.
*/
-trait Specializable extends SpecializableCompanion
+trait Specializable
object Specializable {
// No type parameter in @specialized annotation.
diff --git a/src/library/scala/Tuple1.scala b/src/library/scala/Tuple1.scala
index 02fdd0cba5..6776e4fbff 100644
--- a/src/library/scala/Tuple1.scala
+++ b/src/library/scala/Tuple1.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple10.scala b/src/library/scala/Tuple10.scala
index ba2a02a8b2..e016dea63d 100644
--- a/src/library/scala/Tuple10.scala
+++ b/src/library/scala/Tuple10.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple11.scala b/src/library/scala/Tuple11.scala
index 7f51d172d4..87e759fc0a 100644
--- a/src/library/scala/Tuple11.scala
+++ b/src/library/scala/Tuple11.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple12.scala b/src/library/scala/Tuple12.scala
index 4bbc6a0eab..7c95f8aa5f 100644
--- a/src/library/scala/Tuple12.scala
+++ b/src/library/scala/Tuple12.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple13.scala b/src/library/scala/Tuple13.scala
index 77bd59bf2e..9f2ecd86da 100644
--- a/src/library/scala/Tuple13.scala
+++ b/src/library/scala/Tuple13.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple14.scala b/src/library/scala/Tuple14.scala
index bf7a4ce016..f03e279743 100644
--- a/src/library/scala/Tuple14.scala
+++ b/src/library/scala/Tuple14.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple15.scala b/src/library/scala/Tuple15.scala
index 582c359bc6..6074a40cd0 100644
--- a/src/library/scala/Tuple15.scala
+++ b/src/library/scala/Tuple15.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple16.scala b/src/library/scala/Tuple16.scala
index a1e9a790ff..0c38bd783f 100644
--- a/src/library/scala/Tuple16.scala
+++ b/src/library/scala/Tuple16.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple17.scala b/src/library/scala/Tuple17.scala
index f531766c18..7cc7ea8f7e 100644
--- a/src/library/scala/Tuple17.scala
+++ b/src/library/scala/Tuple17.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple18.scala b/src/library/scala/Tuple18.scala
index a96db25e4b..7404349989 100644
--- a/src/library/scala/Tuple18.scala
+++ b/src/library/scala/Tuple18.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple19.scala b/src/library/scala/Tuple19.scala
index 718280d68a..ca8f2ba401 100644
--- a/src/library/scala/Tuple19.scala
+++ b/src/library/scala/Tuple19.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple2.scala b/src/library/scala/Tuple2.scala
index 35d5a441c8..4337e62a53 100644
--- a/src/library/scala/Tuple2.scala
+++ b/src/library/scala/Tuple2.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple20.scala b/src/library/scala/Tuple20.scala
index 4a44c0bb89..9d6e2f71ff 100644
--- a/src/library/scala/Tuple20.scala
+++ b/src/library/scala/Tuple20.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple21.scala b/src/library/scala/Tuple21.scala
index 580a169e39..6173ddb118 100644
--- a/src/library/scala/Tuple21.scala
+++ b/src/library/scala/Tuple21.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple22.scala b/src/library/scala/Tuple22.scala
index fd3392ddea..d426a548e5 100644
--- a/src/library/scala/Tuple22.scala
+++ b/src/library/scala/Tuple22.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple3.scala b/src/library/scala/Tuple3.scala
index 5ed13602e3..3c7e2af0d1 100644
--- a/src/library/scala/Tuple3.scala
+++ b/src/library/scala/Tuple3.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple4.scala b/src/library/scala/Tuple4.scala
index a859078bcf..b6913dbf48 100644
--- a/src/library/scala/Tuple4.scala
+++ b/src/library/scala/Tuple4.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple5.scala b/src/library/scala/Tuple5.scala
index 1edfb673ee..4f83f44cb9 100644
--- a/src/library/scala/Tuple5.scala
+++ b/src/library/scala/Tuple5.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple6.scala b/src/library/scala/Tuple6.scala
index 5b74937e58..ac2ec43bd6 100644
--- a/src/library/scala/Tuple6.scala
+++ b/src/library/scala/Tuple6.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple7.scala b/src/library/scala/Tuple7.scala
index a7f572e9f0..62407b1d9b 100644
--- a/src/library/scala/Tuple7.scala
+++ b/src/library/scala/Tuple7.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple8.scala b/src/library/scala/Tuple8.scala
index 9bb427d689..0611fefd16 100644
--- a/src/library/scala/Tuple8.scala
+++ b/src/library/scala/Tuple8.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Tuple9.scala b/src/library/scala/Tuple9.scala
index 4d50539e0c..52f27f7c46 100644
--- a/src/library/scala/Tuple9.scala
+++ b/src/library/scala/Tuple9.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/UninitializedFieldError.scala b/src/library/scala/UninitializedFieldError.scala
index 10c6cccf15..0dfba2a187 100644
--- a/src/library/scala/UninitializedFieldError.scala
+++ b/src/library/scala/UninitializedFieldError.scala
@@ -18,8 +18,6 @@ package scala
*
* @since 2.7
*/
-final case class UninitializedFieldError(msg: String)
- extends RuntimeException(msg) {
- def this(obj: Any) =
- this(if (null != obj) obj.toString() else "null")
+final case class UninitializedFieldError(msg: String) extends RuntimeException(msg) {
+ def this(obj: Any) = this("" + obj)
}
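
A behavioural sketch of the simplified auxiliary constructor above: string concatenation renders null as "null", so the explicit null check is no longer needed (values below are illustrative):

    new UninitializedFieldError("field not set").getMessage  // "field not set"
    new UninitializedFieldError(null: Any).getMessage        // "null", via "" + obj
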
diff --git a/src/library/scala/Unit.scala b/src/library/scala/Unit.scala
index dc67e60314..01e592ec3c 100644
--- a/src/library/scala/Unit.scala
+++ b/src/library/scala/Unit.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,9 +10,6 @@
package scala
-import scala.language.implicitConversions
-
-
/** `Unit` is a subtype of [[scala.AnyVal]]. There is only one value of type
* `Unit`, `()`, and it is not represented by any object in the underlying
* runtime system. A method with return type `Unit` is analogous to a Java
diff --git a/src/library/scala/annotation/serializable.scala b/src/library/scala/annotation/serializable.scala
deleted file mode 100644
index 1e1aff19d3..0000000000
--- a/src/library/scala/annotation/serializable.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.annotation
-
-/**
- * An annotation that designates the class to which it is applied as serializable
- */
-@deprecated("instead of `@serializable class C`, use `class C extends Serializable`", "2.9.0")
-class serializable extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/target/package.scala b/src/library/scala/annotation/target/package.scala
deleted file mode 100644
index ac2836c0a8..0000000000
--- a/src/library/scala/annotation/target/package.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.annotation
-
-package object target {
- @deprecated("Use `@scala.annotation.meta.beanGetter` instead", "2.10.0")
- type beanGetter = scala.annotation.meta.beanGetter
-
- @deprecated("Use `@scala.annotation.meta.beanSetter` instead", "2.10.0")
- type beanSetter = scala.annotation.meta.beanSetter
-
- @deprecated("Use `@scala.annotation.meta.field` instead", "2.10.0")
- type field = scala.annotation.meta.field
-
- @deprecated("Use `@scala.annotation.meta.getter` instead", "2.10.0")
- type getter = scala.annotation.meta.getter
-
- @deprecated("Use `@scala.annotation.meta.param` instead", "2.10.0")
- type param = scala.annotation.meta.param
-
- @deprecated("Use `@scala.annotation.meta.setter` instead", "2.10.0")
- type setter = scala.annotation.meta.setter
-}
diff --git a/src/library/scala/beans/ScalaBeanInfo.scala b/src/library/scala/beans/ScalaBeanInfo.scala
index 3a95335d71..ac8fa263d7 100644
--- a/src/library/scala/beans/ScalaBeanInfo.scala
+++ b/src/library/scala/beans/ScalaBeanInfo.scala
@@ -27,7 +27,7 @@ abstract class ScalaBeanInfo(clazz: java.lang.Class[_],
for (m <- clazz.getMethods if methods.exists(_ == m.getName))
yield new MethodDescriptor(m)
- init
+ init()
override def getPropertyDescriptors() = pd
override def getMethodDescriptors() = md
@@ -35,10 +35,10 @@ abstract class ScalaBeanInfo(clazz: java.lang.Class[_],
// override def getAdditionalBeanInfo() = Array(Introspector getBeanInfo clazz.getSuperclass)
private def init() {
- var i = 0;
+ var i = 0
while (i < props.length) {
pd(i/3) = new PropertyDescriptor(props(i), clazz, props(i+1), props(i+2))
- i = i + 3;
+ i = i + 3
}
}
diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala
index 4a1c0beaa6..72a6713ffd 100644
--- a/src/library/scala/collection/BitSetLike.scala
+++ b/src/library/scala/collection/BitSetLike.scala
@@ -11,7 +11,6 @@
package scala.collection
import BitSetLike._
-import generic._
import mutable.StringBuilder
/** A template trait for bitsets.
@@ -99,8 +98,10 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
fromBitMaskNoCopy(a)
}
- def iterator: Iterator[Int] = new AbstractIterator[Int] {
- private var current = 0
+ def iterator: Iterator[Int] = iteratorFrom(0)
+
+ override def keysIteratorFrom(start: Int) = new AbstractIterator[Int] {
+ private var current = start
private val end = nwords * WordLength
def hasNext: Boolean = {
while (current < end && !self.contains(current)) current += 1
@@ -108,7 +109,7 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
}
def next(): Int =
if (hasNext) { val r = current; current += 1; r }
- else Iterator.empty.next
+ else Iterator.empty.next()
}
override def foreach[B](f: Int => B) {
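
A sketch of the new keysIteratorFrom on a concrete bitset (immutable.BitSet mixes in BitSetLike; the values are illustrative, not part of the diff):

    import scala.collection.immutable.BitSet
    val bs = BitSet(1, 3, 8, 21)
    bs.keysIteratorFrom(4).toList   // List(8, 21); the plain iterator is now iteratorFrom(0)
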
diff --git a/src/library/scala/collection/DefaultMap.scala b/src/library/scala/collection/DefaultMap.scala
index 5c91183891..bbd6b2c2fc 100644
--- a/src/library/scala/collection/DefaultMap.scala
+++ b/src/library/scala/collection/DefaultMap.scala
@@ -6,12 +6,8 @@
** |/ **
\* */
-
-
package scala.collection
-import generic._
-
/** A default map which implements the `+` and `-` methods of maps.
*
* Instances that inherit from `DefaultMap[A, B]` still have to define:
@@ -27,14 +23,14 @@ import generic._
* @since 2.8
*/
trait DefaultMap[A, +B] extends Map[A, B] { self =>
-
+
/** A default implementation which creates a new immutable map.
*/
override def +[B1 >: B](kv: (A, B1)): Map[A, B1] = {
val b = Map.newBuilder[A, B1]
b ++= this
b += ((kv._1, kv._2))
- b.result
+ b.result()
}
/** A default implementation which creates a new immutable map.
@@ -42,6 +38,6 @@ trait DefaultMap[A, +B] extends Map[A, B] { self =>
override def - (key: A): Map[A, B] = {
val b = newBuilder
b ++= this filter (key != _._1)
- b.result
+ b.result()
}
}
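
A sketch of a minimal DefaultMap subclass: only get and iterator have to be supplied, since the + and - defaults above build a new immutable map (class name and contents are illustrative):

    class SingletonMap extends scala.collection.DefaultMap[String, Int] {
      def get(key: String): Option[Int]     = if (key == "one") Some(1) else None
      def iterator: Iterator[(String, Int)] = Iterator("one" -> 1)
    }
    new SingletonMap + ("two" -> 2)   // Map(one -> 1, two -> 2)
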
diff --git a/src/library/scala/collection/GenIterableLike.scala b/src/library/scala/collection/GenIterableLike.scala
index 2ba9a7283d..ceb97707e1 100644
--- a/src/library/scala/collection/GenIterableLike.scala
+++ b/src/library/scala/collection/GenIterableLike.scala
@@ -8,7 +8,7 @@
package scala.collection
-import generic.{ CanBuildFrom => CBF, _ }
+import generic.{ CanBuildFrom => CBF }
/** A template trait for all iterable collections which may possibly
* have their operations implemented in parallel.
diff --git a/src/library/scala/collection/GenIterableView.scala b/src/library/scala/collection/GenIterableView.scala
index ca0332e9ad..5ab48efdf3 100644
--- a/src/library/scala/collection/GenIterableView.scala
+++ b/src/library/scala/collection/GenIterableView.scala
@@ -8,11 +8,4 @@
package scala.collection
-
-import generic._
-
-
-
trait GenIterableView[+A, +Coll] extends GenIterableViewLike[A, Coll, GenIterableView[A, Coll]] { }
-
-
diff --git a/src/library/scala/collection/GenIterableViewLike.scala b/src/library/scala/collection/GenIterableViewLike.scala
index 4e4ceb4cea..e8d264cdd4 100644
--- a/src/library/scala/collection/GenIterableViewLike.scala
+++ b/src/library/scala/collection/GenIterableViewLike.scala
@@ -8,13 +8,6 @@
package scala.collection
-
-
-import generic._
-import TraversableView.NoBuilder
-
-
-
trait GenIterableViewLike[+A,
+Coll,
+This <: GenIterableView[A, Coll] with GenIterableViewLike[A, Coll, This]]
diff --git a/src/library/scala/collection/GenSeqView.scala b/src/library/scala/collection/GenSeqView.scala
index 92c8b779e9..423f8e305e 100644
--- a/src/library/scala/collection/GenSeqView.scala
+++ b/src/library/scala/collection/GenSeqView.scala
@@ -8,11 +8,4 @@
package scala.collection
-
-import generic._
-
-
-
trait GenSeqView[+A, +Coll] extends GenSeqViewLike[A, Coll, GenSeqView[A, Coll]] { }
-
-
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index 46134c921e..1080c54325 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -238,7 +238,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* // lettersOf will return a Set[Char], not a Seq
* def lettersOf(words: Seq[String]) = words.toSet flatMap (word => word.toSeq)
*
- * // xs will be a an Iterable[Int]
+ * // xs will be an Iterable[Int]
* val xs = Map("a" -> List(11,111), "b" -> List(22,222)).flatMap(_._2)
*
* // ys will be a Map[Int, Int]
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index 093db2a972..a05ee0fb54 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -119,19 +119,6 @@ trait GenTraversableOnce[+A] extends Any {
*/
def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1
- /** A syntactic sugar for out of order folding. See `fold`.
- *
- * Example:
- * {{{
- * scala> val a = LinkedList(1,2,3,4)
- * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4)
- *
- * scala> val b = (a /:\ 5)(_+_)
- * b: Int = 15
- * }}}*/
- @deprecated("use fold instead", "2.10.0")
- def /:\[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = fold(z)(op)
-
/** Applies a binary operator to a start value and all elements of this $coll,
* going left to right.
*
@@ -261,11 +248,12 @@ trait GenTraversableOnce[+A] extends Any {
* @tparam B the type of accumulated results
* @param z the initial value for the accumulated result of the partition - this
* will typically be the neutral element for the `seqop` operator (e.g.
- * `Nil` for list concatenation or `0` for summation)
+ * `Nil` for list concatenation or `0` for summation) and may be evaluated
+ * more than once
* @param seqop an operator used to accumulate results within a partition
* @param combop an associative operator used to combine results from different partitions
*/
- def aggregate[B](z: B)(seqop: (B, A) => B, combop: (B, B) => B): B
+ def aggregate[B](z: =>B)(seqop: (B, A) => B, combop: (B, B) => B): B
/** Applies a binary operator to all elements of this $coll, going right to left.
* $willNotTerminateInf
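
A sketch of aggregate under the new by-name z: because z may now be re-evaluated (once per partition in the parallel case), it should stay a cheap neutral element (the values below are illustrative):

    val sum = (1 to 1000).par.aggregate(0)(_ + _, _ + _)                     // 500500
    val len = List("ab", "cde").aggregate(0)((n, s) => n + s.length, _ + _)  // 5
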
diff --git a/src/library/scala/collection/GenTraversableView.scala b/src/library/scala/collection/GenTraversableView.scala
index cceb06882e..1d98eff8c1 100644
--- a/src/library/scala/collection/GenTraversableView.scala
+++ b/src/library/scala/collection/GenTraversableView.scala
@@ -8,11 +8,4 @@
package scala.collection
-
-import generic._
-
-
-
trait GenTraversableView[+A, +Coll] extends GenTraversableViewLike[A, Coll, GenTraversableView[A, Coll]] { }
-
-
diff --git a/src/library/scala/collection/GenTraversableViewLike.scala b/src/library/scala/collection/GenTraversableViewLike.scala
index 77fe0802bf..8c9607663b 100644
--- a/src/library/scala/collection/GenTraversableViewLike.scala
+++ b/src/library/scala/collection/GenTraversableViewLike.scala
@@ -11,8 +11,6 @@ package scala.collection
import generic._
import mutable.{ Builder, ArrayBuffer }
-import TraversableView.NoBuilder
-
trait GenTraversableViewLike[+A,
+Coll,
diff --git a/src/library/scala/collection/IndexedSeq.scala b/src/library/scala/collection/IndexedSeq.scala
index 2de0043c96..0b6e640537 100644
--- a/src/library/scala/collection/IndexedSeq.scala
+++ b/src/library/scala/collection/IndexedSeq.scala
@@ -6,8 +6,6 @@
** |/ **
\* */
-
-
package scala.collection
import generic._
@@ -28,14 +26,13 @@ trait IndexedSeq[+A] extends Seq[A]
* @define coll indexed sequence
* @define Coll `IndexedSeq`
*/
-object IndexedSeq extends SeqFactory[IndexedSeq] {
+object IndexedSeq extends IndexedSeqFactory[IndexedSeq] {
// A single CBF which can be checked against to identify
// an indexed collection type.
- override lazy val ReusableCBF: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] {
+ override val ReusableCBF: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] {
override def apply() = newBuilder[Nothing]
}
def newBuilder[A]: Builder[A, IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A]
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] =
ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
}
-
diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala
index 9d0e9cbaea..473202a8eb 100644
--- a/src/library/scala/collection/IndexedSeqLike.scala
+++ b/src/library/scala/collection/IndexedSeqLike.scala
@@ -8,7 +8,6 @@
package scala.collection
-import generic._
import mutable.ArrayBuffer
import scala.annotation.tailrec
@@ -53,7 +52,6 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] {
// pre: start >= 0, end <= self.length
@SerialVersionUID(1756321872811029277L)
protected class Elements(start: Int, end: Int) extends AbstractIterator[A] with BufferedIterator[A] with Serializable {
- private def initialSize = if (end <= start) 0 else end - start
private var index = start
private def available = (end - index) max 0
@@ -61,7 +59,7 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] {
def next(): A = {
if (index >= end)
- Iterator.empty.next
+ Iterator.empty.next()
val x = self(index)
index += 1
@@ -70,7 +68,7 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] {
def head = {
if (index >= end)
- Iterator.empty.next
+ Iterator.empty.next()
self(index)
}
diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala
index 09c4b14ba0..ade04e4de8 100755
--- a/src/library/scala/collection/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -33,11 +33,17 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
while (i < len) { f(this(i)); i += 1 }
}
+ private def prefixLengthImpl(p: A => Boolean, expectTrue: Boolean): Int = {
+ var i = 0
+ while (i < length && p(apply(i)) == expectTrue) i += 1
+ i
+ }
+
override /*IterableLike*/
- def forall(p: A => Boolean): Boolean = prefixLength(p(_)) == length
+ def forall(p: A => Boolean): Boolean = prefixLengthImpl(p, expectTrue = true) == length
override /*IterableLike*/
- def exists(p: A => Boolean): Boolean = prefixLength(!p(_)) != length
+ def exists(p: A => Boolean): Boolean = prefixLengthImpl(p, expectTrue = false) != length
override /*IterableLike*/
def find(p: A => Boolean): Option[A] = {
@@ -82,7 +88,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
b += ((this(i), that(i).asInstanceOf[B]))
i += 1
}
- b.result
+ b.result()
case _ =>
super.zip[A1, B, That](that)(bf)
}
@@ -97,7 +103,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
b += ((this(i), i))
i += 1
}
- b.result
+ b.result()
}
override /*IterableLike*/
@@ -113,7 +119,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
b += self(i)
i += 1
}
- b.result
+ b.result()
}
override /*IterableLike*/
@@ -214,7 +220,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
i -= 1
b += this(i)
}
- b.result
+ b.result()
}
override /*SeqLike*/
@@ -225,7 +231,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
if (0 < i) {
i -= 1
self(i)
- } else Iterator.empty.next
+ } else Iterator.empty.next()
}
override /*SeqLike*/
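
A behavioural sketch of the rewritten forall/exists above: results are unchanged, the private helper merely avoids allocating the negated closure that prefixLength needed (values illustrative):

    val v = Vector(2, 4, 6, 7)
    v.forall(_ % 2 == 0)   // false, the scan stops at the first odd element
    v.exists(_ > 5)        // true, the scan stops at the first element greater than 5
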
diff --git a/src/library/scala/collection/Iterable.scala b/src/library/scala/collection/Iterable.scala
index 5b73d720a8..09c9ce122c 100644
--- a/src/library/scala/collection/Iterable.scala
+++ b/src/library/scala/collection/Iterable.scala
@@ -11,7 +11,6 @@
package scala.collection
import generic._
-import scala.util.control.Breaks._
import mutable.Builder
/** A base trait for iterable collections.
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index 540bd84b79..b043d1f2a6 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -88,13 +88,13 @@ self =>
override /*TraversableLike*/ def toIterator: Iterator[A] =
iterator
override /*TraversableLike*/ def head: A =
- iterator.next
+ iterator.next()
override /*TraversableLike*/ def slice(from: Int, until: Int): Repr = {
val lo = math.max(from, 0)
val elems = until - lo
val b = newBuilder
- if (elems <= 0) b.result
+ if (elems <= 0) b.result()
else {
b.sizeHintBounded(elems, this)
var i = 0
@@ -103,14 +103,14 @@ self =>
b += it.next
i += 1
}
- b.result
+ b.result()
}
}
override /*TraversableLike*/ def take(n: Int): Repr = {
val b = newBuilder
- if (n <= 0) b.result
+ if (n <= 0) b.result()
else {
b.sizeHintBounded(n, this)
var i = 0
@@ -119,7 +119,7 @@ self =>
b += it.next
i += 1
}
- b.result
+ b.result()
}
}
@@ -130,21 +130,21 @@ self =>
var i = 0
val it = iterator
while (i < n && it.hasNext) {
- it.next
+ it.next()
i += 1
}
- (b ++= it).result
+ (b ++= it).result()
}
override /*TraversableLike*/ def takeWhile(p: A => Boolean): Repr = {
val b = newBuilder
val it = iterator
while (it.hasNext) {
- val x = it.next
- if (!p(x)) return b.result
+ val x = it.next()
+ if (!p(x)) return b.result()
b += x
}
- b.result
+ b.result()
}
/** Partitions elements in fixed size ${coll}s.
@@ -158,7 +158,7 @@ self =>
for (xs <- iterator grouped size) yield {
val b = newBuilder
b ++= xs
- b.result
+ b.result()
}
/** Groups elements in fixed size blocks by passing a "sliding window"
@@ -187,7 +187,7 @@ self =>
for (xs <- iterator.sliding(size, step)) yield {
val b = newBuilder
b ++= xs
- b.result
+ b.result()
}
/** Selects last ''n'' elements.
@@ -203,11 +203,11 @@ self =>
val lead = this.iterator drop n
var go = false
for (x <- this.seq) {
- if (lead.hasNext) lead.next
+ if (lead.hasNext) lead.next()
else go = true
if (go) b += x
}
- b.result
+ b.result()
}
/** Selects all elements except last ''n'' ones.
@@ -224,9 +224,9 @@ self =>
val it = iterator
while (lead.hasNext) {
b += it.next
- lead.next
+ lead.next()
}
- b.result
+ b.result()
}
override /*TraversableLike*/ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
@@ -234,7 +234,7 @@ self =>
val end = (start + len) min xs.length
val it = iterator
while (i < end && it.hasNext) {
- xs(i) = it.next
+ xs(i) = it.next()
i += 1
}
}
@@ -244,8 +244,8 @@ self =>
val these = this.iterator
val those = that.iterator
while (these.hasNext && those.hasNext)
- b += ((these.next, those.next))
- b.result
+ b += ((these.next(), those.next()))
+ b.result()
}
def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = {
@@ -253,12 +253,12 @@ self =>
val these = this.iterator
val those = that.iterator
while (these.hasNext && those.hasNext)
- b += ((these.next, those.next))
+ b += ((these.next(), those.next()))
while (these.hasNext)
- b += ((these.next, thatElem))
+ b += ((these.next(), thatElem))
while (those.hasNext)
- b += ((thisElem, those.next))
- b.result
+ b += ((thisElem, those.next()))
+ b.result()
}
def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = {
@@ -268,7 +268,7 @@ self =>
b += ((x, i))
i +=1
}
- b.result
+ b.result()
}
def sameElements[B >: A](that: GenIterable[B]): Boolean = {
diff --git a/src/library/scala/collection/IterableProxy.scala b/src/library/scala/collection/IterableProxy.scala
index 2d041928cc..ddb2502965 100644
--- a/src/library/scala/collection/IterableProxy.scala
+++ b/src/library/scala/collection/IterableProxy.scala
@@ -8,8 +8,6 @@
package scala.collection
-import generic._
-
/** This trait implements a proxy for iterable objects. It forwards all calls
* to a different iterable object.
*
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index 3a81a3422f..b195ae4bc7 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -9,7 +9,6 @@
package scala.collection
import generic._
-import TraversableView.NoBuilder
import immutable.Stream
import scala.language.implicitConversions
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 2bb5bd1df9..43db7c55e0 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -368,7 +368,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
def flatMap[B](f: A => GenTraversableOnce[B]): Iterator[B] = new AbstractIterator[B] {
private var cur: Iterator[B] = empty
def hasNext: Boolean =
- cur.hasNext || self.hasNext && { cur = f(self.next).toIterator; hasNext }
+ cur.hasNext || self.hasNext && { cur = f(self.next()).toIterator; hasNext }
def next(): B = (if (hasNext) cur else empty).next()
}
@@ -408,7 +408,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
def corresponds[B](that: GenTraversableOnce[B])(p: (A, B) => Boolean): Boolean = {
val that0 = that.toIterator
while (hasNext && that0.hasNext)
- if (!p(next, that0.next)) return false
+ if (!p(next(), that0.next())) return false
hasNext == that0.hasNext
}
@@ -562,7 +562,6 @@ trait Iterator[+A] extends TraversableOnce[A] {
* handling of structural calls. It's not what's intended here.
*/
class Leading extends AbstractIterator[A] {
- private var isDone = false
val lookahead = new mutable.Queue[A]
def advance() = {
self.hasNext && p(self.head) && {
@@ -572,7 +571,6 @@ trait Iterator[+A] extends TraversableOnce[A] {
}
def finish() = {
while (advance()) ()
- isDone = true
}
def hasNext = lookahead.nonEmpty || advance()
def next() = {
@@ -632,7 +630,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
*/
def zip[B](that: Iterator[B]): Iterator[(A, B)] = new AbstractIterator[(A, B)] {
def hasNext = self.hasNext && that.hasNext
- def next = (self.next, that.next)
+ def next = (self.next(), that.next())
}
/** Appends an element value to this iterator until a given target length is reached.
@@ -652,9 +650,9 @@ trait Iterator[+A] extends TraversableOnce[A] {
def hasNext = self.hasNext || count < len
def next = {
count += 1
- if (self.hasNext) self.next
+ if (self.hasNext) self.next()
else if (count <= len) elem
- else empty.next
+ else empty.next()
}
}
@@ -669,7 +667,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
var idx = 0
def hasNext = self.hasNext
def next = {
- val ret = (self.next, idx)
+ val ret = (self.next(), idx)
idx += 1
ret
}
@@ -1054,12 +1052,12 @@ trait Iterator[+A] extends TraversableOnce[A] {
val e = self.next()
gap enqueue e
e
- } else gap.dequeue
+ } else gap.dequeue()
}
// to verify partnerhood we use reference equality on gap because
// type testing does not discriminate based on origin.
private def compareGap(queue: scala.collection.mutable.Queue[A]) = gap eq queue
- override def hashCode = gap.hashCode
+ override def hashCode = gap.hashCode()
override def equals(other: Any) = other match {
case x: Partner => x.compareGap(gap) && gap.isEmpty
case _ => super.equals(other)
@@ -1118,6 +1116,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
xs(i) = next()
i += 1
}
+ // TODO: return i - start so the caller knows how many values read?
}
/** Tests if another iterator produces the same values as this one.
@@ -1140,7 +1139,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
def toTraversable: Traversable[A] = toStream
def toIterator: Iterator[A] = self
def toStream: Stream[A] =
- if (self.hasNext) Stream.cons(self.next, self.toStream)
+ if (self.hasNext) Stream.cons(self.next(), self.toStream)
else Stream.empty[A]
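
A sketch of padTo, whose iterator implementation appears in the hunk above: it only appends, never truncates (values illustrative):

    Iterator(1, 2).padTo(4, 0).toList      // List(1, 2, 0, 0)
    Iterator(1, 2, 3).padTo(2, 0).toList   // List(1, 2, 3): already long enough
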
diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala
index 59d4259c70..3cb7edacd6 100644
--- a/src/library/scala/collection/JavaConversions.scala
+++ b/src/library/scala/collection/JavaConversions.scala
@@ -8,7 +8,6 @@
package scala.collection
-import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import convert._
/** A collection of implicit conversions supporting interoperability between
@@ -22,7 +21,6 @@ import convert._
* scala.collection.mutable.Buffer <=> java.util.List
* scala.collection.mutable.Set <=> java.util.Set
* scala.collection.mutable.Map <=> java.util.{ Map, Dictionary }
- * scala.collection.mutable.ConcurrentMap (deprecated since 2.10) <=> java.util.concurrent.ConcurrentMap
* scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap
*}}}
* In all cases, converting from a source type to a target type and back
@@ -91,42 +89,6 @@ object JavaConversions extends WrapAsScala with WrapAsJava {
@deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableSeqWrapper = Wrappers.MutableSeqWrapper
@deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableSetWrapper = Wrappers.MutableSetWrapper
@deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val SeqWrapper = Wrappers.SeqWrapper
-
- // Note to implementors: the cavalcade of deprecated methods herein should
- // serve as a warning to any who follow: don't overload implicit methods.
-
- @deprecated("use bufferAsJavaList instead", "2.9.0")
- def asJavaList[A](b : mutable.Buffer[A]): ju.List[A] = bufferAsJavaList[A](b)
-
- @deprecated("use mutableSeqAsJavaList instead", "2.9.0")
- def asJavaList[A](b : mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList[A](b)
-
- @deprecated("use seqAsJavaList instead", "2.9.0")
- def asJavaList[A](b : Seq[A]): ju.List[A] = seqAsJavaList[A](b)
-
- @deprecated("use mutableSetAsJavaSet instead", "2.9.0")
- def asJavaSet[A](s : mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet[A](s)
-
- @deprecated("use setAsJavaSet instead", "2.9.0")
- def asJavaSet[A](s: Set[A]): ju.Set[A] = setAsJavaSet[A](s)
-
- @deprecated("use mutableMapAsJavaMap instead", "2.9.0")
- def asJavaMap[A, B](m : mutable.Map[A, B]): ju.Map[A, B] = mutableMapAsJavaMap[A, B](m)
-
- @deprecated("use mapAsJavaMap instead", "2.9.0")
- def asJavaMap[A, B](m : Map[A, B]): ju.Map[A, B] = mapAsJavaMap[A, B](m)
-
- @deprecated("use iterableAsScalaIterable instead", "2.9.0")
- def asScalaIterable[A](i : jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable[A](i)
-
- @deprecated("use collectionAsScalaIterable instead", "2.9.0")
- def asScalaIterable[A](i : ju.Collection[A]): Iterable[A] = collectionAsScalaIterable[A](i)
-
- @deprecated("use mapAsScalaMap instead", "2.9.0")
- def asScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = mapAsScalaMap[A, B](m)
-
- @deprecated("use propertiesAsScalaMap instead", "2.9.0")
- def asScalaMap(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p)
}
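
A sketch of the implicit-conversion style that JavaConversions keeps providing once the deprecated overloads above are removed (2.11-era API assumed; values illustrative):

    import scala.collection.JavaConversions._
    val jl: java.util.List[Int] = Seq(1, 2, 3)           // seqAsJavaList applied implicitly
    val sc: Seq[Int] = new java.util.ArrayList[Int]()    // asScalaBuffer applied implicitly
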
diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala
index ab3ac8925c..7700d90560 100755
--- a/src/library/scala/collection/JavaConverters.scala
+++ b/src/library/scala/collection/JavaConverters.scala
@@ -8,14 +8,12 @@
package scala.collection
-import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import convert._
// TODO: I cleaned all this documentation up in JavaConversions, but the
// documentation in here is basically the pre-cleaned-up version with minor
// additions. Would be nice to have in one place.
-
/** A collection of decorators that allow converting between
* Scala and Java collections using `asScala` and `asJava` methods.
*
@@ -26,7 +24,7 @@ import convert._
* - `scala.collection.mutable.Buffer` <=> `java.util.List`
* - `scala.collection.mutable.Set` <=> `java.util.Set`
* - `scala.collection.mutable.Map` <=> `java.util.Map`
- * - `scala.collection.mutable.ConcurrentMap` <=> `java.util.concurrent.ConcurrentMap`
+ * - `scala.collection.concurrent.Map` <=> `java.util.concurrent.ConcurrentMap`
*
* In all cases, converting from a source type to a target type and back
* again will return the original source object, e.g.
@@ -67,37 +65,4 @@ object JavaConverters extends DecorateAsJava with DecorateAsScala {
type AsJavaEnumeration[A] = Decorators.AsJavaEnumeration[A]
@deprecated("Don't access these decorators directly.", "2.10.0")
type AsJavaDictionary[A, B] = Decorators.AsJavaDictionary[A, B]
-
- @deprecated("Use bufferAsJavaListConverter instead", "2.9.0")
- def asJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] = bufferAsJavaListConverter(b)
-
- @deprecated("Use mutableSeqAsJavaListConverter instead", "2.9.0")
- def asJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] = mutableSeqAsJavaListConverter(b)
-
- @deprecated("Use seqAsJavaListConverter instead", "2.9.0")
- def asJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] = seqAsJavaListConverter(b)
-
- @deprecated("Use mutableSetAsJavaSetConverter instead", "2.9.0")
- def asJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] = mutableSetAsJavaSetConverter(s)
-
- @deprecated("Use setAsJavaSetConverter instead", "2.9.0")
- def asJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] = setAsJavaSetConverter(s)
-
- @deprecated("use mutableMapAsJavaMapConverter instead", "2.9.0")
- def asJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] = mutableMapAsJavaMapConverter(m)
-
- @deprecated("Use mapAsJavaMapConverter instead", "2.9.0")
- def asJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] = mapAsJavaMapConverter(m)
-
- @deprecated("Use iterableAsScalaIterableConverter instead", "2.9.0")
- def asScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] = iterableAsScalaIterableConverter(i)
-
- @deprecated("Use collectionAsScalaIterableConverter instead", "2.9.0")
- def asScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] = collectionAsScalaIterableConverter(i)
-
- @deprecated("Use mapAsScalaMapConverter instead", "2.9.0")
- def asScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] = mapAsScalaMapConverter(m)
-
- @deprecated("Use propertiesAsScalaMapConverter instead", "2.9.0")
- def asScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] = propertiesAsScalaMapConverter(p)
}
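
For comparison, a sketch of the decorator style that replaces the removed *Converter overloads above (2.11-era API assumed; values illustrative):

    import scala.collection.JavaConverters._
    val jl: java.util.List[Int] = Seq(1, 2, 3).asJava
    val buf: scala.collection.mutable.Buffer[Int] = new java.util.ArrayList[Int]().asScala
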
diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala
index 78108a9c0f..a4bb194f8a 100644
--- a/src/library/scala/collection/LinearSeqLike.scala
+++ b/src/library/scala/collection/LinearSeqLike.scala
@@ -6,13 +6,9 @@
** |/ **
\* */
-
package scala.collection
-import generic._
-import mutable.ListBuffer
import immutable.List
-import scala.util.control.Breaks._
import scala.annotation.tailrec
/** A template trait for linear sequences of type `LinearSeq[A]`.
@@ -59,14 +55,14 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr
def next(): A =
if (hasNext) {
val result = these.head; these = these.tail; result
- } else Iterator.empty.next
+ } else Iterator.empty.next()
/** Have to clear `these` so the iterator is exhausted like
* it would be without the optimization.
*/
override def toList: List[A] = {
val xs = these.toList
- these = newBuilder.result
+ these = newBuilder.result()
xs
}
}
diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala
index 81cccea519..9cf37981f4 100755
--- a/src/library/scala/collection/LinearSeqOptimized.scala
+++ b/src/library/scala/collection/LinearSeqOptimized.scala
@@ -8,10 +8,8 @@
package scala.collection
-import generic._
import mutable.ListBuffer
import immutable.List
-import scala.util.control.Breaks._
import scala.annotation.tailrec
/** A template trait for linear sequences of type `LinearSeq[A]` which optimizes
@@ -84,7 +82,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
}
override /*SeqLike*/
- def contains(elem: Any): Boolean = {
+ def contains[A1 >: A](elem: A1): Boolean = {
var these = this
while (!these.isEmpty) {
if (these.head == elem) return true
@@ -152,7 +150,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
b += these.head
these = these.tail
}
- b.result
+ b.result()
}
override /*TraversableLike*/
@@ -187,7 +185,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
these = these.tail
lead = lead.tail
}
- b.result
+ b.result()
}
override /*IterableLike*/
@@ -195,7 +193,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
var these: Repr = repr
var count = from max 0
if (until <= count)
- return newBuilder.result
+ return newBuilder.result()
val b = newBuilder
var sliceElems = until - count
@@ -208,7 +206,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
b += these.head
these = these.tail
}
- b.result
+ b.result()
}
override /*IterableLike*/
@@ -219,7 +217,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
b += these.head
these = these.tail
}
- b.result
+ b.result()
}
override /*TraversableLike*/
@@ -230,7 +228,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
b += these.head
these = these.tail
}
- (b.result, these)
+ (b.result(), these)
}
override /*IterableLike*/
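
Editor's note on the hunks above: besides the mechanical `result()`/`next()` parenthesisation, `contains` gains a type parameter (`A1 >: A`) here and, further down, in `SeqLike`, replacing the old untyped `Any` parameter. The loop-and-builder methods being touched (`slice`, `dropRight`, `span`, and friends) are the ones every `List` call goes through; two REPL-style reminders of the behaviour they implement (standard library semantics, shown only for context):

    scala> List(1, 2, 3, 4, 5).slice(1, 3)
    res0: List[Int] = List(2, 3)

    scala> List(1, 2, 3, 4, 5).dropRight(2)
    res1: List[Int] = List(1, 2, 3)
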
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index 93d02a435c..cc0129202f 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -181,7 +181,7 @@ self =>
def keysIterator: Iterator[A] = new AbstractIterator[A] {
val iter = self.iterator
def hasNext = iter.hasNext
- def next() = iter.next._1
+ def next() = iter.next()._1
}
/** Collects all keys of this map in an iterable collection.
@@ -213,7 +213,7 @@ self =>
def valuesIterator: Iterator[B] = new AbstractIterator[B] {
val iter = self.iterator
def hasNext = iter.hasNext
- def next() = iter.next._2
+ def next() = iter.next()._2
}
/** Defines the default value computation for the map,
diff --git a/src/library/scala/collection/MapProxyLike.scala b/src/library/scala/collection/MapProxyLike.scala
index 44b39f65da..ad09f7b970 100644
--- a/src/library/scala/collection/MapProxyLike.scala
+++ b/src/library/scala/collection/MapProxyLike.scala
@@ -8,8 +8,6 @@
package scala.collection
-import generic._
-
// Methods could be printed by cat MapLike.scala | egrep '^ (override )?def'
/** This trait implements a proxy for Map objects. It forwards
diff --git a/src/library/scala/collection/Parallelizable.scala b/src/library/scala/collection/Parallelizable.scala
index d97c44abc0..626dfa4032 100644
--- a/src/library/scala/collection/Parallelizable.scala
+++ b/src/library/scala/collection/Parallelizable.scala
@@ -39,7 +39,7 @@ trait Parallelizable[+A, +ParRepr <: Parallel] extends Any {
def par: ParRepr = {
val cb = parCombiner
for (x <- seq) cb += x
- cb.result
+ cb.result()
}
/** The default `par` implementation uses the combiner provided by this method
diff --git a/src/library/scala/collection/Searching.scala b/src/library/scala/collection/Searching.scala
new file mode 100644
index 0000000000..03eb4283ad
--- /dev/null
+++ b/src/library/scala/collection/Searching.scala
@@ -0,0 +1,116 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+import scala.annotation.tailrec
+import scala.collection.generic.IsSeqLike
+import scala.math.Ordering
+
+/** A collection of wrappers that provide sequence classes with search functionality.
+ *
+ * Example usage:
+ * {{{
+ * import scala.collection.Searching._
+ * val l = List(1, 2, 3, 4, 5)
+ * l.search(3)
+ * // == Found(2)
+ * }}}
+ */
+object Searching {
+ sealed abstract class SearchResult {
+ def insertionPoint: Int
+ }
+
+ case class Found(foundIndex: Int) extends SearchResult {
+ override def insertionPoint = foundIndex
+ }
+ case class InsertionPoint(insertionPoint: Int) extends SearchResult
+
+ class SearchImpl[A, Repr](val coll: SeqLike[A, Repr]) {
+ /** Search the sorted sequence for a specific element. If the sequence is an
+ * `IndexedSeq`, a binary search is used. Otherwise, a linear search is used.
+ *
+ * The sequence should be sorted with the same `Ordering` before calling; otherwise,
+ * the results are undefined.
+ *
+ * @see [[scala.collection.IndexedSeq]]
+ * @see [[scala.math.Ordering]]
+ * @see [[scala.collection.SeqLike]], method `sorted`
+ *
+ * @param elem the element to find.
+ * @param ord the ordering to be used to compare elements.
+ *
+ * @return a `Found` value containing the index corresponding to the element in the
+ * sequence, or the `InsertionPoint` where the element would be inserted if
+ * the element is not in the sequence.
+ */
+ final def search[B >: A](elem: B)(implicit ord: Ordering[B]): SearchResult =
+ coll match {
+ case _: IndexedSeq[A] => binarySearch(elem, -1, coll.length)(ord)
+ case _ => linearSearch(coll.view, elem, 0)(ord)
+ }
+
+ /** Search within an interval in the sorted sequence for a specific element. If the
+ * sequence is an IndexedSeq, a binary search is used. Otherwise, a linear search
+ * is used.
+ *
+ * The sequence should be sorted with the same `Ordering` before calling; otherwise,
+ * the results are undefined.
+ *
+ * @see [[scala.collection.IndexedSeq]]
+ * @see [[scala.math.Ordering]]
+ * @see [[scala.collection.SeqLike]], method `sorted`
+ *
+ * @param elem the element to find.
+ * @param from the index where the search starts.
+ * @param to the index following where the search ends.
+ * @param ord the ordering to be used to compare elements.
+ *
+ * @return a `Found` value containing the index corresponding to the element in the
+ * sequence, or the `InsertionPoint` where the element would be inserted if
+ * the element is not in the sequence.
+ */
+ final def search[B >: A](elem: B, from: Int, to: Int)
+ (implicit ord: Ordering[B]): SearchResult =
+ coll match {
+ case _: IndexedSeq[A] => binarySearch(elem, from-1, to)(ord)
+ case _ => linearSearch(coll.view(from, to), elem, from)(ord)
+ }
+
+ @tailrec
+ private def binarySearch[B >: A](elem: B, from: Int, to: Int)
+ (implicit ord: Ordering[B]): SearchResult = {
+ if ((to-from) == 1) InsertionPoint(from) else {
+ val idx = from+(to-from)/2
+ math.signum(ord.compare(elem, coll(idx))) match {
+ case -1 => binarySearch(elem, from, idx)(ord)
+ case 1 => binarySearch(elem, idx, to)(ord)
+ case _ => Found(idx)
+ }
+ }
+ }
+
+ private def linearSearch[B >: A](c: SeqView[A, Repr], elem: B, offset: Int)
+ (implicit ord: Ordering[B]): SearchResult = {
+ var idx = offset
+ val it = c.iterator
+ while (it.hasNext) {
+ val cur = it.next()
+ if (ord.equiv(elem, cur)) return Found(idx)
+ else if (ord.lt(elem, cur)) return InsertionPoint(idx-1)
+ idx += 1
+ }
+ InsertionPoint(idx)
+ }
+
+ }
+
+ implicit def search[Repr, A](coll: Repr)
+ (implicit fr: IsSeqLike[Repr]): SearchImpl[fr.A, Repr] = new SearchImpl(fr.conversion(coll))
+}
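
A brief REPL-style sketch of how the new `Searching` API above is meant to be used (outputs assume a library that includes this file; the not-found case is only described, since it returns an `InsertionPoint`):

    scala> import scala.collection.Searching._
    import scala.collection.Searching._

    scala> List(1, 2, 3, 4, 5).search(3)     // linear search: List is not an IndexedSeq
    res0: SearchResult = Found(2)

    scala> Vector(1, 3, 5, 7).search(5)      // binary search on an IndexedSeq
    res1: SearchResult = Found(2)

    scala> Vector(1, 3, 5, 7).search(4)      // not present: an InsertionPoint is returned instead

As the scaladoc above notes, the sequence must already be sorted by the implicit `Ordering`, otherwise the result is undefined.
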
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index 1be0dba29f..a83a6fe6a1 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -107,7 +107,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
def segmentLength(p: A => Boolean, from: Int): Int = {
var i = 0
- var it = iterator.drop(from)
+ val it = iterator.drop(from)
while (it.hasNext && p(it.next()))
i += 1
i
@@ -115,7 +115,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
def indexWhere(p: A => Boolean, from: Int): Int = {
var i = from
- var it = iterator.drop(from)
+ val it = iterator.drop(from)
while (it.hasNext) {
if (p(it.next())) return i
else i += 1
@@ -127,7 +127,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
def lastIndexWhere(p: A => Boolean, end: Int): Int = {
var i = length - 1
val it = reverseIterator
- while (it.hasNext && { val elem = it.next; (i > end || !p(elem)) }) i -= 1
+ while (it.hasNext && { val elem = it.next(); (i > end || !p(elem)) }) i -= 1
i
}
@@ -156,10 +156,10 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
def hasNext = _hasNext
def next(): Repr = {
if (!hasNext)
- Iterator.empty.next
+ Iterator.empty.next()
val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms
- val result = (self.newBuilder ++= forcedElms).result
+ val result = (self.newBuilder ++= forcedElms).result()
var i = idxs.length - 2
while(i >= 0 && idxs(i) >= idxs(i+1))
i -= 1
@@ -181,10 +181,10 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
result
}
private def swap(i: Int, j: Int) {
- var tmpI = idxs(i)
+ val tmpI = idxs(i)
idxs(i) = idxs(j)
idxs(j) = tmpI
- var tmpE = elms(i)
+ val tmpE = elms(i)
elms(i) = elms(j)
elms(j) = tmpE
}
@@ -208,13 +208,13 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
def hasNext = _hasNext
def next(): Repr = {
if (!hasNext)
- Iterator.empty.next
+ Iterator.empty.next()
/** Calculate this result. */
val buf = self.newBuilder
for(k <- 0 until nums.length; j <- 0 until nums(k))
buf += elms(offs(k)+j)
- val res = buf.result
+ val res = buf.result()
/** Prepare for the next call to next. */
var idx = nums.length - 1
@@ -268,7 +268,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
b.sizeHint(this)
for (x <- xs)
b += x
- b.result
+ b.result()
}
def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
@@ -279,7 +279,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
for (x <- xs)
b += f(x)
- b.result
+ b.result()
}
/** An iterator yielding elements in reversed order.
@@ -335,7 +335,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
if (from > l) -1
else if (tl < 1) clippedFrom
else if (l < tl) -1
- else SeqLike.kmpSearch(thisCollection, clippedFrom, l, that.seq, 0, tl, true)
+ else SeqLike.kmpSearch(thisCollection, clippedFrom, l, that.seq, 0, tl, forward = true)
}
else {
var i = from
@@ -372,7 +372,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
if (end < 0) -1
else if (tl < 1) clippedL
else if (l < tl) -1
- else SeqLike.kmpSearch(thisCollection, 0, clippedL+tl, that.seq, 0, tl, false)
+ else SeqLike.kmpSearch(thisCollection, 0, clippedL+tl, that.seq, 0, tl, forward = false)
}
/** Tests whether this $coll contains a given sequence as a slice.
@@ -390,7 +390,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
* @return `true` if this $coll has an element that is equal (as
* determined by `==`) to `elem`, `false` otherwise.
*/
- def contains(elem: Any): Boolean = exists (_ == elem)
+ def contains[A1 >: A](elem: A1): Boolean = exists (_ == elem)
/** Produces a new sequence which contains all elements of this $coll and also all elements of
* a given sequence. `xs union ys` is equivalent to `xs ++ ys`.
@@ -442,7 +442,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
for (x <- this)
if (occ(x) == 0) b += x
else occ(x) -= 1
- b.result
+ b.result()
}
/** Computes the multiset intersection between this $coll and another sequence.
@@ -473,7 +473,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
b += x
occ(x) -= 1
}
- b.result
+ b.result()
}
private def occCounts[B](sq: Seq[B]): mutable.Map[B, Int] = {
@@ -496,7 +496,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
seen += x
}
}
- b.result
+ b.result()
}
def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
@@ -505,7 +505,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
b ++= toCollection(prefix)
b ++= patch.seq
b ++= toCollection(rest).view drop replaced
- b.result
+ b.result()
}
def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
@@ -514,21 +514,21 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
b ++= toCollection(prefix)
b += elem
b ++= toCollection(rest).view.tail
- b.result
+ b.result()
}
def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
b += elem
b ++= thisCollection
- b.result
+ b.result()
}
def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
b ++= thisCollection
b += elem
- b.result
+ b.result()
}
def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
@@ -540,14 +540,14 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
b += elem
diff -= 1
}
- b.result
+ b.result()
}
def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = {
val i = this.iterator
val j = that.iterator
while (i.hasNext && j.hasNext)
- if (!p(i.next, j.next))
+ if (!p(i.next(), j.next()))
return false
!i.hasNext && !j.hasNext
@@ -616,7 +616,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
val b = newBuilder
b.sizeHint(len)
for (x <- arr) b += x
- b.result
+ b.result()
}
/** Converts this $coll to a sequence.
@@ -682,7 +682,7 @@ object SeqLike {
val wit = W.iterator.drop(n0)
var i = if (forward) 0 else (n1-n0-1)
while (i != done) {
- Warr(i) = wit.next.asInstanceOf[AnyRef]
+ Warr(i) = wit.next().asInstanceOf[AnyRef]
i += delta
}
@@ -778,15 +778,15 @@ object SeqLike {
case _ =>
// We had better not index into S directly!
val iter = S.iterator.drop(m0)
- val Wopt = kmpOptimizeWord(W, n0, n1, true)
+ val Wopt = kmpOptimizeWord(W, n0, n1, forward = true)
val T = kmpJumpTable(Wopt, n1-n0)
- var cache = new Array[AnyRef](n1-n0) // Ring buffer--need a quick way to do a look-behind
+ val cache = new Array[AnyRef](n1-n0) // Ring buffer--need a quick way to do a look-behind
var largest = 0
var i, m = 0
var answer = -1
while (m+m0+n1-n0 <= m1) {
while (i+m >= largest) {
- cache(largest%(n1-n0)) = iter.next.asInstanceOf[AnyRef]
+ cache(largest%(n1-n0)) = iter.next().asInstanceOf[AnyRef]
largest += 1
}
if (Wopt(i) == cache((i+m)%(n1-n0))) {
@@ -851,7 +851,7 @@ object SeqLike {
else if (s1 - s0 < t1 - t0) -1 // Source is too short to find target
else {
// Nontrivial search
- val ans = kmpSearch(source, s0, s1, target, t0, t1, true)
+ val ans = kmpSearch(source, s0, s1, target, t0, t1, forward = true)
if (ans < 0) ans else ans - math.min(slen, sourceOffset)
}
}
@@ -883,7 +883,7 @@ object SeqLike {
else if (fixed_s1 - s0 < t1 - t0) -1 // Source is too short to find target
else {
// Nontrivial search
- val ans = kmpSearch(source, s0, fixed_s1, target, t0, t1, false)
+ val ans = kmpSearch(source, s0, fixed_s1, target, t0, t1, forward = false)
if (ans < 0) ans else ans - s0
}
}
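
The `SeqLike` hunks are mostly mechanical (`result()`, `next()`, `val` over `var`, named `forward = ...` arguments into the KMP helpers) plus the same `contains[A1 >: A]` signature change as in `LinearSeqOptimized`. For reference, the slice-search entry points that the KMP machinery above backs (standard behaviour, REPL-style):

    scala> List(1, 2, 3, 4, 5).indexOfSlice(List(3, 4))
    res0: Int = 2

    scala> "mississippi".containsSlice("ssi")
    res1: Boolean = true
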
diff --git a/src/library/scala/collection/SeqProxyLike.scala b/src/library/scala/collection/SeqProxyLike.scala
index 5e8030d1e4..ee88ee3da3 100644
--- a/src/library/scala/collection/SeqProxyLike.scala
+++ b/src/library/scala/collection/SeqProxyLike.scala
@@ -50,7 +50,7 @@ trait SeqProxyLike[+A, +Repr <: SeqLike[A, Repr] with Seq[A]] extends SeqLike[A,
override def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = self.lastIndexOfSlice(that)
override def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int = self.lastIndexOfSlice(that, end)
override def containsSlice[B](that: GenSeq[B]): Boolean = self.indexOfSlice(that) != -1
- override def contains(elem: Any): Boolean = self.contains(elem)
+ override def contains[A1 >: A](elem: A1): Boolean = self.contains(elem)
override def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.union(that)(bf)
override def diff[B >: A](that: GenSeq[B]): Repr = self.diff(that)
override def intersect[B >: A](that: GenSeq[B]): Repr = self.intersect(that)
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index 5f2bf902b1..27536791a2 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -10,7 +10,6 @@ package scala.collection
import generic._
import Seq.fill
-import TraversableView.NoBuilder
/** A template trait for non-strict views of sequences.
* $seqViewInfo
diff --git a/src/library/scala/collection/Sequentializable.scala.disabled b/src/library/scala/collection/Sequentializable.scala.disabled
deleted file mode 100644
index df457671a6..0000000000
--- a/src/library/scala/collection/Sequentializable.scala.disabled
+++ /dev/null
@@ -1,10 +0,0 @@
-package scala.collection
-
-
-
-
-trait Sequentializable[+T, +Repr] {
-
- def seq: Repr
-
-}
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index a6ebcc0e20..9fd24317f2 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -180,14 +180,14 @@ self =>
def hasNext = len <= elms.size || itr.hasNext
def next = {
if (!itr.hasNext) {
- if (len > elms.size) Iterator.empty.next
+ if (len > elms.size) Iterator.empty.next()
else {
itr = new SubsetsItr(elms, len)
len += 1
}
}
- itr.next
+ itr.next()
}
}
@@ -205,11 +205,11 @@ self =>
def hasNext = _hasNext
def next(): This = {
- if (!hasNext) Iterator.empty.next
+ if (!hasNext) Iterator.empty.next()
val buf = self.newBuilder
idxs.slice(0, len) foreach (idx => buf += elms(idx))
- val result = buf.result
+ val result = buf.result()
var i = len - 1
while (i >= 0 && idxs(i) == idxs(i+1)-1) i -= 1
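
The iterators patched above back `Set`'s `subsets` methods; a quick reminder of what they produce (standard behaviour, shown only for context):

    scala> Set(1, 2, 3).subsets().size     // all 2^3 subsets, including the empty set
    res0: Int = 8

    scala> Set(1, 2, 3).subsets(2).size    // only the 2-element subsets
    res1: Int = 3
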
diff --git a/src/library/scala/collection/SetProxyLike.scala b/src/library/scala/collection/SetProxyLike.scala
index 5196f39917..265d1c4806 100644
--- a/src/library/scala/collection/SetProxyLike.scala
+++ b/src/library/scala/collection/SetProxyLike.scala
@@ -6,11 +6,8 @@
** |/ **
\* */
-
package scala.collection
-import generic._
-
// Methods could be printed by cat SetLike.scala | egrep '^ (override )?def'
/** This trait implements a proxy for sets. It forwards
diff --git a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala
index c81c16e8bb..86fcfac94d 100644
--- a/src/library/scala/collection/SortedMap.scala
+++ b/src/library/scala/collection/SortedMap.scala
@@ -40,13 +40,13 @@ object SortedMap extends SortedMapFactory[SortedMap] {
val b = SortedMap.newBuilder[A, B1]
b ++= this
b += ((kv._1, kv._2))
- b.result
+ b.result()
}
override def - (key: A): SortedMap[A, B] = {
val b = newBuilder
for (kv <- this; if kv._1 != key) b += kv
- b.result
+ b.result()
}
}
diff --git a/src/library/scala/collection/SortedMapLike.scala b/src/library/scala/collection/SortedMapLike.scala
index 57ad3497c7..934ed831f5 100644
--- a/src/library/scala/collection/SortedMapLike.scala
+++ b/src/library/scala/collection/SortedMapLike.scala
@@ -42,6 +42,7 @@ self =>
val map = self.rangeImpl(from, until)
new map.DefaultKeySortedSet
}
+ override def keysIteratorFrom(start: A) = self.keysIteratorFrom(start)
}
/** Add a key/value pair to this map.
@@ -68,7 +69,7 @@ self =>
* @param elems the remaining elements to add.
*/
override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): SortedMap[A, B1] = {
- var m = this + elem1 + elem2;
+ var m = this + elem1 + elem2
for (e <- elems) m = m + e
m
}
@@ -76,11 +77,17 @@ self =>
override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] {
implicit def ordering: Ordering[A] = self.ordering
override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p)
+ override def iteratorFrom(start: A) = self iteratorFrom start filter {case (k, _) => p(k)}
+ override def keysIteratorFrom(start: A) = self keysIteratorFrom start filter p
+ override def valuesIteratorFrom(start: A) = self iteratorFrom start collect {case (k,v) if p(k) => v}
}
override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] {
implicit def ordering: Ordering[A] = self.ordering
override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f)
+ override def iteratorFrom(start: A) = (self iteratorFrom start) map {case (k,v) => (k, f(v))}
+ override def keysIteratorFrom(start: A) = self keysIteratorFrom start
+ override def valuesIteratorFrom(start: A) = self valuesIteratorFrom start map f
}
/** Adds a number of elements provided by a traversable object
@@ -91,6 +98,28 @@ self =>
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] =
((repr: SortedMap[A, B1]) /: xs.seq) (_ + _)
+ /**
+ * Creates an iterator over all the key/value pairs
+ * contained in this map having a key greater than or
+ * equal to `start` according to the ordering of
+ * this map. x.iteratorFrom(y) is equivalent
+ * to but often more efficient than x.from(y).iterator.
+ *
+ * @param start The lower bound (inclusive)
+ * on the keys to be returned
+ */
+ def iteratorFrom(start: A): Iterator[(A, B)]
+ /**
+ * Creates an iterator over all the values contained in this
+ * map that are associated with a key greater than or equal to `start`
+ * according to the ordering of this map. x.valuesIteratorFrom(y) is
+ * equivalent to but often more efficient than
+ * x.from(y).valuesIterator.
+ *
+ * @param start The lower bound (inclusive)
+ * on the keys to be returned
+ */
+ def valuesIteratorFrom(start: A): Iterator[B]
}
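
A short REPL-style illustration of the new `iteratorFrom`/`valuesIteratorFrom` contract on sorted maps (assumes a 2.11-era library in which concrete maps such as `immutable.TreeMap` implement the methods introduced here):

    scala> import scala.collection.immutable.SortedMap
    scala> val m = SortedMap(1 -> "a", 2 -> "b", 3 -> "c")

    scala> m.iteratorFrom(2).toList          // entries with key >= 2, in key order
    res0: List[(Int, String)] = List((2,b), (3,c))

    scala> m.valuesIteratorFrom(2).toList
    res1: List[String] = List(b, c)
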
diff --git a/src/library/scala/collection/SortedSetLike.scala b/src/library/scala/collection/SortedSetLike.scala
index 71b45c72ff..6d1d1ac111 100644
--- a/src/library/scala/collection/SortedSetLike.scala
+++ b/src/library/scala/collection/SortedSetLike.scala
@@ -40,4 +40,14 @@ self =>
case that: SortedSet[_] if that.ordering == ordering => that.hasAll(this.iterator)
case that => super.subsetOf(that)
}
+
+ /**
+ * Creates an iterator that contains all values from this collection
+ * greater than or equal to `start` according to the ordering of
+ * this collection. x.iteratorFrom(y) is equivalent to but will usually
+ * be more efficient than x.from(y).iterator
+ *
+ * @param start The lower-bound (inclusive) of the iterator
+ */
+ def iteratorFrom(start: A): Iterator[A] = keysIteratorFrom(start)
}
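
And the corresponding sorted-set form, delegating to `keysIteratorFrom` as defined above (same library assumption):

    scala> import scala.collection.immutable.TreeSet
    scala> TreeSet(1, 3, 5, 7).iteratorFrom(4).toList
    res0: List[Int] = List(5, 7)
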
diff --git a/src/library/scala/collection/Traversable.scala b/src/library/scala/collection/Traversable.scala
index 36ef230a42..4ca2095f4c 100644
--- a/src/library/scala/collection/Traversable.scala
+++ b/src/library/scala/collection/Traversable.scala
@@ -6,12 +6,10 @@
** |/ **
\* */
-
-
package scala.collection
import generic._
-import mutable.{Builder, Buffer, ArrayBuffer, ListBuffer}
+import mutable.Builder
import scala.util.control.Breaks
/** A trait for traversable collections.
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 5f193eb211..fdbc5e9857 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -77,7 +77,7 @@ trait TraversableLike[+A, +Repr] extends Any
import Traversable.breaks._
/** The type implementing this traversable */
- protected type Self = Repr
+ protected[this] type Self = Repr
/** The collection of type $coll underlying this `TraversableLike` object.
* By default this is implemented as the `TraversableLike` object itself,
@@ -86,7 +86,7 @@ trait TraversableLike[+A, +Repr] extends Any
def repr: Repr = this.asInstanceOf[Repr]
final def isTraversableAgain: Boolean = true
-
+
/** The underlying collection seen as an instance of `$Coll`.
* By default this is implemented as the current collection object itself,
* but this can be overridden.
@@ -174,7 +174,7 @@ trait TraversableLike[+A, +Repr] extends Any
*
* @usecase def ++:[B](that: TraversableOnce[B]): $Coll[B]
* @inheritdoc
- *
+ *
* Example:
* {{{
* scala> val x = List(1)
@@ -252,18 +252,21 @@ trait TraversableLike[+A, +Repr] extends Any
b.result
}
+ private def filterImpl(p: A => Boolean, isFlipped: Boolean): Repr = {
+ val b = newBuilder
+ for (x <- this)
+ if (p(x) != isFlipped) b += x
+
+ b.result
+ }
+
/** Selects all elements of this $coll which satisfy a predicate.
*
* @param p the predicate used to test elements.
* @return a new $coll consisting of all elements of this $coll that satisfy the given
* predicate `p`. The order of the elements is preserved.
*/
- def filter(p: A => Boolean): Repr = {
- val b = newBuilder
- for (x <- this)
- if (p(x)) b += x
- b.result
- }
+ def filter(p: A => Boolean): Repr = filterImpl(p, isFlipped = false)
/** Selects all elements of this $coll which do not satisfy a predicate.
*
@@ -271,11 +274,11 @@ trait TraversableLike[+A, +Repr] extends Any
* @return a new $coll consisting of all elements of this $coll that do not satisfy the given
* predicate `p`. The order of the elements is preserved.
*/
- def filterNot(p: A => Boolean): Repr = filter(!p(_))
+ def filterNot(p: A => Boolean): Repr = filterImpl(p, isFlipped = true)
def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
- for (x <- this) if (pf.isDefinedAt(x)) b += pf(x)
+ foreach(pf.runWith(b += _))
b.result
}
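
The `filterImpl` refactoring above lets `filter` and `filterNot` share a single traversal instead of `filterNot` wrapping the predicate in an extra closure, and the `collect` change to `pf.runWith(b += _)` evaluates each pattern match once per element rather than via `isDefinedAt` plus `apply` (see the `runWith` example after the `TraversableOnce` diff below). A minimal standalone sketch of the same flip-flag pattern over plain `List`, outside the builder machinery (illustration only; the names mirror the patch):

    def filterImpl[A](xs: List[A], p: A => Boolean, isFlipped: Boolean): List[A] =
      xs.foldRight(List.empty[A]) { (x, acc) =>
        if (p(x) != isFlipped) x :: acc else acc   // keep x when p(x) disagrees with the flip flag
      }

    def filter[A](xs: List[A], p: A => Boolean): List[A]    = filterImpl(xs, p, isFlipped = false)
    def filterNot[A](xs: List[A], p: A => Boolean): List[A] = filterImpl(xs, p, isFlipped = true)
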
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index a448ac2c09..fcca2da437 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -128,10 +128,8 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
* @example `Seq("a", 1, 5L).collectFirst({ case x: Int => x*10 }) = Some(10)`
*/
def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = {
- for (x <- self.toIterator) { // make sure to use an iterator or `seq`
- if (pf isDefinedAt x)
- return Some(pf(x))
- }
+ // make sure to use an iterator or `seq`
+ self.toIterator.foreach(pf.runWith(b => return Some(b)))
None
}
@@ -198,7 +196,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op)
- def aggregate[B](z: B)(seqop: (B, A) => B, combop: (B, B) => B): B = foldLeft(z)(seqop)
+ def aggregate[B](z: =>B)(seqop: (B, A) => B, combop: (B, B) => B): B = foldLeft(z)(seqop)
def sum[B >: A](implicit num: Numeric[B]): B = foldLeft(num.zero)(num.plus)
@@ -271,7 +269,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
val b = cbf()
b ++= seq
- b.result
+ b.result()
}
def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = {
@@ -279,7 +277,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
for (x <- self)
b += x
- b.result
+ b.result()
}
def mkString(start: String, sep: String, end: String): String =
@@ -380,27 +378,22 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
object TraversableOnce {
- @deprecated("use OnceCanBuildFrom instead", "2.10.0")
- def traversableOnceCanBuildFrom[T] = new OnceCanBuildFrom[T]
- @deprecated("use MonadOps instead", "2.10.0")
- def wrapTraversableOnce[A](trav: TraversableOnce[A]) = new MonadOps(trav)
-
implicit def alternateImplicit[A](trav: TraversableOnce[A]) = new ForceImplicitAmbiguity
implicit def flattenTraversableOnce[A, CC[_]](travs: TraversableOnce[CC[A]])(implicit ev: CC[A] => TraversableOnce[A]) =
new FlattenOps[A](travs map ev)
/* Functionality reused in Iterator.CanBuildFrom */
- private[collection] abstract class BufferedCanBuildFrom[A, Coll[X] <: TraversableOnce[X]] extends generic.CanBuildFrom[Coll[_], A, Coll[A]] {
- def bufferToColl[B](buff: ArrayBuffer[B]): Coll[B]
- def traversableToColl[B](t: GenTraversable[B]): Coll[B]
+ private[collection] abstract class BufferedCanBuildFrom[A, CC[X] <: TraversableOnce[X]] extends generic.CanBuildFrom[CC[_], A, CC[A]] {
+ def bufferToColl[B](buff: ArrayBuffer[B]): CC[B]
+ def traversableToColl[B](t: GenTraversable[B]): CC[B]
- def newIterator: Builder[A, Coll[A]] = new ArrayBuffer[A] mapResult bufferToColl
+ def newIterator: Builder[A, CC[A]] = new ArrayBuffer[A] mapResult bufferToColl
/** Creates a new builder on request of a collection.
* @param from the collection requesting the builder to be created.
* @return the result of invoking the `genericBuilder` method on `from`.
*/
- def apply(from: Coll[_]): Builder[A, Coll[A]] = from match {
+ def apply(from: CC[_]): Builder[A, CC[A]] = from match {
case xs: generic.GenericTraversableTemplate[_, _] => xs.genericBuilder.asInstanceOf[Builder[A, Traversable[A]]] mapResult {
case res => traversableToColl(res.asInstanceOf[GenTraversable[A]])
}
@@ -429,7 +422,7 @@ object TraversableOnce {
def flatten: Iterator[A] = new AbstractIterator[A] {
val its = travs.toIterator
private var it: Iterator[A] = Iterator.empty
- def hasNext: Boolean = it.hasNext || its.hasNext && { it = its.next.toIterator; hasNext }
+ def hasNext: Boolean = it.hasNext || its.hasNext && { it = its.next().toIterator; hasNext }
def next(): A = if (hasNext) it.next() else Iterator.empty.next()
}
}
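
`collectFirst` above now relies on `PartialFunction.runWith`, which turns a partial function plus an action into a total `A => Boolean` that runs the action only when the function is defined, matching the pattern just once. (The `aggregate` change makes the zero element by-name, presumably so parallel implementations can re-evaluate it per partition.) A small REPL-style illustration of the `runWith` building block, a standard 2.10+ API:

    scala> val pf: PartialFunction[Int, Int] = { case x if x % 2 == 0 => x * 10 }
    scala> val step = pf.runWith(r => println("got " + r))

    scala> step(3)        // pf not defined at 3: the action is skipped
    res0: Boolean = false

    scala> step(4)
    got 40
    res1: Boolean = true
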
diff --git a/src/library/scala/collection/TraversableView.scala b/src/library/scala/collection/TraversableView.scala
index cce6b72257..af219084b8 100644
--- a/src/library/scala/collection/TraversableView.scala
+++ b/src/library/scala/collection/TraversableView.scala
@@ -10,7 +10,6 @@ package scala.collection
import generic._
import mutable.Builder
-import TraversableView.NoBuilder
/** A base trait for non-strict views of traversable collections.
* $traversableViewInfo
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index 14f865c2f0..36f6210ef4 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -10,7 +10,6 @@ package scala.collection
import generic._
import mutable.{ Builder, ArrayBuffer }
-import TraversableView.NoBuilder
import scala.annotation.migration
import scala.language.implicitConversions
@@ -59,7 +58,7 @@ trait ViewMkString[+A] {
* $viewInfo
*
* All views for traversable collections are defined by creating a new `foreach` method.
- *
+ *
* @author Martin Odersky
* @version 2.8
* @since 2.8
@@ -162,7 +161,7 @@ trait TraversableViewLike[+A,
// if (b.isInstanceOf[NoBuilder[_]]) newFlatMapped(f).asInstanceOf[That]
// else super.flatMap[B, That](f)(bf)
}
- override def flatten[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]) =
+ override def flatten[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]) =
newFlatMapped(asTraversable)
private[this] implicit def asThis(xs: Transformed[A]): This = xs.asInstanceOf[This]
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index 6c11c5bcb5..4eeacd7377 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -41,7 +41,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
@tailrec private def GCAS_Complete(m: MainNode[K, V], ct: TrieMap[K, V]): MainNode[K, V] = if (m eq null) null else {
// complete the GCAS
val prev = /*READ*/m.prev
- val ctr = ct.readRoot(true)
+ val ctr = ct.readRoot(abort = true)
prev match {
case null =>
@@ -250,7 +250,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
if (ct.isReadOnly || (startgen eq in.gen)) in.rec_lookup(k, hc, lev + 5, this, startgen, ct)
else {
if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_lookup(k, hc, lev, parent, startgen, ct)
- else return RESTART // used to be throw RestartException
+ else RESTART // used to be throw RestartException
}
case sn: SNode[K, V] => // 2) singleton node
if (sn.hc == hc && equal(sn.k, k, ct)) sn.v.asInstanceOf[AnyRef]
@@ -437,7 +437,7 @@ extends MainNode[K, V] {
val updmap = listmap - k
if (updmap.size > 1) new LNode(updmap)
else {
- val (k, v) = updmap.iterator.next
+ val (k, v) = updmap.iterator.next()
new TNode(k, v, ct.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses
}
}
@@ -545,7 +545,7 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba
// removed (those existing when the op began)
// - if there are only null-i-nodes below, returns null
def toCompressed(ct: TrieMap[K, V], lev: Int, gen: Gen) = {
- var bmp = bitmap
+ val bmp = bitmap
var i = 0
val arr = array
val tmparray = new Array[BasicNode](arr.length)
@@ -723,7 +723,7 @@ extends scala.collection.concurrent.Map[K, V]
private def RDCSS_ROOT(ov: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]): Boolean = {
val desc = RDCSS_Descriptor(ov, expectedmain, nv)
if (CAS_ROOT(ov, desc)) {
- RDCSS_Complete(false)
+ RDCSS_Complete(abort = false)
/*READ*/desc.committed
} else false
}
@@ -920,8 +920,8 @@ object TrieMap extends MutableMapFactory[TrieMap] {
private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: TrieMap[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] {
- private var stack = new Array[Array[BasicNode]](7)
- private var stackpos = new Array[Int](7)
+ private val stack = new Array[Array[BasicNode]](7)
+ private val stackpos = new Array[Int](7)
private var depth = -1
private var subiter: Iterator[(K, V)] = null
private var current: KVNode[K, V] = null
@@ -1027,7 +1027,7 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct:
val (arr1, arr2) = stack(d).drop(stackpos(d) + 1).splitAt(rem / 2)
stack(d) = arr1
stackpos(d) = -1
- val it = newIterator(level + 1, ct, false)
+ val it = newIterator(level + 1, ct, _mustInit = false)
it.stack(0) = arr2
it.stackpos(0) = -1
it.depth = 0
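
The `TrieMap` hunks above are cleanups (named boolean arguments, `val` for fields that are never reassigned, dropping a redundant `return`). For context, the data structure they touch in everyday use (standard `concurrent.TrieMap` API, REPL-style):

    scala> import scala.collection.concurrent.TrieMap
    scala> val m = TrieMap("a" -> 1)

    scala> m.putIfAbsent("b", 2)
    res0: Option[Int] = None

    scala> m.putIfAbsent("a", 42)      // already present: the existing value is returned
    res1: Option[Int] = Some(1)

    scala> val snap = m.snapshot()     // cheap, atomic snapshot; later updates to m don't affect snap
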
diff --git a/src/library/scala/collection/convert/DecorateAsJava.scala b/src/library/scala/collection/convert/DecorateAsJava.scala
index 87bcae3923..7447c1bbaf 100644
--- a/src/library/scala/collection/convert/DecorateAsJava.scala
+++ b/src/library/scala/collection/convert/DecorateAsJava.scala
@@ -25,7 +25,7 @@ import scala.language.implicitConversions
* - `scala.collection.mutable.Buffer` <=> `java.util.List`
* - `scala.collection.mutable.Set` <=> `java.util.Set`
* - `scala.collection.mutable.Map` <=> `java.util.Map`
- * - `scala.collection.mutable.ConcurrentMap` <=> `java.util.concurrent.ConcurrentMap`
+ * - `scala.collection.mutable.concurrent.Map` <=> `java.util.concurrent.ConcurrentMap`
*
* In all cases, converting from a source type to a target type and back
* again will return the original source object, e.g.
@@ -279,26 +279,6 @@ trait DecorateAsJava {
/**
* Adds an `asJava` method that implicitly converts a Scala mutable
- * `ConcurrentMap` to a Java `ConcurrentMap`.
- *
- * The returned Java `ConcurrentMap` is backed by the provided Scala
- * `ConcurrentMap` and any side-effects of using it via the Java interface
- * will be visible via the Scala interface and vice versa.
- *
- * If the Scala `ConcurrentMap` was previously obtained from an implicit or
- * explicit call of `asConcurrentMap(java.util.concurrect.ConcurrentMap)`
- * then the original Java `ConcurrentMap` will be returned.
- *
- * @param m The `ConcurrentMap` to be converted.
- * @return An object with an `asJava` method that returns a Java
- * `ConcurrentMap` view of the argument.
- */
- @deprecated("Use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
- implicit def asJavaConcurrentMapConverter[A, B](m: mutable.ConcurrentMap[A, B]): AsJava[juc.ConcurrentMap[A, B]] =
- new AsJava(asJavaConcurrentMap(m))
-
- /**
- * Adds an `asJava` method that implicitly converts a Scala mutable
* `concurrent.Map` to a Java `ConcurrentMap`.
*
* The returned Java `ConcurrentMap` is backed by the provided Scala
diff --git a/src/library/scala/collection/convert/DecorateAsScala.scala b/src/library/scala/collection/convert/DecorateAsScala.scala
index 94847a76e3..90e8dded6e 100644
--- a/src/library/scala/collection/convert/DecorateAsScala.scala
+++ b/src/library/scala/collection/convert/DecorateAsScala.scala
@@ -144,25 +144,6 @@ trait DecorateAsScala {
/**
* Adds an `asScala` method that implicitly converts a Java `ConcurrentMap`
- * to a Scala mutable `ConcurrentMap`. The returned Scala `ConcurrentMap` is
- * backed by the provided Java `ConcurrentMap` and any side-effects of using
- * it via the Scala interface will be visible via the Java interface and
- * vice versa.
- *
- * If the Java `ConcurrentMap` was previously obtained from an implicit or
- * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
- * then the original Scala `ConcurrentMap` will be returned.
- *
- * @param m The `ConcurrentMap` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `ConcurrentMap` view of the argument.
- */
- @deprecated("Use `mapAsScalaConcurrentMapConverter` instead, and use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
- def asScalaConcurrentMapConverter[A, B](m: juc.ConcurrentMap[A, B]): AsScala[mutable.ConcurrentMap[A, B]] =
- new AsScala(asScalaConcurrentMap(m))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap`
* to a Scala mutable `concurrent.Map`. The returned Scala `concurrent.Map` is
* backed by the provided Java `ConcurrentMap` and any side-effects of using
* it via the Scala interface will be visible via the Java interface and
diff --git a/src/library/scala/collection/convert/Decorators.scala b/src/library/scala/collection/convert/Decorators.scala
index e2c46c1e4f..f004e4712b 100644
--- a/src/library/scala/collection/convert/Decorators.scala
+++ b/src/library/scala/collection/convert/Decorators.scala
@@ -9,7 +9,7 @@
package scala.collection
package convert
-import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import java.{ util => ju }
private[collection] trait Decorators {
/** Generic class containing the `asJava` converter method */
diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala
index 5e6126a7cf..9665ffa045 100644
--- a/src/library/scala/collection/convert/WrapAsJava.scala
+++ b/src/library/scala/collection/convert/WrapAsJava.scala
@@ -235,27 +235,6 @@ trait WrapAsJava {
}
/**
- * Implicitly converts a Scala mutable `ConcurrentMap` to a Java
- * `ConcurrentMap`.
- *
- * The returned Java `ConcurrentMap` is backed by the provided Scala
- * `ConcurrentMap` and any side-effects of using it via the Java interface
- * will be visible via the Scala interface and vice versa.
- *
- * If the Scala `ConcurrentMap` was previously obtained from an implicit or
- * explicit call of `asScalaConcurrentMap(java.util.concurrect.ConcurrentMap)`
- * then the original Java ConcurrentMap will be returned.
- *
- * @param m The `ConcurrentMap` to be converted.
- * @return A Java `ConcurrentMap` view of the argument.
- */
- @deprecated("Use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
- implicit def asJavaConcurrentMap[A, B](m: mutable.ConcurrentMap[A, B]): juc.ConcurrentMap[A, B] = m match {
- case JConcurrentMapDeprecatedWrapper(wrapped) => wrapped
- case _ => new ConcurrentMapDeprecatedWrapper(m)
- }
-
- /**
* Implicitly converts a Scala mutable `concurrent.Map` to a Java
* `ConcurrentMap`.
*
diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala
index ffcca62291..f43eae10d6 100644
--- a/src/library/scala/collection/convert/WrapAsScala.scala
+++ b/src/library/scala/collection/convert/WrapAsScala.scala
@@ -12,30 +12,7 @@ package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import scala.language.implicitConversions
-trait LowPriorityWrapAsScala {
- this: WrapAsScala =>
-
- import Wrappers._
-
- /**
- * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap.
- * The returned Scala ConcurrentMap is backed by the provided Java
- * ConcurrentMap and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java ConcurrentMap was previously obtained from an implicit or
- * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
- * then the original Scala ConcurrentMap will be returned.
- *
- * @param m The ConcurrentMap to be converted.
- * @return A Scala mutable ConcurrentMap view of the argument.
- */
- @deprecated("Use `mapAsScalaConcurrentMap` instead, and use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
- implicit def mapAsScalaDeprecatedConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] =
- asScalaConcurrentMap(m)
-}
-
-trait WrapAsScala extends LowPriorityWrapAsScala {
+trait WrapAsScala {
import Wrappers._
/**
* Implicitly converts a Java `Iterator` to a Scala `Iterator`.
@@ -178,25 +155,6 @@ trait WrapAsScala extends LowPriorityWrapAsScala {
* @param m The ConcurrentMap to be converted.
* @return A Scala mutable ConcurrentMap view of the argument.
*/
- @deprecated("Use `mapAsScalaConcurrentMap` instead, and use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
- def asScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] = m match {
- case cmw: ConcurrentMapDeprecatedWrapper[a, b] => cmw.underlying
- case _ => new JConcurrentMapDeprecatedWrapper(m)
- }
-
- /**
- * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap.
- * The returned Scala ConcurrentMap is backed by the provided Java
- * ConcurrentMap and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java ConcurrentMap was previously obtained from an implicit or
- * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
- * then the original Scala ConcurrentMap will be returned.
- *
- * @param m The ConcurrentMap to be converted.
- * @return A Scala mutable ConcurrentMap view of the argument.
- */
implicit def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = m match {
case cmw: ConcurrentMapWrapper[a, b] => cmw.underlying
case _ => new JConcurrentMapWrapper(m)
diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala
index 20add3365d..69e9a8fff4 100644
--- a/src/library/scala/collection/convert/Wrappers.scala
+++ b/src/library/scala/collection/convert/Wrappers.scala
@@ -27,9 +27,9 @@ private[collection] trait Wrappers {
case class IteratorWrapper[A](underlying: Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] {
def hasNext = underlying.hasNext
- def next() = underlying.next
+ def next() = underlying.next()
def hasMoreElements = underlying.hasNext
- def nextElement() = underlying.next
+ def nextElement() = underlying.next()
def remove() = throw new UnsupportedOperationException
}
@@ -81,7 +81,7 @@ private[collection] trait Wrappers {
override def remove(i: Int) = underlying remove i
}
- case class JListWrapper[A](val underlying: ju.List[A]) extends mutable.AbstractBuffer[A] with mutable.Buffer[A] {
+ case class JListWrapper[A](underlying: ju.List[A]) extends mutable.AbstractBuffer[A] with mutable.Buffer[A] {
def length = underlying.size
override def isEmpty = underlying.isEmpty
override def iterator: Iterator[A] = underlying.iterator
@@ -108,7 +108,7 @@ private[collection] trait Wrappers {
val ui = underlying.iterator
var prev: Option[A] = None
def hasNext = ui.hasNext
- def next = { val e = ui.next; prev = Some(e); e }
+ def next = { val e = ui.next(); prev = Some(e); e }
def remove = prev match {
case Some(e) =>
underlying match {
@@ -180,7 +180,7 @@ private[collection] trait Wrappers {
def hasNext = ui.hasNext
def next() = {
- val (k, v) = ui.next
+ val (k, v) = ui.next()
prev = Some(k)
new ju.Map.Entry[A, B] {
import scala.util.hashing.byteswap32
@@ -272,32 +272,10 @@ private[collection] trait Wrappers {
override def empty: Repr = null.asInstanceOf[Repr]
}
- case class JMapWrapper[A, B](val underlying : ju.Map[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] {
+ case class JMapWrapper[A, B](underlying : ju.Map[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] {
override def empty = JMapWrapper(new ju.HashMap[A, B])
}
- class ConcurrentMapDeprecatedWrapper[A, B](override val underlying: mutable.ConcurrentMap[A, B]) extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] {
-
- def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match {
- case Some(v) => v
- case None => null.asInstanceOf[B]
- }
-
- def remove(k: AnyRef, v: AnyRef) = try {
- underlying.remove(k.asInstanceOf[A], v.asInstanceOf[B])
- } catch {
- case ex: ClassCastException =>
- false
- }
-
- def replace(k: A, v: B): B = underlying.replace(k, v) match {
- case Some(v) => v
- case None => null.asInstanceOf[B]
- }
-
- def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval)
- }
-
class ConcurrentMapWrapper[A, B](override val underlying: concurrent.Map[A, B]) extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] {
def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match {
@@ -320,32 +298,7 @@ private[collection] trait Wrappers {
def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval)
}
- case class JConcurrentMapDeprecatedWrapper[A, B](val underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapDeprecatedWrapper[A, B]] with mutable.ConcurrentMap[A, B] {
- override def get(k: A) = {
- val v = underlying get k
- if (v != null) Some(v)
- else None
- }
-
- override def empty = new JConcurrentMapDeprecatedWrapper(new juc.ConcurrentHashMap[A, B])
-
- def putIfAbsent(k: A, v: B): Option[B] = {
- val r = underlying.putIfAbsent(k, v)
- if (r != null) Some(r) else None
- }
-
- def remove(k: A, v: B): Boolean = underlying.remove(k, v)
-
- def replace(k: A, v: B): Option[B] = {
- val prev = underlying.replace(k, v)
- if (prev != null) Some(prev) else None
- }
-
- def replace(k: A, oldvalue: B, newvalue: B): Boolean =
- underlying.replace(k, oldvalue, newvalue)
- }
-
- case class JConcurrentMapWrapper[A, B](val underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with concurrent.Map[A, B] {
+ case class JConcurrentMapWrapper[A, B](underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with concurrent.Map[A, B] {
override def get(k: A) = {
val v = underlying get k
if (v != null) Some(v)
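
With the deprecated `mutable.ConcurrentMap` wrappers removed throughout the `convert` package above, the supported round-trip is between `java.util.concurrent.ConcurrentMap` and `scala.collection.concurrent.Map`. A REPL-style sketch via the `JavaConverters` decorators these traits back:

    scala> import scala.collection.JavaConverters._
    scala> import java.util.concurrent.ConcurrentHashMap

    scala> val jm = new ConcurrentHashMap[String, Int]()
    scala> val sm: scala.collection.concurrent.Map[String, Int] = jm.asScala

    scala> sm.putIfAbsent("a", 1)
    res0: Option[Int] = None

    scala> jm.get("a")                 // both views share the same underlying map
    res1: Int = 1
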
diff --git a/src/library/scala/collection/generic/GenMapFactory.scala b/src/library/scala/collection/generic/GenMapFactory.scala
index e869bba51a..5a183c307b 100644
--- a/src/library/scala/collection/generic/GenMapFactory.scala
+++ b/src/library/scala/collection/generic/GenMapFactory.scala
@@ -44,7 +44,7 @@ abstract class GenMapFactory[CC[A, B] <: GenMap[A, B] with GenMapLike[A, B, CC[A
* @tparam B the type of the associated values
* @return a new $coll consisting key/value pairs given by `elems`.
*/
- def apply[A, B](elems: (A, B)*): CC[A, B] = (newBuilder[A, B] ++= elems).result
+ def apply[A, B](elems: (A, B)*): CC[A, B] = (newBuilder[A, B] ++= elems).result()
/** The default builder for $Coll objects.
* @tparam A the type of the keys
diff --git a/src/library/scala/collection/generic/GenTraversableFactory.scala b/src/library/scala/collection/generic/GenTraversableFactory.scala
index 2d3f7e609b..0e1a5534c0 100644
--- a/src/library/scala/collection/generic/GenTraversableFactory.scala
+++ b/src/library/scala/collection/generic/GenTraversableFactory.scala
@@ -38,12 +38,10 @@ import scala.language.higherKinds
abstract class GenTraversableFactory[CC[X] <: GenTraversable[X] with GenericTraversableTemplate[X, CC]]
extends GenericCompanion[CC] {
- // A default implementation of GenericCanBuildFrom which can be cast
- // to whatever is desired.
- private class ReusableCBF extends GenericCanBuildFrom[Nothing] {
+ private[this] val ReusableCBFInstance: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] {
override def apply() = newBuilder[Nothing]
}
- lazy val ReusableCBF: GenericCanBuildFrom[Nothing] = new ReusableCBF
+ def ReusableCBF: GenericCanBuildFrom[Nothing] = ReusableCBFInstance
/** A generic implementation of the `CanBuildFrom` trait, which forwards
* all calls to `apply(from)` to the `genericBuilder` method of
@@ -75,7 +73,7 @@ extends GenericCompanion[CC] {
b.sizeHint(xss.map(_.size).sum)
for (xs <- xss.seq) b ++= xs
- b.result
+ b.result()
}
/** Produces a $coll containing the results of some element computation a number of times.
@@ -91,7 +89,7 @@ extends GenericCompanion[CC] {
b += elem
i += 1
}
- b.result
+ b.result()
}
/** Produces a two-dimensional $coll containing the results of some element computation a number of times.
@@ -149,7 +147,7 @@ extends GenericCompanion[CC] {
b += f(i)
i += 1
}
- b.result
+ b.result()
}
/** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0.
@@ -218,13 +216,13 @@ extends GenericCompanion[CC] {
if (step == zero) throw new IllegalArgumentException("zero step")
val b = newBuilder[T]
- b sizeHint immutable.NumericRange.count(start, end, step, false)
+ b sizeHint immutable.NumericRange.count(start, end, step, isInclusive = false)
var i = start
while (if (step < zero) end < i else i < end) {
b += i
i += step
}
- b.result
+ b.result()
}
/** Produces a $coll containing repeated applications of a function to a start value.
@@ -248,7 +246,6 @@ extends GenericCompanion[CC] {
b += acc
}
}
- b.result
+ b.result()
}
}
-
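
The factory methods touched above (`concat`, `fill`, `tabulate`, `range`, `iterate`) are inherited by every collection companion from `GenTraversableFactory`; the hunks switch them to explicit `result()` calls and replace the lazy `ReusableCBF` with a plain private instance behind a `def`, presumably to avoid lazy-initialization overhead on a hot path. For reference:

    scala> List.range(0, 10, 2)
    res0: List[Int] = List(0, 2, 4, 6, 8)

    scala> Vector.tabulate(4)(i => i * i)
    res1: scala.collection.immutable.Vector[Int] = Vector(0, 1, 4, 9)

    scala> List.iterate(1, 5)(_ * 2)
    res2: List[Int] = List(1, 2, 4, 8, 16)
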
diff --git a/src/library/scala/collection/generic/GenericClassTagCompanion.scala b/src/library/scala/collection/generic/GenericClassTagCompanion.scala
index a587bbf544..cdfee5252f 100644
--- a/src/library/scala/collection/generic/GenericClassTagCompanion.scala
+++ b/src/library/scala/collection/generic/GenericClassTagCompanion.scala
@@ -19,15 +19,15 @@ import scala.reflect.ClassTag
* @author Aleksandar Prokopec
*/
abstract class GenericClassTagCompanion[+CC[X] <: Traversable[X]] {
- type Coll = CC[_]
+ protected[this] type Coll = CC[_]
def newBuilder[A](implicit ord: ClassTag[A]): Builder[A, CC[A]]
- def empty[A: ClassTag]: CC[A] = newBuilder[A].result
+ def empty[A: ClassTag]: CC[A] = newBuilder[A].result()
def apply[A](elems: A*)(implicit ord: ClassTag[A]): CC[A] = {
val b = newBuilder[A]
b ++= elems
- b.result
+ b.result()
}
}
diff --git a/src/library/scala/collection/generic/GenericCompanion.scala b/src/library/scala/collection/generic/GenericCompanion.scala
index 5b03f8e5c6..66052d0e6f 100644
--- a/src/library/scala/collection/generic/GenericCompanion.scala
+++ b/src/library/scala/collection/generic/GenericCompanion.scala
@@ -24,7 +24,7 @@ import scala.language.higherKinds
*/
abstract class GenericCompanion[+CC[X] <: GenTraversable[X]] {
/** The underlying collection type with unknown element type */
- type Coll = CC[_]
+ protected[this] type Coll = CC[_]
/** The default builder for `$Coll` objects.
* @tparam A the type of the ${coll}'s elements
@@ -34,7 +34,7 @@ abstract class GenericCompanion[+CC[X] <: GenTraversable[X]] {
/** An empty collection of type `$Coll[A]`
* @tparam A the type of the ${coll}'s elements
*/
- def empty[A]: CC[A] = newBuilder[A].result
+ def empty[A]: CC[A] = newBuilder[A].result()
/** Creates a $coll with the specified elements.
* @tparam A the type of the ${coll}'s elements
@@ -46,7 +46,7 @@ abstract class GenericCompanion[+CC[X] <: GenTraversable[X]] {
else {
val b = newBuilder[A]
b ++= elems
- b.result
+ b.result()
}
}
}
diff --git a/src/library/scala/collection/generic/GenericOrderedCompanion.scala b/src/library/scala/collection/generic/GenericOrderedCompanion.scala
index a9a50a1c35..7a0c0a63e8 100644
--- a/src/library/scala/collection/generic/GenericOrderedCompanion.scala
+++ b/src/library/scala/collection/generic/GenericOrderedCompanion.scala
@@ -19,16 +19,16 @@ import scala.language.higherKinds
* @since 2.8
*/
abstract class GenericOrderedCompanion[+CC[X] <: Traversable[X]] {
- type Coll = CC[_]
+ protected[this] type Coll = CC[_]
def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]]
- def empty[A: Ordering]: CC[A] = newBuilder[A].result
+ def empty[A: Ordering]: CC[A] = newBuilder[A].result()
def apply[A](elems: A*)(implicit ord: Ordering[A]): CC[A] = {
val b = newBuilder[A]
b ++= elems
- b.result
+ b.result()
}
}
diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
index f7a8a9aa88..908aa5b126 100644
--- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
@@ -88,7 +88,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
b1 += x
b2 += y
}
- (b1.result, b2.result)
+ (b1.result(), b2.result())
}
/** Converts this $coll of triples into three collections of the first, second,
@@ -113,7 +113,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
b2 += y
b3 += z
}
- (b1.result, b2.result, b3.result)
+ (b1.result(), b2.result(), b3.result())
}
/** Converts this $coll of traversable collections into
@@ -144,7 +144,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
val b = genericBuilder[B]
for (xs <- sequential)
b ++= asTraversable(xs).seq
- b.result
+ b.result()
}
/** Transposes this $coll of traversable collections into
@@ -161,7 +161,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
@migration("`transpose` throws an `IllegalArgumentException` if collections are not uniformly sized.", "2.9.0")
def transpose[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]): CC[CC[B] @uncheckedVariance] = {
if (isEmpty)
- return genericBuilder[CC[B]].result
+ return genericBuilder[CC[B]].result()
def fail = throw new IllegalArgumentException("transpose requires all collections have the same size")
@@ -179,7 +179,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
}
val bb = genericBuilder[CC[B]]
for (b <- bs) bb += b.result
- bb.result
+ bb.result()
}
}
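
For context, the methods patched here (`unzip`, `unzip3`, `flatten`, `transpose`) in action (standard behaviour, REPL-style):

    scala> List((1, 'a'), (2, 'b')).unzip
    res0: (List[Int], List[Char]) = (List(1, 2),List(a, b))

    scala> List(List(1, 2), List(3)).flatten
    res1: List[Int] = List(1, 2, 3)

    scala> List(List(1, 2), List(3, 4), List(5, 6)).transpose
    res2: List[List[Int]] = List(List(1, 3, 5), List(2, 4, 6))
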
diff --git a/src/library/scala/collection/generic/Growable.scala b/src/library/scala/collection/generic/Growable.scala
index cb75212e3d..52a0d32de1 100644
--- a/src/library/scala/collection/generic/Growable.scala
+++ b/src/library/scala/collection/generic/Growable.scala
@@ -6,10 +6,11 @@
** |/ **
\* */
-
package scala.collection
package generic
+import scala.annotation.tailrec
+
/** This trait forms part of collections that can be augmented
* using a `+=` operator and that can be cleared of all elements using
* a `clear` method.
@@ -45,7 +46,19 @@ trait Growable[-A] extends Clearable {
* @param xs the TraversableOnce producing the elements to $add.
* @return the $coll itself.
*/
- def ++=(xs: TraversableOnce[A]): this.type = { xs.seq foreach += ; this }
+ def ++=(xs: TraversableOnce[A]): this.type = {
+ @tailrec def loop(xs: collection.LinearSeq[A]) {
+ if (xs.nonEmpty) {
+ this += xs.head
+ loop(xs.tail)
+ }
+ }
+ xs.seq match {
+ case xs: collection.LinearSeq[_] => loop(xs)
+ case xs => xs foreach +=
+ }
+ this
+ }
/** Clears the $coll's contents. After this operation, the
* $coll is empty.
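
The new `++=` above walks a `LinearSeq` argument with an explicit head/tail tail-recursive loop and only falls back to `foreach` for other collection kinds, presumably to avoid the per-call closure and dispatch overhead when bulk-adding lists. From the caller's side nothing changes:

    scala> import scala.collection.mutable.ListBuffer
    scala> val buf = ListBuffer.empty[Int]

    scala> buf ++= List(1, 2, 3)      // LinearSeq: consumed by the tail-recursive loop
    scala> buf ++= Iterator(4, 5)     // anything else still goes through foreach

    scala> buf.toList
    res0: List[Int] = List(1, 2, 3, 4, 5)
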
diff --git a/src/library/scala/ScalaObject.scala b/src/library/scala/collection/generic/IndexedSeqFactory.scala
index f67dc3a6c5..e86d163b3c 100644
--- a/src/library/scala/ScalaObject.scala
+++ b/src/library/scala/collection/generic/IndexedSeqFactory.scala
@@ -6,11 +6,16 @@
** |/ **
\* */
-package scala
+package scala.collection
+package generic
-/** Until scala 2.10.0 this marker trait was added to
- * scala-compiled classes. Now it only exists for backward
- * compatibility.
+import language.higherKinds
+
+/** A template for companion objects of IndexedSeq and subclasses thereof.
+ *
+ * @since 2.11
*/
-@deprecated("ScalaObject will be removed", "2.10.0")
-trait ScalaObject
+abstract class IndexedSeqFactory[CC[X] <: IndexedSeq[X] with GenericTraversableTemplate[X, CC]] extends SeqFactory[CC] {
+ override def ReusableCBF: GenericCanBuildFrom[Nothing] =
+ scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[Nothing]]
+}
diff --git a/src/library/scala/collection/generic/IsSeqLike.scala b/src/library/scala/collection/generic/IsSeqLike.scala
new file mode 100644
index 0000000000..d1dffdf8cf
--- /dev/null
+++ b/src/library/scala/collection/generic/IsSeqLike.scala
@@ -0,0 +1,57 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package generic
+
+/** Type class witnessing that a collection representation type `Repr` has
+ * elements of type `A` and has a conversion to `SeqLike[A, Repr]`.
+ *
+ * This type enables simple enrichment of `Seq`s with extension methods which
+ * can make full use of the mechanics of the Scala collections framework in
+ * their implementation.
+ *
+ * Example usage:
+ * {{{
+ * class FilterMapImpl[A, Repr](val r: SeqLike[A, Repr]) {
+ * final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That =
+ * r.flatMap(f(_))
+ * }
+ * implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsSeqLike[Repr]): FilterMapImpl[fr.A,Repr] =
+ * new FilterMapImpl(fr.conversion(r))
+ *
+ * val l = List(1, 2, 3, 4, 5)
+ * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None)
+ * // == List(2, 4)
+ * }}}
+ *
+ * @see [[scala.collection.Seq]]
+ * @see [[scala.collection.generic.IsTraversableLike]]
+ */
+trait IsSeqLike[Repr] {
+ /** The type of elements we can traverse over. */
+ type A
+ /** A conversion from the representation type `Repr` to a `SeqLike[A,Repr]`. */
+ val conversion: Repr => SeqLike[A, Repr]
+}
+
+object IsSeqLike {
+ import language.higherKinds
+
+ implicit val stringRepr: IsSeqLike[String] { type A = Char } =
+ new IsSeqLike[String] {
+ type A = Char
+ val conversion = implicitly[String => SeqLike[Char, String]]
+ }
+
+ implicit def seqLikeRepr[C[_], A0](implicit conv: C[A0] => SeqLike[A0,C[A0]]): IsSeqLike[C[A0]] { type A = A0 } =
+ new IsSeqLike[C[A0]] {
+ type A = A0
+ val conversion = conv
+ }
+}
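
The `stringRepr` instance above makes `String` usable wherever an `IsSeqLike` is required. Adapting the `FilterMapImpl` enrichment from the doc comment (the `toFilterMap` name and the demo object are illustrative only):

    import scala.collection.SeqLike
    import scala.collection.generic.{ CanBuildFrom, IsSeqLike }
    import scala.language.implicitConversions

    object FilterMapDemo {
      class FilterMapImpl[A, Repr](val r: SeqLike[A, Repr]) {
        def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That =
          r.flatMap(f(_))
      }
      implicit def toFilterMap[Repr](r: Repr)(implicit fr: IsSeqLike[Repr]): FilterMapImpl[fr.A, Repr] =
        new FilterMapImpl(fr.conversion(r))

      "salmon" filterMap (c => if (c != 'l') Some(c.toUpper) else None)   // "SAMON", thanks to stringRepr
    }
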
diff --git a/src/library/scala/collection/generic/IterableForwarder.scala b/src/library/scala/collection/generic/IterableForwarder.scala
index 90ebcace84..8feace3f8b 100644
--- a/src/library/scala/collection/generic/IterableForwarder.scala
+++ b/src/library/scala/collection/generic/IterableForwarder.scala
@@ -6,12 +6,9 @@
** |/ **
\* */
-
-
package scala.collection.generic
-import scala.collection._
-import scala.collection.mutable.Buffer
+import scala.collection._
/** This trait implements a forwarder for iterable objects. It forwards
* all calls to a different iterable object, except for
diff --git a/src/library/scala/collection/generic/SeqForwarder.scala b/src/library/scala/collection/generic/SeqForwarder.scala
index e8b15ec450..aafaffc159 100644
--- a/src/library/scala/collection/generic/SeqForwarder.scala
+++ b/src/library/scala/collection/generic/SeqForwarder.scala
@@ -50,7 +50,7 @@ trait SeqForwarder[+A] extends Seq[A] with IterableForwarder[A] {
override def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = underlying lastIndexOfSlice that
override def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int = underlying.lastIndexOfSlice(that, end)
override def containsSlice[B](that: GenSeq[B]): Boolean = underlying containsSlice that
- override def contains(elem: Any): Boolean = underlying contains elem
+ override def contains[A1 >: A](elem: A1): Boolean = underlying contains elem
override def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = underlying.corresponds(that)(p)
override def indices: Range = underlying.indices
}
diff --git a/src/library/scala/collection/generic/Signalling.scala b/src/library/scala/collection/generic/Signalling.scala
index 498db7f8fa..442a7c126e 100644
--- a/src/library/scala/collection/generic/Signalling.scala
+++ b/src/library/scala/collection/generic/Signalling.scala
@@ -140,7 +140,7 @@ trait AtomicIndexFlag extends Signalling {
val old = intflag.get
if (f <= old) loop = false
else if (intflag.compareAndSet(old, f)) loop = false
- } while (loop);
+ } while (loop)
}
abstract override def setIndexFlagIfLesser(f: Int) = {
var loop = true
@@ -148,7 +148,7 @@ trait AtomicIndexFlag extends Signalling {
val old = intflag.get
if (f >= old) loop = false
else if (intflag.compareAndSet(old, f)) loop = false
- } while (loop);
+ } while (loop)
}
}
@@ -163,7 +163,7 @@ trait DelegatedSignalling extends Signalling {
var signalDelegate: Signalling
def isAborted = signalDelegate.isAborted
- def abort() = signalDelegate.abort
+ def abort() = signalDelegate.abort()
def indexFlag = signalDelegate.indexFlag
def setIndexFlag(f: Int) = signalDelegate.setIndexFlag(f)
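
The two `do`/`while` loops above are the standard compare-and-set retry pattern: re-read the flag, give up if the current value already satisfies the bound, otherwise attempt the swap and retry on contention. A standalone sketch of the "if greater" case on a plain `AtomicInteger` (the `setIfGreater` helper is illustrative only):

    import java.util.concurrent.atomic.AtomicInteger

    def setIfGreater(flag: AtomicInteger, f: Int): Unit = {
      var loop = true
      do {
        val old = flag.get
        if (f <= old) loop = false                         // a larger value is already in place
        else if (flag.compareAndSet(old, f)) loop = false  // our write won the race
        // otherwise another thread raced us; re-read and retry
      } while (loop)
    }
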
diff --git a/src/library/scala/collection/generic/Sorted.scala b/src/library/scala/collection/generic/Sorted.scala
index f962b26bd3..2c3d200d01 100644
--- a/src/library/scala/collection/generic/Sorted.scala
+++ b/src/library/scala/collection/generic/Sorted.scala
@@ -71,28 +71,40 @@ trait Sorted[K, +This <: Sorted[K, This]] {
def to(to: K): This = {
val i = keySet.from(to).iterator
if (i.isEmpty) return repr
- val next = i.next
+ val next = i.next()
if (compare(next, to) == 0)
if (i.isEmpty) repr
- else until(i.next)
+ else until(i.next())
else
until(next)
}
+
+ /**
+ * Creates an iterator over all the keys (or elements) contained in this
+ * collection greater than or equal to `start`
+ * according to the ordering of this collection. x.keysIteratorFrom(y)
+ * is equivalent to but often more efficient than
+ * x.from(y).keysIterator.
+ *
+ * @param start The lower bound (inclusive)
+ * on the keys to be returned
+ */
+ def keysIteratorFrom(start: K): Iterator[K]
protected def hasAll(j: Iterator[K]): Boolean = {
val i = keySet.iterator
if (i.isEmpty) return j.isEmpty
- var in = i.next;
+ var in = i.next()
while (j.hasNext) {
- val jn = j.next;
+ val jn = j.next()
while ({
- val n = compare(jn, in);
- if (n == 0) false;
- else if (n < 0) return false;
- else if (!i.hasNext) return false;
- else true;
- }) in = i.next;
+ val n = compare(jn, in)
+ if (n == 0) false
+ else if (n < 0) return false
+ else if (!i.hasNext) return false
+ else true
+ }) in = i.next()
}
true
}
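
The new abstract `keysIteratorFrom` lets a sorted collection start iteration at a lower bound without materialising the `from(start)` slice first. Assuming the concrete sorted sets and maps implement it (as the rest of this change does), usage would look like:

    import scala.collection.immutable.TreeSet

    val s = TreeSet(1, 3, 5, 7, 9)
    s.keysIteratorFrom(4).toList   // List(5, 7, 9), no intermediate TreeSet is built
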
diff --git a/src/library/scala/collection/generic/SortedMapFactory.scala b/src/library/scala/collection/generic/SortedMapFactory.scala
index 17201b0f7a..fb3fe0fcb6 100644
--- a/src/library/scala/collection/generic/SortedMapFactory.scala
+++ b/src/library/scala/collection/generic/SortedMapFactory.scala
@@ -24,7 +24,7 @@ abstract class SortedMapFactory[CC[A, B] <: SortedMap[A, B] with SortedMapLike[A
def empty[A, B](implicit ord: Ordering[A]): CC[A, B]
- def apply[A, B](elems: (A, B)*)(implicit ord: Ordering[A]): CC[A, B] = (newBuilder[A, B](ord) ++= elems).result
+ def apply[A, B](elems: (A, B)*)(implicit ord: Ordering[A]): CC[A, B] = (newBuilder[A, B](ord) ++= elems).result()
def newBuilder[A, B](implicit ord: Ordering[A]): Builder[(A, B), CC[A, B]] =
new MapBuilder[A, B, CC[A, B]](empty(ord))
diff --git a/src/library/scala/collection/generic/SortedSetFactory.scala b/src/library/scala/collection/generic/SortedSetFactory.scala
index 08bca04e42..f48e1c69e1 100644
--- a/src/library/scala/collection/generic/SortedSetFactory.scala
+++ b/src/library/scala/collection/generic/SortedSetFactory.scala
@@ -23,11 +23,11 @@ abstract class SortedSetFactory[CC[A] <: SortedSet[A] with SortedSetLike[A, CC[A
def empty[A](implicit ord: Ordering[A]): CC[A]
- def apply[A](elems: A*)(implicit ord: Ordering[A]): CC[A] = (newBuilder[A](ord) ++= elems).result
+ def apply[A](elems: A*)(implicit ord: Ordering[A]): CC[A] = (newBuilder[A](ord) ++= elems).result()
def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]] = new SetBuilder[A, CC[A]](empty)
- implicit def newCanBuildFrom[A](implicit ord : Ordering[A]) : CanBuildFrom[Coll, A, CC[A]] = new SortedSetCanBuildFrom()(ord);
+ implicit def newCanBuildFrom[A](implicit ord : Ordering[A]) : CanBuildFrom[Coll, A, CC[A]] = new SortedSetCanBuildFrom()(ord)
class SortedSetCanBuildFrom[A](implicit ord: Ordering[A]) extends CanBuildFrom[Coll, A, CC[A]] {
def apply(from: Coll) = newBuilder[A](ord)
diff --git a/src/library/scala/collection/immutable/BitSet.scala b/src/library/scala/collection/immutable/BitSet.scala
index ed3630edc1..1da6edd740 100644
--- a/src/library/scala/collection/immutable/BitSet.scala
+++ b/src/library/scala/collection/immutable/BitSet.scala
@@ -31,9 +31,6 @@ abstract class BitSet extends scala.collection.AbstractSet[Int]
with Serializable {
override def empty = BitSet.empty
- @deprecated("Use BitSet.fromBitMask[NoCopy] instead of fromArray", "2.10.0")
- def fromArray(elems: Array[Long]): BitSet = fromBitMaskNoCopy(elems)
-
protected def fromBitMaskNoCopy(elems: Array[Long]): BitSet = BitSet.fromBitMaskNoCopy(elems)
/** Update word at index `idx`; enlarge set if `idx` outside range of set.
@@ -74,7 +71,7 @@ object BitSet extends BitSetFactory[BitSet] {
def newBuilder: Builder[Int, BitSet] = new Builder[Int, BitSet] {
private[this] val b = new mutable.BitSet
def += (x: Int) = { b += x; this }
- def clear() = b.clear
+ def clear() = b.clear()
def result() = b.toImmutable
}
@@ -82,10 +79,6 @@ object BitSet extends BitSetFactory[BitSet] {
implicit def canBuildFrom: CanBuildFrom[BitSet, Int, BitSet] = bitsetCanBuildFrom
/** A bitset containing all the bits in an array */
- @deprecated("Use fromBitMask[NoCopy] instead of fromArray", "2.10.0")
- def fromArray(elems: Array[Long]): BitSet = fromBitMaskNoCopy(elems)
-
- /** A bitset containing all the bits in an array */
def fromBitMask(elems: Array[Long]): BitSet = {
val len = elems.length
if (len == 0) empty
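
With the deprecated `fromArray` gone, the bit-mask factories are the way to build a `BitSet` from packed words, for example:

    import scala.collection.immutable.BitSet

    val words = Array(5L)              // binary 101: bits 0 and 2
    BitSet.fromBitMask(words)          // BitSet(0, 2), copies the array
    BitSet.fromBitMaskNoCopy(words)    // same contents, but keeps a reference to `words`
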
diff --git a/src/library/scala/collection/immutable/DefaultMap.scala b/src/library/scala/collection/immutable/DefaultMap.scala
index 4a0503adfd..5ae5ef66fb 100755
--- a/src/library/scala/collection/immutable/DefaultMap.scala
+++ b/src/library/scala/collection/immutable/DefaultMap.scala
@@ -6,13 +6,9 @@
** |/ **
\* */
-
-
package scala.collection
package immutable
-import generic._
-
/** A default map which implements the `+` and `-`
* methods of maps. It does so using the default builder for
* maps defined in the `Map` object.
@@ -42,7 +38,7 @@ trait DefaultMap[A, +B] extends Map[A, B] { self =>
val b = Map.newBuilder[A, B1]
b ++= this
b += ((kv._1, kv._2))
- b.result
+ b.result()
}
/** A default implementation which creates a new immutable map.
@@ -50,7 +46,7 @@ trait DefaultMap[A, +B] extends Map[A, B] { self =>
override def - (key: A): Map[A, B] = {
val b = newBuilder
for (kv <- this.seq ; if kv._1 != key) b += kv
- b.result
+ b.result()
}
}
diff --git a/src/library/scala/collection/immutable/GenIterable.scala.disabled b/src/library/scala/collection/immutable/GenIterable.scala.disabled
deleted file mode 100644
index d34f7fd856..0000000000
--- a/src/library/scala/collection/immutable/GenIterable.scala.disabled
+++ /dev/null
@@ -1,37 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package immutable
-
-
-import generic._
-import mutable.Builder
-
-
-/** A base trait for iterable collections that can be mutated.
- *
- * $possiblyparinfo
- *
- * $iterableInfo
- */
-trait GenIterable[+A] extends GenTraversable[A]
- with scala.collection.GenIterable[A]
- with scala.collection.GenIterableLike[A, GenIterable[A]]
-// with GenericTraversableTemplate[A, GenIterable]
-{
- def seq: Iterable[A]
- //override def companion: GenericCompanion[GenIterable] = GenIterable
-}
-
-
-// object GenIterable extends TraversableFactory[GenIterable] {
-// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenIterable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-// def newBuilder[A]: Builder[A, GenIterable[A]] = Iterable.newBuilder
-// }
-
diff --git a/src/library/scala/collection/immutable/GenMap.scala.disabled b/src/library/scala/collection/immutable/GenMap.scala.disabled
deleted file mode 100644
index 73557a4a66..0000000000
--- a/src/library/scala/collection/immutable/GenMap.scala.disabled
+++ /dev/null
@@ -1,36 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package immutable
-
-import generic._
-
-
-/** A base trait for maps that can be mutated.
- * $possiblyparinfo
- * $mapNote
- * $mapTags
- * @since 1.0
- * @author Matthias Zenger
- */
-trait GenMap[A, +B]
-extends GenIterable[(A, B)]
- with scala.collection.GenMap[A, B]
- with scala.collection.GenMapLike[A, B, GenMap[A, B]]
-{
- def seq: Map[A, B]
-}
-
-
-// object GenMap extends MapFactory[GenMap] {
-// def empty[A, B]: Map[A, B] = Map.empty
-
-// /** $mapCanBuildFromInfo */
-// implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), GenMap[A, B]] = new MapCanBuildFrom[A, B]
-// }
diff --git a/src/library/scala/collection/immutable/GenSeq.scala.disabled b/src/library/scala/collection/immutable/GenSeq.scala.disabled
deleted file mode 100644
index 713529f3db..0000000000
--- a/src/library/scala/collection/immutable/GenSeq.scala.disabled
+++ /dev/null
@@ -1,49 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.collection
-package immutable
-
-
-import generic._
-import mutable.Builder
-
-
-/** A subtrait of `collection.GenSeq` which represents sequences
- * that can be mutated.
- *
- * $possiblyparinfo
- *
- * $seqInfo
- *
- * The class adds an `update` method to `collection.Seq`.
- *
- * @define Coll `mutable.Seq`
- * @define coll mutable sequence
- */
-trait GenSeq[+A] extends GenIterable[A]
- with scala.collection.GenSeq[A]
- with scala.collection.GenSeqLike[A, GenSeq[A]]
-// with GenericTraversableTemplate[A, GenSeq]
-{
- def seq: Seq[A]
- //override def companion: GenericCompanion[GenSeq] = GenSeq
-}
-
-
-// object GenSeq extends SeqFactory[GenSeq] {
-// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-// def newBuilder[A]: Builder[A, GenSeq[A]] = Seq.newBuilder
-// }
-
-
-
-
-
diff --git a/src/library/scala/collection/immutable/GenSet.scala.disabled b/src/library/scala/collection/immutable/GenSet.scala.disabled
deleted file mode 100644
index 56bd2738fd..0000000000
--- a/src/library/scala/collection/immutable/GenSet.scala.disabled
+++ /dev/null
@@ -1,43 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.collection
-package immutable
-
-
-import generic._
-import mutable.Builder
-
-
-/** A generic trait for mutable sets.
- *
- * $possiblyparinfo
- * $setNote
- * $setTags
- *
- * @since 1.0
- * @author Matthias Zenger
- * @define Coll `mutable.Set`
- * @define coll mutable set
- */
-trait GenSet[A] extends GenIterable[A]
- with scala.collection.GenSet[A]
- with scala.collection.GenSetLike[A, GenSet[A]]
-// with GenericSetTemplate[A, GenSet]
-{
- //override def companion: GenericCompanion[GenSet] = GenSet
- def seq: Set[A]
-}
-
-
-// object GenSet extends TraversableFactory[GenSet] {
-// implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-// def newBuilder[A] = Set.newBuilder
-// }
diff --git a/src/library/scala/collection/immutable/GenTraversable.scala.disabled b/src/library/scala/collection/immutable/GenTraversable.scala.disabled
deleted file mode 100644
index e5b609f9ed..0000000000
--- a/src/library/scala/collection/immutable/GenTraversable.scala.disabled
+++ /dev/null
@@ -1,41 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.collection
-package immutable
-
-
-import generic._
-import mutable.Builder
-
-
-/** A trait for traversable collections that can be mutated.
- *
- * $possiblyparinfo
- *
- * $traversableInfo
- * @define mutability mutable
- */
-trait GenTraversable[+A] extends scala.collection.GenTraversable[A]
- with scala.collection.GenTraversableLike[A, GenTraversable[A]]
-// with GenericTraversableTemplate[A, GenTraversable]
- with Mutable
-{
- def seq: Traversable[A]
- //override def companion: GenericCompanion[GenTraversable] = GenTraversable
-}
-
-
-// object GenTraversable extends TraversableFactory[GenTraversable] {
-// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenTraversable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-// def newBuilder[A]: Builder[A, GenTraversable[A]] = Traversable.newBuilder
-// }
-
-
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index 84416a62d2..44e5304e09 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -87,9 +87,6 @@ class HashMap[A, +B] extends AbstractMap[A, B]
def split: Seq[HashMap[A, B]] = Seq(this)
- @deprecated("Use the `merged` method instead.", "2.10.0")
- def merge[B1 >: B](that: HashMap[A, B1], mergef: MergeFunction[A, B1] = null): HashMap[A, B1] = merge0(that, 0, liftMerger(mergef))
-
/** Creates a new map which is the merge of this and the argument hash map.
*
* Uses the specified collision resolution function if two keys are the same.
@@ -398,7 +395,7 @@ time { mNew.iterator.foreach( p => ()) }
*/
override def foreach[U](f: ((A, B)) => U): Unit = {
- var i = 0;
+ var i = 0
while (i < elems.length) {
elems(i).foreach(f)
i += 1
@@ -471,9 +468,6 @@ time { mNew.iterator.foreach( p => ()) }
// condition below is due to 2 things:
// 1) no unsigned int compare on JVM
// 2) 0 (no lsb) should always be greater in comparison
- val a = thislsb - 1
- val b = thatlsb - 1
-
if (unsignedCompare(thislsb - 1, thatlsb - 1)) {
val m = thiselems(thisi)
totalelems += m.size
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index 87995f705f..e17f07c87b 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -301,8 +301,8 @@ time { mNew.iterator.foreach( p => ()) }
*/
override def foreach[U](f: A => U): Unit = {
- var i = 0;
- while (i < elems.length) {
+ var i = 0
+ while (i < elems.length) {
elems(i).foreach(f)
i += 1
}
diff --git a/src/library/scala/collection/immutable/IndexedSeq.scala b/src/library/scala/collection/immutable/IndexedSeq.scala
index 96414c07ef..9316ff5e72 100644
--- a/src/library/scala/collection/immutable/IndexedSeq.scala
+++ b/src/library/scala/collection/immutable/IndexedSeq.scala
@@ -31,14 +31,13 @@ trait IndexedSeq[+A] extends Seq[A]
* @define coll indexed sequence
* @define Coll `IndexedSeq`
*/
-object IndexedSeq extends SeqFactory[IndexedSeq] {
- override lazy val ReusableCBF =
- scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[Nothing]]
+object IndexedSeq extends IndexedSeqFactory[IndexedSeq] {
class Impl[A](buf: ArrayBuffer[A]) extends AbstractSeq[A] with IndexedSeq[A] with Serializable {
def length = buf.length
def apply(idx: Int) = buf.apply(idx)
}
def newBuilder[A]: Builder[A, IndexedSeq[A]] = Vector.newBuilder[A]
+
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] =
ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
}
diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala
index ab1faf363e..07e2ddaae2 100644
--- a/src/library/scala/collection/immutable/IntMap.scala
+++ b/src/library/scala/collection/immutable/IntMap.scala
@@ -12,6 +12,7 @@ package immutable
import scala.collection.generic.{ CanBuildFrom, BitOperations }
import scala.collection.mutable.{ Builder, MapBuilder }
+import scala.annotation.tailrec
/** Utility class for integer maps.
* @author David MacIver
@@ -50,8 +51,10 @@ object IntMap {
def apply(): Builder[(Int, B), IntMap[B]] = new MapBuilder[Int, B, IntMap[B]](empty[B])
}
- def empty[T] : IntMap[T] = IntMap.Nil;
- def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value);
+ def empty[T] : IntMap[T] = IntMap.Nil
+
+ def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value)
+
def apply[T](elems: (Int, T)*): IntMap[T] =
elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2))
@@ -427,6 +430,7 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T]
/**
* The entry with the lowest key value considered in unsigned order.
*/
+ @tailrec
final def firstKey: Int = this match {
case Bin(_, _, l, r) => l.firstKey
case Tip(k, v) => k
@@ -436,6 +440,7 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T]
/**
* The entry with the highest key value considered in unsigned order.
*/
+ @tailrec
final def lastKey: Int = this match {
case Bin(_, _, l, r) => r.lastKey
case Tip(k, v) => k
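
`firstKey`/`lastKey` walk a single spine of the trie (hence the added `@tailrec`), and "lowest/highest" is in unsigned order, so negative keys sort last:

    import scala.collection.immutable.IntMap

    val m = IntMap(1 -> "a", 3 -> "c", -1 -> "z")
    m.firstKey   // 1
    m.lastKey    // -1  (0xFFFFFFFF, the largest key when compared as unsigned)
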
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 2d6952ff92..be233d06cb 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -158,7 +158,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
* @usecase def mapConserve(f: A => A): List[A]
* @inheritdoc
*/
- def mapConserve[B >: A <: AnyRef](f: A => B): List[B] = {
+ @inline final def mapConserve[B >: A <: AnyRef](f: A => B): List[B] = {
@tailrec
def loop(mapped: ListBuffer[B], unchanged: List[A], pending: List[A]): List[B] =
if (pending.isEmpty) {
@@ -263,7 +263,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
(b.toList, these)
}
- override def takeWhile(p: A => Boolean): List[A] = {
+ @inline final override def takeWhile(p: A => Boolean): List[A] = {
val b = new ListBuffer[A]
var these = this
while (!these.isEmpty && p(these.head)) {
@@ -273,7 +273,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
b.toList
}
- override def dropWhile(p: A => Boolean): List[A] = {
+ @inline final override def dropWhile(p: A => Boolean): List[A] = {
@tailrec
def loop(xs: List[A]): List[A] =
if (xs.isEmpty || !p(xs.head)) xs
@@ -282,7 +282,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
loop(this)
}
- override def span(p: A => Boolean): (List[A], List[A]) = {
+ @inline final override def span(p: A => Boolean): (List[A], List[A]) = {
val b = new ListBuffer[A]
var these = this
while (!these.isEmpty && p(these.head)) {
@@ -292,6 +292,16 @@ sealed abstract class List[+A] extends AbstractSeq[A]
(b.toList, these)
}
+ // Overridden with an implementation identical to the inherited one (at this time)
+ // solely so it can be finalized and thus inlinable.
+ @inline final override def foreach[U](f: A => U) {
+ var these = this
+ while (!these.isEmpty) {
+ f(these.head)
+ these = these.tail
+ }
+ }
+
override def reverse: List[A] = {
var result: List[A] = Nil
var these = this
@@ -301,7 +311,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
}
result
}
-
+
override def foldRight[B](z: B)(op: (A, B) => B): B =
reverse.foldLeft(z)((right, left) => op(left, right))
@@ -310,18 +320,6 @@ sealed abstract class List[+A] extends AbstractSeq[A]
override def toStream : Stream[A] =
if (isEmpty) Stream.Empty
else new Stream.Cons(head, tail.toStream)
-
- @inline override final
- def foreach[B](f: A => B) {
- var these = this
- while (!these.isEmpty) {
- f(these.head)
- these = these.tail
- }
- }
-
- @deprecated("use `distinct` instead", "2.8.0")
- def removeDuplicates: List[A] = distinct
}
/** The empty list.
@@ -386,9 +384,6 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
* @define Coll `List`
*/
object List extends SeqFactory[List] {
-
- import scala.collection.{Iterable, Seq, IndexedSeq}
-
/** $genericCanBuildFromInfo */
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, List[A]] =
ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
@@ -398,255 +393,9 @@ object List extends SeqFactory[List] {
override def empty[A]: List[A] = Nil
override def apply[A](xs: A*): List[A] = xs.toList
-
- /** Create a sorted list with element values `v,,>n+1,, = step(v,,n,,)`
- * where `v,,0,, = start` and elements are in the range between `start`
- * (inclusive) and `end` (exclusive).
- *
- * @param start the start value of the list
- * @param end the end value of the list
- * @param step the increment function of the list, which given `v,,n,,`,
- * computes `v,,n+1,,`. Must be monotonically increasing
- * or decreasing.
- * @return the sorted list of all integers in range `[start;end)`.
- */
- @deprecated("use `iterate` instead", "2.8.0")
- def range(start: Int, end: Int, step: Int => Int): List[Int] = {
- val up = step(start) > start
- val down = step(start) < start
- val b = new ListBuffer[Int]
- var i = start
- while ((!up || i < end) && (!down || i > end)) {
- b += i
- val next = step(i)
- if (i == next)
- throw new IllegalArgumentException("the step function did not make any progress on "+ i)
- i = next
- }
- b.toList
- }
-
- /** Create a list containing several copies of an element.
- *
- * @param n the length of the resulting list
- * @param elem the element composing the resulting list
- * @return a list composed of `n` elements all equal to `elem`
- */
- @deprecated("use `fill` instead", "2.8.0")
- def make[A](n: Int, elem: A): List[A] = {
- val b = new ListBuffer[A]
- var i = 0
- while (i < n) {
- b += elem
- i += 1
- }
- b.toList
- }
-
- /** Concatenate all the elements of a given list of lists.
- *
- * @param xss the list of lists that are to be concatenated
- * @return the concatenation of all the lists
- */
- @deprecated("use `xss.flatten` instead of `List.flatten(xss)`", "2.8.0")
- def flatten[A](xss: List[List[A]]): List[A] = {
- val b = new ListBuffer[A]
- for (xs <- xss) {
- var xc = xs
- while (!xc.isEmpty) {
- b += xc.head
- xc = xc.tail
- }
- }
- b.toList
- }
-
- /** Transforms a list of pairs into a pair of lists.
- *
- * @param xs the list of pairs to unzip
- * @return a pair of lists.
- */
- @deprecated("use `xs.unzip` instead of `List.unzip(xs)`", "2.8.0")
- def unzip[A,B](xs: List[(A,B)]): (List[A], List[B]) = {
- val b1 = new ListBuffer[A]
- val b2 = new ListBuffer[B]
- var xc = xs
- while (!xc.isEmpty) {
- b1 += xc.head._1
- b2 += xc.head._2
- xc = xc.tail
- }
- (b1.toList, b2.toList)
- }
-
- /** Transforms an iterable of pairs into a pair of lists.
- *
- * @param xs the iterable of pairs to unzip
- * @return a pair of lists.
- */
- @deprecated("use `xs.unzip` instead of `List.unzip(xs)`", "2.8.0")
- def unzip[A,B](xs: Iterable[(A,B)]): (List[A], List[B]) =
- xs.foldRight[(List[A], List[B])]((Nil, Nil)) {
- case ((x, y), (xs, ys)) => (x :: xs, y :: ys)
- }
-
- /**
- * Returns the `Left` values in the given `Iterable` of `Either`s.
- */
- @deprecated("use `xs collect { case Left(x: A) => x }` instead of `List.lefts(xs)`", "2.8.0")
- def lefts[A, B](es: Iterable[Either[A, B]]) =
- es.foldRight[List[A]](Nil)((e, as) => e match {
- case Left(a) => a :: as
- case Right(_) => as
- })
-
- /**
- * Returns the `Right` values in the given `Iterable` of `Either`s.
- */
- @deprecated("use `xs collect { case Right(x: B) => x }` instead of `List.rights(xs)`", "2.8.0")
- def rights[A, B](es: Iterable[Either[A, B]]) =
- es.foldRight[List[B]](Nil)((e, bs) => e match {
- case Left(_) => bs
- case Right(b) => b :: bs
- })
-
- /** Transforms an Iterable of Eithers into a pair of lists.
- *
- * @param es the iterable of Eithers to separate
- * @return a pair of lists.
- */
- @deprecated("use `(for (Left(x) <- es) yield x, for (Right(x) <- es) yield x)` instead", "2.8.0")
- def separate[A,B](es: Iterable[Either[A, B]]): (List[A], List[B]) =
- es.foldRight[(List[A], List[B])]((Nil, Nil)) {
- case (Left(a), (lefts, rights)) => (a :: lefts, rights)
- case (Right(b), (lefts, rights)) => (lefts, b :: rights)
- }
-
- /** Converts an iterator to a list.
- *
- * @param it the iterator to convert
- * @return a list that contains the elements returned by successive
- * calls to `it.next`
- */
- @deprecated("use `it.toList` instead of `List.toList(it)`", "2.8.0")
- def fromIterator[A](it: Iterator[A]): List[A] = it.toList
-
- /** Converts an array into a list.
- *
- * @param arr the array to convert
- * @return a list that contains the same elements than `arr`
- * in the same order
- */
- @deprecated("use `array.toList` instead of `List.fromArray(array)`", "2.8.0")
- def fromArray[A](arr: Array[A]): List[A] = fromArray(arr, 0, arr.length)
-
- /** Converts a range of an array into a list.
- *
- * @param arr the array to convert
- * @param start the first index to consider
- * @param len the length of the range to convert
- * @return a list that contains the same elements than `arr`
- * in the same order
- */
- @deprecated("use `array.view(start, end).toList` instead of `List.fromArray(array, start, end)`", "2.8.0")
- def fromArray[A](arr: Array[A], start: Int, len: Int): List[A] = {
- var res: List[A] = Nil
- var i = start + len
- while (i > start) {
- i -= 1
- res = arr(i) :: res
- }
- res
- }
-
- /** Returns the list resulting from applying the given function `f`
- * to corresponding elements of the argument lists.
- *
- * @param f function to apply to each pair of elements.
- * @return `[f(a,,0,,,b,,0,,), ..., f(a,,n,,,b,,n,,)]` if the lists are
- * `[a,,0,,, ..., a,,k,,]`, `[b,,0,,, ..., b,,l,,]` and
- * `n = min(k,l)`
- */
- @deprecated("use `(xs, ys).zipped.map(f)` instead of `List.map2(xs, ys)(f)`", "2.8.0")
- def map2[A,B,C](xs: List[A], ys: List[B])(f: (A, B) => C): List[C] = {
- val b = new ListBuffer[C]
- var xc = xs
- var yc = ys
- while (!xc.isEmpty && !yc.isEmpty) {
- b += f(xc.head, yc.head)
- xc = xc.tail
- yc = yc.tail
- }
- b.toList
- }
-
- /** Tests whether the given predicate `p` holds
- * for all corresponding elements of the argument lists.
- *
- * @param f function to apply to each pair of elements.
- * @return `(p(a<sub>0</sub>,b<sub>0</sub>) &amp;&amp;
- * ... &amp;&amp; p(a<sub>n</sub>,b<sub>n</sub>))]`
- * if the lists are `[a<sub>0</sub>, ..., a<sub>k</sub>]`;
- * `[b<sub>0</sub>, ..., b<sub>l</sub>]`
- * and `n = min(k,l)`
- */
- @deprecated("use `(xs, ys).zipped.forall(f)` instead of `List.forall2(xs, ys)(f)`", "2.8.0")
- def forall2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
- var xc = xs
- var yc = ys
- while (!xc.isEmpty && !yc.isEmpty) {
- if (!f(xc.head, yc.head)) return false
- xc = xc.tail
- yc = yc.tail
- }
- true
- }
-
- /** Tests whether the given predicate `p` holds
- * for some corresponding elements of the argument lists.
- *
- * @param f function to apply to each pair of elements.
- * @return `n != 0 &amp;&amp; (p(a<sub>0</sub>,b<sub>0</sub>) ||
- * ... || p(a<sub>n</sub>,b<sub>n</sub>))]` if the lists are
- * `[a<sub>0</sub>, ..., a<sub>k</sub>]`,
- * `[b<sub>0</sub>, ..., b<sub>l</sub>]` and
- * `n = min(k,l)`
- */
- @deprecated("use `(xs, ys).zipped.exists(f)` instead of `List.exists2(xs, ys)(f)`", "2.8.0")
- def exists2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
- var xc = xs
- var yc = ys
- while (!xc.isEmpty && !yc.isEmpty) {
- if (f(xc.head, yc.head)) return true
- xc = xc.tail
- yc = yc.tail
- }
- false
- }
-
- /** Transposes a list of lists.
- * pre: All element lists have the same length.
- *
- * @param xss the list of lists
- * @return the transposed list of lists
- */
- @deprecated("use `xss.transpose` instead of `List.transpose(xss)`", "2.8.0")
- def transpose[A](xss: List[List[A]]): List[List[A]] = {
- val buf = new ListBuffer[List[A]]
- var yss = xss
- while (!yss.head.isEmpty) {
- buf += (yss map (_.head))
- yss = (yss map (_.tail))
- }
- buf.toList
- }
}
/** Only used for list serialization */
-@SerialVersionUID(0L - 8287891243975527522L)
-private[scala] case object ListSerializeStart
-
-/** Only used for list serialization */
@SerialVersionUID(0L - 8476791151975527571L)
private[scala] case object ListSerializeEnd
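
Several `List` methods above are made `@inline final` purely so the optimizer may inline them; `mapConserve` additionally returns the receiver unchanged when the function maps every element to itself by reference, e.g.:

    val xs = List("a", "b", "c")
    val same   = xs.mapConserve(identity)                         // same eq xs: no new list allocated
    val bumped = xs.mapConserve(s => if (s == "b") "B" else s)    // List(a, B, c)
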
diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala
index 6cf6c4259e..def3d7eb23 100644
--- a/src/library/scala/collection/immutable/ListSet.scala
+++ b/src/library/scala/collection/immutable/ListSet.scala
@@ -75,7 +75,7 @@ class ListSet[A] extends AbstractSet[A]
* @return number of set elements.
*/
override def size: Int = 0
- override def isEmpty: Boolean = true;
+ override def isEmpty: Boolean = true
/** Checks if this set contains element `elem`.
*
@@ -100,7 +100,7 @@ class ListSet[A] extends AbstractSet[A]
*/
override def ++(xs: GenTraversableOnce[A]): ListSet[A] =
if (xs.isEmpty) this
- else (new ListSet.ListSetBuilder(this) ++= xs.seq).result
+ else (new ListSet.ListSetBuilder(this) ++= xs.seq).result()
private[ListSet] def unchecked_+(e: A): ListSet[A] = new Node(e)
private[ListSet] def unchecked_outer: ListSet[A] =
@@ -120,18 +120,18 @@ class ListSet[A] extends AbstractSet[A]
that = that.tail
res
}
- else Iterator.empty.next
+ else Iterator.empty.next()
}
/**
* @throws Predef.NoSuchElementException
*/
- override def head: A = throw new NoSuchElementException("Set has no elements");
+ override def head: A = throw new NoSuchElementException("Set has no elements")
/**
* @throws Predef.NoSuchElementException
*/
- override def tail: ListSet[A] = throw new NoSuchElementException("Next of an empty set");
+ override def tail: ListSet[A] = throw new NoSuchElementException("Next of an empty set")
override def stringPrefix = "ListSet"
diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala
index 2a2910439a..506546c5ba 100644
--- a/src/library/scala/collection/immutable/LongMap.scala
+++ b/src/library/scala/collection/immutable/LongMap.scala
@@ -12,6 +12,7 @@ package immutable
import scala.collection.generic.{ CanBuildFrom, BitOperations }
import scala.collection.mutable.{ Builder, MapBuilder }
+import scala.annotation.tailrec
/** Utility class for long maps.
* @author David MacIver
@@ -77,8 +78,6 @@ object LongMap {
}
}
-import LongMap._
-
// Iterator over a non-empty LongMap.
private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends AbstractIterator[T] {
@@ -98,7 +97,7 @@ private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends
buffer(index) = x.asInstanceOf[AnyRef]
index += 1
}
- push(it);
+ push(it)
/**
* What value do we assign to a tip?
@@ -179,7 +178,7 @@ extends AbstractMap[Long, T]
*/
override final def foreach[U](f: ((Long, T)) => U): Unit = this match {
case LongMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) }
- case LongMap.Tip(key, value) => f((key, value));
+ case LongMap.Tip(key, value) => f((key, value))
case LongMap.Nil =>
}
@@ -418,5 +417,20 @@ extends AbstractMap[Long, T]
def ++[S >: T](that: LongMap[S]) =
this.unionWith[S](that, (key, x, y) => y)
+
+ @tailrec
+ final def firstKey: Long = this match {
+ case LongMap.Bin(_, _, l, r) => l.firstKey
+ case LongMap.Tip(k, v) => k
+ case LongMap.Nil => sys.error("Empty set")
+ }
+
+ @tailrec
+ final def lastKey: Long = this match {
+ case LongMap.Bin(_, _, l, r) => r.lastKey
+ case LongMap.Tip(k , v) => k
+ case LongMap.Nil => sys.error("Empty set")
+ }
+
}
diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala
index 7e60f07847..1c2ab1c662 100644
--- a/src/library/scala/collection/immutable/MapLike.scala
+++ b/src/library/scala/collection/immutable/MapLike.scala
@@ -123,7 +123,7 @@ self =>
def transform[C, That](f: (A, B) => C)(implicit bf: CanBuildFrom[This, (A, C), That]): That = {
val b = bf(repr)
for ((key, value) <- this) b += ((key, f(key, value)))
- b.result
+ b.result()
}
}
diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala
index d3be299f89..195aeed281 100644
--- a/src/library/scala/collection/immutable/NumericRange.scala
+++ b/src/library/scala/collection/immutable/NumericRange.scala
@@ -6,12 +6,10 @@
** |/ **
\* */
-
package scala.collection
package immutable
import mutable.{ Builder, ListBuffer }
-import generic._
/** `NumericRange` is a more generic version of the
* `Range` class which works with arbitrary types.
@@ -81,17 +79,6 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
// to guard against any (most likely illusory) performance drop. They should
// be eliminated one way or another.
- // Counts how many elements from the start meet the given test.
- private def skipCount(p: T => Boolean): Int = {
- var current = start
- var counted = 0
-
- while (counted < length && p(current)) {
- counted += 1
- current += step
- }
- counted
- }
// Tests whether a number is within the endpoints, without testing
// whether it is a member of the sequence (i.e. when step > 1.)
private def isWithinBoundaries(elem: T) = !isEmpty && (
@@ -124,21 +111,21 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
if (idx < 0 || idx >= length) throw new IndexOutOfBoundsException(idx.toString)
else locationAfterN(idx)
}
-
+
import NumericRange.defaultOrdering
-
+
override def min[T1 >: T](implicit ord: Ordering[T1]): T =
if (ord eq defaultOrdering(num)) {
if (num.signum(step) > 0) start
else last
} else super.min(ord)
-
- override def max[T1 >: T](implicit ord: Ordering[T1]): T =
+
+ override def max[T1 >: T](implicit ord: Ordering[T1]): T =
if (ord eq defaultOrdering(num)) {
if (num.signum(step) > 0) last
else start
} else super.max(ord)
-
+
// Motivated by the desire for Double ranges with BigDecimal precision,
// we need some way to map a Range and get another Range. This can't be
// done in any fully general way because Ranges are not arbitrary
@@ -182,12 +169,11 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
def containsTyped(x: T): Boolean =
isWithinBoundaries(x) && (((x - start) % step) == zero)
- override def contains(x: Any): Boolean =
+ override def contains[A1 >: T](x: A1): Boolean =
try containsTyped(x.asInstanceOf[T])
catch { case _: ClassCastException => false }
final override def sum[B >: T](implicit num: Numeric[B]): B = {
- import num.Ops
if (isEmpty) this.num fromInt 0
else if (numRangeElements == 1) head
else ((this.num fromInt numRangeElements) * (head + last) / (this.num fromInt 2))
@@ -213,7 +199,7 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
/** A companion object for numeric ranges.
*/
object NumericRange {
-
+
/** Calculates the number of elements in a range given start, end, step, and
* whether or not it is inclusive. Throws an exception if step == 0 or
* the number of elements exceeds the maximum Int.
@@ -272,7 +258,7 @@ object NumericRange {
new Exclusive(start, end, step)
def inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]): Inclusive[T] =
new Inclusive(start, end, step)
-
+
private[collection] val defaultOrdering = Map[Numeric[_], Ordering[_]](
Numeric.BigIntIsIntegral -> Ordering.BigInt,
Numeric.IntIsIntegral -> Ordering.Int,
@@ -284,6 +270,6 @@ object NumericRange {
Numeric.DoubleAsIfIntegral -> Ordering.Double,
Numeric.BigDecimalAsIfIntegral -> Ordering.BigDecimal
)
-
+
}
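
The `sum` shown above uses the arithmetic-series closed form n * (head + last) / 2 rather than folding over the elements, so summing a numeric range is constant-time:

    val r = 1L to 1000000L     // NumericRange.Inclusive[Long]
    r.sum                      // 500000500000, no element is ever visited
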
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index 952107bf78..4069f6f0e4 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -30,7 +30,7 @@ object PagedSeq {
new PagedSeq[T]((data: Array[T], start: Int, len: Int) => {
var i = 0
while (i < len && source.hasNext) {
- data(start + i) = source.next
+ data(start + i) = source.next()
i += 1
}
if (i == 0) -1 else i
@@ -51,7 +51,7 @@ object PagedSeq {
if (cnt == len) cnt
else (more(data, start + cnt, len - cnt) max 0) + cnt
} else if (source.hasNext) {
- current = source.next
+ current = source.next()
more(data, start, len)
} else -1
new PagedSeq(more(_: Array[Char], _: Int, _: Int))
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index 802e16605d..243e3fcb91 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -77,6 +77,7 @@ extends scala.collection.AbstractSeq[Int]
final val terminalElement = start + numRangeElements * step
override def last = if (isEmpty) Nil.last else lastElement
+ override def head = if (isEmpty) Nil.head else start
override def min[A1 >: Int](implicit ord: Ordering[A1]): Int =
if (ord eq Ordering.Int) {
@@ -111,6 +112,7 @@ extends scala.collection.AbstractSeq[Int]
fail()
}
+ @deprecated("Range.foreach() is now self-contained, making this auxiliary method redundant.", "2.10.1")
def validateRangeBoundaries(f: Int => Any): Boolean = {
validateMaxLength()
@@ -133,14 +135,19 @@ extends scala.collection.AbstractSeq[Int]
}
@inline final override def foreach[@specialized(Unit) U](f: Int => U) {
- if (validateRangeBoundaries(f)) {
- var i = start
- val terminal = terminalElement
- val step = this.step
- while (i != terminal) {
- f(i)
- i += step
- }
+ validateMaxLength()
+ val isCommonCase = (start != Int.MinValue || end != Int.MinValue)
+ var i = start
+ var count = 0
+ val terminal = terminalElement
+ val step = this.step
+ while(
+ if(isCommonCase) { i != terminal }
+ else { count < numRangeElements }
+ ) {
+ f(i)
+ count += 1
+ i += step
}
}
@@ -325,7 +332,7 @@ object Range {
}
}
def count(start: Int, end: Int, step: Int): Int =
- count(start, end, step, false)
+ count(start, end, step, isInclusive = false)
class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) {
// override def par = new ParRange(this)
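
The reworked `foreach` drops the `validateRangeBoundaries` check and picks one of two termination tests: the precomputed terminal element in the common case, or an element count when both endpoints are `Int.MinValue`. Stripped of the `Range` plumbing, the loop shape is (a sketch only; the parameters mirror the fields used above):

    def foreachSketch(start: Int, step: Int, numRangeElements: Int, terminalElement: Int,
                      isCommonCase: Boolean)(f: Int => Unit): Unit = {
      var i = start
      var count = 0
      while (if (isCommonCase) i != terminalElement else count < numRangeElements) {
        f(i)
        count += 1
        i += step
      }
    }
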
diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala
deleted file mode 100644
index 9739e8f3f3..0000000000
--- a/src/library/scala/collection/immutable/RedBlack.scala
+++ /dev/null
@@ -1,293 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package collection
-package immutable
-
-/** Old base class that was used by previous implementations of `TreeMaps` and `TreeSets`.
- *
- * Deprecated due to various performance bugs (see [[https://issues.scala-lang.org/browse/SI-5331 SI-5331]] for more information).
- *
- * @since 2.3
- */
-@deprecated("use `TreeMap` or `TreeSet` instead", "2.10.0")
-@SerialVersionUID(8691885935445612921L)
-abstract class RedBlack[A] extends Serializable {
-
- def isSmaller(x: A, y: A): Boolean
-
- private def blacken[B](t: Tree[B]): Tree[B] = t match {
- case RedTree(k, v, l, r) => BlackTree(k, v, l, r)
- case t => t
- }
- private def mkTree[B](isBlack: Boolean, k: A, v: B, l: Tree[B], r: Tree[B]) =
- if (isBlack) BlackTree(k, v, l, r) else RedTree(k, v, l, r)
-
- abstract class Tree[+B] extends Serializable {
- def isEmpty: Boolean
- def isBlack: Boolean
- def lookup(x: A): Tree[B]
- def update[B1 >: B](k: A, v: B1): Tree[B1] = blacken(upd(k, v))
- def delete(k: A): Tree[B] = blacken(del(k))
- def range(from: Option[A], until: Option[A]): Tree[B] = blacken(rng(from, until))
- def foreach[U](f: (A, B) => U)
- def toStream: Stream[(A,B)]
- def iterator: Iterator[(A, B)]
- def upd[B1 >: B](k: A, v: B1): Tree[B1]
- def del(k: A): Tree[B]
- def smallest: NonEmpty[B]
- def rng(from: Option[A], until: Option[A]): Tree[B]
- def first : A
- def last : A
- def count : Int
- }
- abstract class NonEmpty[+B] extends Tree[B] with Serializable {
- def isEmpty = false
- def key: A
- def value: B
- def left: Tree[B]
- def right: Tree[B]
- def lookup(k: A): Tree[B] =
- if (isSmaller(k, key)) left.lookup(k)
- else if (isSmaller(key, k)) right.lookup(k)
- else this
- private[this] def balanceLeft[B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[B1], d: Tree[B1])/*: NonEmpty[B1]*/ = l match {
- case RedTree(y, yv, RedTree(x, xv, a, b), c) =>
- RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
- case RedTree(x, xv, a, RedTree(y, yv, b, c)) =>
- RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
- case _ =>
- mkTree(isBlack, z, zv, l, d)
- }
- private[this] def balanceRight[B1 >: B](isBlack: Boolean, x: A, xv: B, a: Tree[B1], r: Tree[B1])/*: NonEmpty[B1]*/ = r match {
- case RedTree(z, zv, RedTree(y, yv, b, c), d) =>
- RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
- case RedTree(y, yv, b, RedTree(z, zv, c, d)) =>
- RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
- case _ =>
- mkTree(isBlack, x, xv, a, r)
- }
- def upd[B1 >: B](k: A, v: B1): Tree[B1] = {
- if (isSmaller(k, key)) balanceLeft(isBlack, key, value, left.upd(k, v), right)
- else if (isSmaller(key, k)) balanceRight(isBlack, key, value, left, right.upd(k, v))
- else mkTree(isBlack, k, v, left, right)
- }
- // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees
- // http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html
- def del(k: A): Tree[B] = {
- def balance(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match {
- case (RedTree(y, yv, a, b), RedTree(z, zv, c, d)) =>
- RedTree(x, xv, BlackTree(y, yv, a, b), BlackTree(z, zv, c, d))
- case (RedTree(y, yv, RedTree(z, zv, a, b), c), d) =>
- RedTree(y, yv, BlackTree(z, zv, a, b), BlackTree(x, xv, c, d))
- case (RedTree(y, yv, a, RedTree(z, zv, b, c)), d) =>
- RedTree(z, zv, BlackTree(y, yv, a, b), BlackTree(x, xv, c, d))
- case (a, RedTree(y, yv, b, RedTree(z, zv, c, d))) =>
- RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
- case (a, RedTree(y, yv, RedTree(z, zv, b, c), d)) =>
- RedTree(z, zv, BlackTree(x, xv, a, b), BlackTree(y, yv, c, d))
- case (a, b) =>
- BlackTree(x, xv, a, b)
- }
- def subl(t: Tree[B]) = t match {
- case BlackTree(x, xv, a, b) => RedTree(x, xv, a, b)
- case _ => sys.error("Defect: invariance violation; expected black, got "+t)
- }
- def balLeft(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match {
- case (RedTree(y, yv, a, b), c) =>
- RedTree(x, xv, BlackTree(y, yv, a, b), c)
- case (bl, BlackTree(y, yv, a, b)) =>
- balance(x, xv, bl, RedTree(y, yv, a, b))
- case (bl, RedTree(y, yv, BlackTree(z, zv, a, b), c)) =>
- RedTree(z, zv, BlackTree(x, xv, bl, a), balance(y, yv, b, subl(c)))
- case _ => sys.error("Defect: invariance violation at "+right)
- }
- def balRight(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match {
- case (a, RedTree(y, yv, b, c)) =>
- RedTree(x, xv, a, BlackTree(y, yv, b, c))
- case (BlackTree(y, yv, a, b), bl) =>
- balance(x, xv, RedTree(y, yv, a, b), bl)
- case (RedTree(y, yv, a, BlackTree(z, zv, b, c)), bl) =>
- RedTree(z, zv, balance(y, yv, subl(a), b), BlackTree(x, xv, c, bl))
- case _ => sys.error("Defect: invariance violation at "+left)
- }
- def delLeft = left match {
- case _: BlackTree[_] => balLeft(key, value, left.del(k), right)
- case _ => RedTree(key, value, left.del(k), right)
- }
- def delRight = right match {
- case _: BlackTree[_] => balRight(key, value, left, right.del(k))
- case _ => RedTree(key, value, left, right.del(k))
- }
- def append(tl: Tree[B], tr: Tree[B]): Tree[B] = (tl, tr) match {
- case (Empty, t) => t
- case (t, Empty) => t
- case (RedTree(x, xv, a, b), RedTree(y, yv, c, d)) =>
- append(b, c) match {
- case RedTree(z, zv, bb, cc) => RedTree(z, zv, RedTree(x, xv, a, bb), RedTree(y, yv, cc, d))
- case bc => RedTree(x, xv, a, RedTree(y, yv, bc, d))
- }
- case (BlackTree(x, xv, a, b), BlackTree(y, yv, c, d)) =>
- append(b, c) match {
- case RedTree(z, zv, bb, cc) => RedTree(z, zv, BlackTree(x, xv, a, bb), BlackTree(y, yv, cc, d))
- case bc => balLeft(x, xv, a, BlackTree(y, yv, bc, d))
- }
- case (a, RedTree(x, xv, b, c)) => RedTree(x, xv, append(a, b), c)
- case (RedTree(x, xv, a, b), c) => RedTree(x, xv, a, append(b, c))
- }
- // RedBlack is neither A : Ordering[A], nor A <% Ordered[A]
- k match {
- case _ if isSmaller(k, key) => delLeft
- case _ if isSmaller(key, k) => delRight
- case _ => append(left, right)
- }
- }
-
- def smallest: NonEmpty[B] = if (left.isEmpty) this else left.smallest
-
- def toStream: Stream[(A,B)] =
- left.toStream ++ Stream((key,value)) ++ right.toStream
-
- def iterator: Iterator[(A, B)] =
- left.iterator ++ Iterator.single(Pair(key, value)) ++ right.iterator
-
- def foreach[U](f: (A, B) => U) {
- left foreach f
- f(key, value)
- right foreach f
- }
-
- override def rng(from: Option[A], until: Option[A]): Tree[B] = {
- if (from == None && until == None) return this
- if (from != None && isSmaller(key, from.get)) return right.rng(from, until);
- if (until != None && (isSmaller(until.get,key) || !isSmaller(key,until.get)))
- return left.rng(from, until);
- val newLeft = left.rng(from, None)
- val newRight = right.rng(None, until)
- if ((newLeft eq left) && (newRight eq right)) this
- else if (newLeft eq Empty) newRight.upd(key, value);
- else if (newRight eq Empty) newLeft.upd(key, value);
- else rebalance(newLeft, newRight)
- }
-
- // The zipper returned might have been traversed left-most (always the left child)
- // or right-most (always the right child). Left trees are traversed right-most,
- // and right trees are traversed leftmost.
-
- // Returns the zipper for the side with deepest black nodes depth, a flag
- // indicating whether the trees were unbalanced at all, and a flag indicating
- // whether the zipper was traversed left-most or right-most.
-
- // If the trees were balanced, returns an empty zipper
- private[this] def compareDepth(left: Tree[B], right: Tree[B]): (List[NonEmpty[B]], Boolean, Boolean, Int) = {
- // Once a side is found to be deeper, unzip it to the bottom
- def unzip(zipper: List[NonEmpty[B]], leftMost: Boolean): List[NonEmpty[B]] = {
- val next = if (leftMost) zipper.head.left else zipper.head.right
- next match {
- case node: NonEmpty[_] => unzip(node :: zipper, leftMost)
- case Empty => zipper
- }
- }
-
- // Unzip left tree on the rightmost side and right tree on the leftmost side until one is
- // found to be deeper, or the bottom is reached
- def unzipBoth(left: Tree[B],
- right: Tree[B],
- leftZipper: List[NonEmpty[B]],
- rightZipper: List[NonEmpty[B]],
- smallerDepth: Int): (List[NonEmpty[B]], Boolean, Boolean, Int) = (left, right) match {
- case (l @ BlackTree(_, _, _, _), r @ BlackTree(_, _, _, _)) =>
- unzipBoth(l.right, r.left, l :: leftZipper, r :: rightZipper, smallerDepth + 1)
- case (l @ RedTree(_, _, _, _), r @ RedTree(_, _, _, _)) =>
- unzipBoth(l.right, r.left, l :: leftZipper, r :: rightZipper, smallerDepth)
- case (_, r @ RedTree(_, _, _, _)) =>
- unzipBoth(left, r.left, leftZipper, r :: rightZipper, smallerDepth)
- case (l @ RedTree(_, _, _, _), _) =>
- unzipBoth(l.right, right, l :: leftZipper, rightZipper, smallerDepth)
- case (Empty, Empty) =>
- (Nil, true, false, smallerDepth)
- case (Empty, r @ BlackTree(_, _, _, _)) =>
- val leftMost = true
- (unzip(r :: rightZipper, leftMost), false, leftMost, smallerDepth)
- case (l @ BlackTree(_, _, _, _), Empty) =>
- val leftMost = false
- (unzip(l :: leftZipper, leftMost), false, leftMost, smallerDepth)
- }
- unzipBoth(left, right, Nil, Nil, 0)
- }
-
- private[this] def rebalance(newLeft: Tree[B], newRight: Tree[B]) = {
- // This is like drop(n-1), but only counting black nodes
- def findDepth(zipper: List[NonEmpty[B]], depth: Int): List[NonEmpty[B]] = zipper match {
- case BlackTree(_, _, _, _) :: tail =>
- if (depth == 1) zipper else findDepth(tail, depth - 1)
- case _ :: tail => findDepth(tail, depth)
- case Nil => sys.error("Defect: unexpected empty zipper while computing range")
- }
-
- // Blackening the smaller tree avoids balancing problems on union;
- // this can't be done later, though, or it would change the result of compareDepth
- val blkNewLeft = blacken(newLeft)
- val blkNewRight = blacken(newRight)
- val (zipper, levelled, leftMost, smallerDepth) = compareDepth(blkNewLeft, blkNewRight)
-
- if (levelled) {
- BlackTree(key, value, blkNewLeft, blkNewRight)
- } else {
- val zipFrom = findDepth(zipper, smallerDepth)
- val union = if (leftMost) {
- RedTree(key, value, blkNewLeft, zipFrom.head)
- } else {
- RedTree(key, value, zipFrom.head, blkNewRight)
- }
- val zippedTree = zipFrom.tail.foldLeft(union: Tree[B]) { (tree, node) =>
- if (leftMost)
- balanceLeft(node.isBlack, node.key, node.value, tree, node.right)
- else
- balanceRight(node.isBlack, node.key, node.value, node.left, tree)
- }
- zippedTree
- }
- }
- def first = if (left .isEmpty) key else left.first
- def last = if (right.isEmpty) key else right.last
- def count = 1 + left.count + right.count
- }
- case object Empty extends Tree[Nothing] {
- def isEmpty = true
- def isBlack = true
- def lookup(k: A): Tree[Nothing] = this
- def upd[B](k: A, v: B): Tree[B] = RedTree(k, v, Empty, Empty)
- def del(k: A): Tree[Nothing] = this
- def smallest: NonEmpty[Nothing] = throw new NoSuchElementException("empty map")
- def iterator: Iterator[(A, Nothing)] = Iterator.empty
- def toStream: Stream[(A,Nothing)] = Stream.empty
-
- def foreach[U](f: (A, Nothing) => U) {}
-
- def rng(from: Option[A], until: Option[A]) = this
- def first = throw new NoSuchElementException("empty map")
- def last = throw new NoSuchElementException("empty map")
- def count = 0
- }
- case class RedTree[+B](override val key: A,
- override val value: B,
- override val left: Tree[B],
- override val right: Tree[B]) extends NonEmpty[B] {
- def isBlack = false
- }
- case class BlackTree[+B](override val key: A,
- override val value: B,
- override val left: Tree[B],
- override val right: Tree[B]) extends NonEmpty[B] {
- def isBlack = true
- }
-}
diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala
index 0254e9ca3a..37b8ecfbc4 100644
--- a/src/library/scala/collection/immutable/RedBlackTree.scala
+++ b/src/library/scala/collection/immutable/RedBlackTree.scala
@@ -18,19 +18,19 @@ import scala.annotation.meta.getter
/** An object containing the RedBlack tree implementation used by for `TreeMaps` and `TreeSets`.
*
* Implementation note: since efficiency is important for data structures this implementation
- * uses <code>null</code> to represent empty trees. This also means pattern matching cannot
+ * uses `null` to represent empty trees. This also means pattern matching cannot
* easily be used. The API represented by the RedBlackTree object tries to hide these
* optimizations behind a reasonably clean API.
*
* @since 2.10
*/
-private[immutable]
+private[collection]
object RedBlackTree {
def isEmpty(tree: Tree[_, _]): Boolean = tree eq null
- def contains[A](tree: Tree[A, _], x: A)(implicit ordering: Ordering[A]): Boolean = lookup(tree, x) ne null
- def get[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Option[B] = lookup(tree, x) match {
+ def contains[A: Ordering](tree: Tree[A, _], x: A): Boolean = lookup(tree, x) ne null
+ def get[A: Ordering, B](tree: Tree[A, B], x: A): Option[B] = lookup(tree, x) match {
case null => None
case tree => Some(tree.value)
}
@@ -44,8 +44,27 @@ object RedBlackTree {
}
def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count
- def update[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(tree, k, v, overwrite))
- def delete[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = blacken(del(tree, k))
+ /**
+ * Count all the nodes with keys greater than or equal to the lower bound and less than the upper bound.
+ * The two bounds are optional.
+ */
+ def countInRange[A](tree: Tree[A, _], from: Option[A], to:Option[A])(implicit ordering: Ordering[A]) : Int =
+ if (tree eq null) 0 else
+ (from, to) match {
+ // with no bounds use this node's count
+ case (None, None) => tree.count
+ // if node is less than the lower bound, try the tree on the right, it might be in range
+ case (Some(lb), _) if ordering.lt(tree.key, lb) => countInRange(tree.right, from, to)
+ // if node is greater than or equal to the upper bound, try the tree on the left, it might be in range
+ case (_, Some(ub)) if ordering.gteq(tree.key, ub) => countInRange(tree.left, from, to)
+ // node is in range so the tree on the left will all be less than the upper bound and the tree on the
+ // right will all be greater than or equal to the lower bound. So 1 for this node plus
+ // count the subtrees by stripping off the bounds that we don't need any more
+ case _ => 1 + countInRange(tree.left, from, None) + countInRange(tree.right, None, to)
+
+ }
+ def update[A: Ordering, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean): Tree[A, B1] = blacken(upd(tree, k, v, overwrite))
+ def delete[A: Ordering, B](tree: Tree[A, B], k: A): Tree[A, B] = blacken(del(tree, k))
def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match {
case (Some(from), Some(until)) => this.range(tree, from, until)
case (Some(from), None) => this.from(tree, from)
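
The comments above spell out the recursion: once no bound remains a subtree contributes its cached `count`, a node outside the bounds delegates to the one child that can still intersect the range, and a node inside the range drops the bound each child already satisfies. The same logic on a bare node type (the `Node` class is hypothetical, standing in for the nullable `Tree`):

    final case class Node[A](key: A, count: Int, left: Node[A], right: Node[A])

    def countInRange[A](t: Node[A], from: Option[A], to: Option[A])(implicit ord: Ordering[A]): Int =
      if (t == null) 0
      else (from, to) match {
        case (None, None)                         => t.count                          // whole subtree is in range
        case (Some(lb), _) if ord.lt(t.key, lb)   => countInRange(t.right, from, to)  // node below the lower bound
        case (_, Some(ub)) if ord.gteq(t.key, ub) => countInRange(t.left, from, to)   // node at/above the upper bound
        case _ => 1 + countInRange(t.left, from, None) + countInRange(t.right, None, to)
      }
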
@@ -74,20 +93,26 @@ object RedBlackTree {
result
}
- def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = if (tree ne null) {
- if (tree.left ne null) foreach(tree.left, f)
+
+ def foreach[A,B,U](tree:Tree[A,B], f:((A,B)) => U):Unit = if (tree ne null) _foreach(tree,f)
+
+ private[this] def _foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U) {
+ if (tree.left ne null) _foreach(tree.left, f)
f((tree.key, tree.value))
- if (tree.right ne null) foreach(tree.right, f)
+ if (tree.right ne null) _foreach(tree.right, f)
}
- def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = if (tree ne null) {
- if (tree.left ne null) foreachKey(tree.left, f)
- f(tree.key)
- if (tree.right ne null) foreachKey(tree.right, f)
+
+ def foreachKey[A, U](tree:Tree[A,_], f: A => U):Unit = if (tree ne null) _foreachKey(tree,f)
+
+ private[this] def _foreachKey[A, U](tree: Tree[A, _], f: A => U) {
+ if (tree.left ne null) _foreachKey(tree.left, f)
+ f((tree.key))
+ if (tree.right ne null) _foreachKey(tree.right, f)
}
- def iterator[A, B](tree: Tree[A, B]): Iterator[(A, B)] = new EntriesIterator(tree)
- def keysIterator[A, _](tree: Tree[A, _]): Iterator[A] = new KeysIterator(tree)
- def valuesIterator[_, B](tree: Tree[_, B]): Iterator[B] = new ValuesIterator(tree)
+ def iterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[(A, B)] = new EntriesIterator(tree, start)
+ def keysIterator[A: Ordering](tree: Tree[A, _], start: Option[A] = None): Iterator[A] = new KeysIterator(tree, start)
+ def valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[B] = new ValuesIterator(tree, start)
@tailrec
def nth[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
@@ -227,7 +252,7 @@ object RedBlackTree {
if (ordering.lt(tree.key, from)) return doFrom(tree.right, from)
val newLeft = doFrom(tree.left, from)
if (newLeft eq tree.left) tree
- else if (newLeft eq null) upd(tree.right, tree.key, tree.value, false)
+ else if (newLeft eq null) upd(tree.right, tree.key, tree.value, overwrite = false)
else rebalance(tree, newLeft, tree.right)
}
private[this] def doTo[A, B](tree: Tree[A, B], to: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
@@ -235,7 +260,7 @@ object RedBlackTree {
if (ordering.lt(to, tree.key)) return doTo(tree.left, to)
val newRight = doTo(tree.right, to)
if (newRight eq tree.right) tree
- else if (newRight eq null) upd(tree.left, tree.key, tree.value, false)
+ else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false)
else rebalance(tree, tree.left, newRight)
}
private[this] def doUntil[A, B](tree: Tree[A, B], until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
@@ -243,18 +268,18 @@ object RedBlackTree {
if (ordering.lteq(until, tree.key)) return doUntil(tree.left, until)
val newRight = doUntil(tree.right, until)
if (newRight eq tree.right) tree
- else if (newRight eq null) upd(tree.left, tree.key, tree.value, false)
+ else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false)
else rebalance(tree, tree.left, newRight)
}
private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
if (tree eq null) return null
- if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until);
- if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until);
+ if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until)
+ if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until)
val newLeft = doFrom(tree.left, from)
val newRight = doUntil(tree.right, until)
if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
- else if (newLeft eq null) upd(newRight, tree.key, tree.value, false);
- else if (newRight eq null) upd(newLeft, tree.key, tree.value, false);
+ else if (newLeft eq null) upd(newRight, tree.key, tree.value, overwrite = false)
+ else if (newRight eq null) upd(newLeft, tree.key, tree.value, overwrite = false)
else rebalance(tree, newLeft, newRight)
}
@@ -265,7 +290,7 @@ object RedBlackTree {
if (n > count) return doDrop(tree.right, n - count - 1)
val newLeft = doDrop(tree.left, n)
if (newLeft eq tree.left) tree
- else if (newLeft eq null) updNth(tree.right, n - count - 1, tree.key, tree.value, false)
+ else if (newLeft eq null) updNth(tree.right, n - count - 1, tree.key, tree.value, overwrite = false)
else rebalance(tree, newLeft, tree.right)
}
private[this] def doTake[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
@@ -275,7 +300,7 @@ object RedBlackTree {
if (n <= count) return doTake(tree.left, n)
val newRight = doTake(tree.right, n - count - 1)
if (newRight eq tree.right) tree
- else if (newRight eq null) updNth(tree.left, n, tree.key, tree.value, false)
+ else if (newRight eq null) updNth(tree.left, n, tree.key, tree.value, overwrite = false)
else rebalance(tree, tree.left, newRight)
}
private[this] def doSlice[A, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = {
@@ -286,8 +311,8 @@ object RedBlackTree {
val newLeft = doDrop(tree.left, from)
val newRight = doTake(tree.right, until - count - 1)
if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
- else if (newLeft eq null) updNth(newRight, from - count - 1, tree.key, tree.value, false)
- else if (newRight eq null) updNth(newLeft, until, tree.key, tree.value, false)
+ else if (newLeft eq null) updNth(newRight, from - count - 1, tree.key, tree.value, overwrite = false)
+ else if (newRight eq null) updNth(newLeft, until, tree.key, tree.value, overwrite = false)
else rebalance(tree, newLeft, newRight)
}
@@ -419,32 +444,28 @@ object RedBlackTree {
def unapply[A, B](t: BlackTree[A, B]) = Some((t.key, t.value, t.left, t.right))
}
- private[this] abstract class TreeIterator[A, B, R](tree: Tree[A, B]) extends Iterator[R] {
+ private[this] abstract class TreeIterator[A, B, R](root: Tree[A, B], start: Option[A])(implicit ordering: Ordering[A]) extends Iterator[R] {
protected[this] def nextResult(tree: Tree[A, B]): R
- override def hasNext: Boolean = next ne null
+ override def hasNext: Boolean = lookahead ne null
- override def next: R = next match {
+ override def next: R = lookahead match {
case null =>
throw new NoSuchElementException("next on empty iterator")
case tree =>
- next = findNext(tree.right)
+ lookahead = findLeftMostOrPopOnEmpty(goRight(tree))
nextResult(tree)
}
@tailrec
- private[this] def findNext(tree: Tree[A, B]): Tree[A, B] = {
- if (tree eq null) popPath()
+ private[this] def findLeftMostOrPopOnEmpty(tree: Tree[A, B]): Tree[A, B] =
+ if (tree eq null) popNext()
else if (tree.left eq null) tree
- else {
- pushPath(tree)
- findNext(tree.left)
- }
- }
+ else findLeftMostOrPopOnEmpty(goLeft(tree))
- private[this] def pushPath(tree: Tree[A, B]) {
+ private[this] def pushNext(tree: Tree[A, B]) {
try {
- path(index) = tree
+ stackOfNexts(index) = tree
index += 1
} catch {
case _: ArrayIndexOutOfBoundsException =>
@@ -456,17 +477,17 @@ object RedBlackTree {
* An exception handler is used instead of an if-condition to optimize the normal path.
* This makes a large difference in iteration speed!
*/
- assert(index >= path.length)
- path :+= null
- pushPath(tree)
+ assert(index >= stackOfNexts.length)
+ stackOfNexts :+= null
+ pushNext(tree)
}
}
- private[this] def popPath(): Tree[A, B] = if (index == 0) null else {
+ private[this] def popNext(): Tree[A, B] = if (index == 0) null else {
index -= 1
- path(index)
+ stackOfNexts(index)
}
- private[this] var path = if (tree eq null) null else {
+ private[this] var stackOfNexts = if (root eq null) null else {
/*
* According to "Ralf Hinze. Constructing red-black trees" [http://www.cs.ox.ac.uk/ralf.hinze/publications/#P5]
* the maximum height of a red-black tree is 2*log_2(n + 2) - 2.
@@ -475,22 +496,45 @@ object RedBlackTree {
*
* We also don't store the deepest nodes in the path so the maximum path length is further reduced by one.
*/
- val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(tree.count + 2 - 1)) - 2 - 1
+ val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(root.count + 2 - 1)) - 2 - 1
new Array[Tree[A, B]](maximumHeight)
}
private[this] var index = 0
- private[this] var next: Tree[A, B] = findNext(tree)
+ private[this] var lookahead: Tree[A, B] = start map startFrom getOrElse findLeftMostOrPopOnEmpty(root)
+
+ /**
+ * Find the leftmost subtree whose key is equal to the given key, or if no such thing,
+ * the leftmost subtree with the key that would be "next" after it according
+ * to the ordering. Along the way build up the iterator's path stack so that "next"
+ * functionality works.
+ */
+ private[this] def startFrom(key: A) : Tree[A,B] = if (root eq null) null else {
+ @tailrec def find(tree: Tree[A, B]): Tree[A, B] =
+ if (tree eq null) popNext()
+ else find(
+ if (ordering.lteq(key, tree.key)) goLeft(tree)
+ else goRight(tree)
+ )
+ find(root)
+ }
+
+ private[this] def goLeft(tree: Tree[A, B]) = {
+ pushNext(tree)
+ tree.left
+ }
+
+ private[this] def goRight(tree: Tree[A, B]) = tree.right
}
- private[this] class EntriesIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, (A, B)](tree) {
+ private[this] class EntriesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, (A, B)](tree, focus) {
override def nextResult(tree: Tree[A, B]) = (tree.key, tree.value)
}
- private[this] class KeysIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, A](tree) {
+ private[this] class KeysIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, A](tree, focus) {
override def nextResult(tree: Tree[A, B]) = tree.key
}
- private[this] class ValuesIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, B](tree) {
+ private[this] class ValuesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, B](tree, focus) {
override def nextResult(tree: Tree[A, B]) = tree.value
}
}
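
The rewritten TreeIterator keeps a `lookahead` node plus an explicit stack of pending ancestors, and `startFrom` seeds that stack by walking from the root toward the requested key so that iteration resumes at the first key greater than or equal to it. A minimal standalone sketch of the same traversal over a plain BST, with a hypothetical Node class and an ordinary List as the stack; the real code instead pre-sizes an array from the red-black height bound and grows it inside an ArrayIndexOutOfBoundsException handler to keep the common push branch-free:

    object IteratorFromSketch {
      final case class Node[A](key: A, left: Node[A], right: Node[A])

      // In-order iterator that can optionally start at the first key >= start.
      final class BstIterator[A](root: Node[A], start: Option[A])(implicit ord: Ordering[A])
          extends Iterator[A] {
        private var pending: List[Node[A]] = Nil
        private var lookahead: Node[A] = start match {
          case Some(k) => seek(root, k)
          case None    => leftmost(root)
        }

        // Descend to the leftmost node, stacking every ancestor passed on the way down.
        private def leftmost(t: Node[A]): Node[A] =
          if (t == null) popOrNull()
          else if (t.left == null) t
          else { pending = t :: pending; leftmost(t.left) }

        // Descend toward k, stacking exactly the ancestors with key >= k; when the
        // search falls off the tree, the top of the stack is the in-order successor.
        private def seek(t: Node[A], k: A): Node[A] =
          if (t == null) popOrNull()
          else if (ord.lteq(k, t.key)) { pending = t :: pending; seek(t.left, k) }
          else seek(t.right, k)

        private def popOrNull(): Node[A] = pending match {
          case Nil     => null
          case h :: tl => pending = tl; h
        }

        def hasNext: Boolean = lookahead != null
        def next(): A = {
          val n = lookahead
          if (n == null) throw new NoSuchElementException("next on empty iterator")
          lookahead = leftmost(n.right)
          n.key
        }
      }

      def main(args: Array[String]): Unit = {
        val t = Node(4, Node(2, Node(1, null, null), Node(3, null, null)), Node(6, Node(5, null, null), null))
        println(new BstIterator(t, None).toList)    // List(1, 2, 3, 4, 5, 6)
        println(new BstIterator(t, Some(3)).toList) // List(3, 4, 5, 6)
      }
    }
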
diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala
index eb04231c55..73cc55df00 100644
--- a/src/library/scala/collection/immutable/SortedMap.scala
+++ b/src/library/scala/collection/immutable/SortedMap.scala
@@ -82,11 +82,17 @@ self =>
override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] {
implicit def ordering: Ordering[A] = self.ordering
override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p)
+ override def iteratorFrom(start: A) = self iteratorFrom start filter {case (k, _) => p(k)}
+ override def keysIteratorFrom(start : A) = self keysIteratorFrom start filter p
+ override def valuesIteratorFrom(start : A) = self iteratorFrom start collect {case (k,v) if p(k) => v}
}
override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] {
implicit def ordering: Ordering[A] = self.ordering
override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f)
+ override def iteratorFrom(start: A) = self iteratorFrom start map {case (k, v) => (k, f(v))}
+ override def keysIteratorFrom(start : A) = self keysIteratorFrom start
+ override def valuesIteratorFrom(start : A) = self valuesIteratorFrom start map f
}
}
@@ -106,13 +112,13 @@ object SortedMap extends ImmutableSortedMapFactory[SortedMap] {
val b = SortedMap.newBuilder[A, B1]
b ++= this
b += ((kv._1, kv._2))
- b.result
+ b.result()
}
override def - (key: A): SortedMap[A, B] = {
val b = newBuilder
for (kv <- this; if kv._1 != key) b += kv
- b.result
+ b.result()
}
}
}
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 5bb4ef5f21..0770bd3175 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -181,6 +181,7 @@ import scala.language.implicitConversions
* @define coll stream
* @define orderDependent
* @define orderDependentFold
+ * @define willTerminateInf Note: lazily evaluated; will terminate for infinite-sized collections.
*/
abstract class Stream[+A] extends AbstractSeq[A]
with LinearSeq[A]
@@ -286,9 +287,8 @@ self =>
len
}
- /** It's an imperfect world, but at least we can bottle up the
- * imperfection in a capsule.
- */
+ // It's an imperfect world, but at least we can bottle up the
+ // imperfection in a capsule.
@inline private def asThat[That](x: AnyRef): That = x.asInstanceOf[That]
@inline private def asStream[B](x: AnyRef): Stream[B] = x.asInstanceOf[Stream[B]]
@inline private def isStreamBuilder[B, That](bf: CanBuildFrom[Stream[A], B, That]) =
@@ -385,12 +385,17 @@ self =>
// 1) stackoverflows (could be achieved with tailrec, too)
// 2) out of memory errors for big streams (`this` reference can be eliminated from the stack)
var rest: Stream[A] = this
- while (rest.nonEmpty && !pf.isDefinedAt(rest.head)) rest = rest.tail
+
+ // Avoids calling both `pf.isDefined` and `pf.apply`.
+ var newHead: B = null.asInstanceOf[B]
+ val runWith = pf.runWith((b: B) => newHead = b)
+
+ while (rest.nonEmpty && !runWith(rest.head)) rest = rest.tail
// without the call to the companion object, a thunk is created for the tail of the new stream,
// and the closure of the thunk will reference `this`
if (rest.isEmpty) Stream.Empty.asInstanceOf[That]
- else Stream.collectedTail(rest, pf, bf).asInstanceOf[That]
+ else Stream.collectedTail(newHead, rest, pf, bf).asInstanceOf[That]
}
}
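
The `runWith` trick above is a general way to test and apply a partial function in a single pass instead of the `isDefinedAt`/`apply` pair. A small illustration outside the Stream code, using only the standard `PartialFunction.runWith` available since 2.10:

    object RunWithSketch {
      def main(args: Array[String]): Unit = {
        val pf: PartialFunction[Int, String] = { case n if n % 2 == 0 => s"even:$n" }

        // runWith turns pf into a predicate that applies pf and feeds the result to
        // the callback only when pf is defined, so the guard runs once per element.
        var firstMatch: String = null
        val matched = pf.runWith(s => firstMatch = s)

        val rest = List(1, 3, 4, 6).dropWhile(x => !matched(x))
        println(firstMatch) // even:4
        println(rest)       // List(4, 6)
      }
    }
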
@@ -725,10 +730,15 @@ self =>
* // produces: "5, 6, 7, 8, 9"
* }}}
*/
- override def take(n: Int): Stream[A] =
+ override def take(n: Int): Stream[A] = (
+ // Note that the n == 1 condition appears redundant but is not.
+ // It prevents "tail" from being referenced (and its head being evaluated)
+ // when obtaining the last element of the result. Such are the challenges
+ // of working with a lazy-but-not-really sequence.
if (n <= 0 || isEmpty) Stream.empty
else if (n == 1) cons(head, Stream.empty)
else cons(head, tail take n-1)
+ )
@tailrec final override def drop(n: Int): Stream[A] =
if (n <= 0 || isEmpty) this
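
The seemingly redundant `n == 1` branch in `take` above matters because the general branch has to mention `tail`, and merely reading `tail` constructs the next cons cell and evaluates its head. A tiny demonstration of the over-evaluation the branch prevents, counting how many heads get forced:

    object TakeLazinessSketch {
      def main(args: Array[String]): Unit = {
        var forced = 0
        // Each cell's head is evaluated exactly when that cell is constructed.
        def counted(from: Int): Stream[Int] =
          Stream.cons({ forced += 1; from }, counted(from + 1))

        val s = counted(0).take(3).toList
        println(s)      // List(0, 1, 2)
        // 3 heads forced; had take(1) been written as cons(head, tail take 0), walking
        // the last cell would touch `tail` and force a 4th head just to take nothing.
        println(forced) // 3
      }
    }
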
@@ -784,8 +794,23 @@ self =>
these
}
- // there's nothing we can do about dropRight, so we just keep the definition
- // in LinearSeq
+ /**
+ * @inheritdoc
+ * $willTerminateInf
+ */
+ override def dropRight(n: Int): Stream[A] = {
+ // We make dropRight work for possibly infinite streams by carrying
+ // a buffer of the dropped size. As long as the buffer is full and the
+ // rest is non-empty, we can feed elements off the buffer head. When
+ // the rest becomes empty, the full buffer is the dropped elements.
+ def advance(stub0: List[A], stub1: List[A], rest: Stream[A]): Stream[A] = {
+ if (rest.isEmpty) Stream.empty
+ else if (stub0.isEmpty) advance(stub1.reverse, Nil, rest)
+ else cons(stub0.head, advance(stub0.tail, rest.head :: stub1, rest.tail))
+ }
+ if (n <= 0) this
+ else advance((this take n).toList, Nil, this drop n)
+ }
/** Returns the longest prefix of this `Stream` whose elements satisfy the
* predicate `p`.
@@ -973,7 +998,7 @@ final class StreamIterator[+A] private() extends AbstractIterator[A] with Iterat
def hasNext: Boolean = these.v.nonEmpty
def next(): A =
- if (isEmpty) Iterator.empty.next
+ if (isEmpty) Iterator.empty.next()
else {
val cur = these.v
val result = cur.head
@@ -1149,8 +1174,8 @@ object Stream extends SeqFactory[Stream] {
cons(stream.head, stream.tail filter p)
}
- private[immutable] def collectedTail[A, B, That](stream: Stream[A], pf: PartialFunction[A, B], bf: CanBuildFrom[Stream[A], B, That]) = {
- cons(pf(stream.head), stream.tail.collect(pf)(bf).asInstanceOf[Stream[B]])
+ private[immutable] def collectedTail[A, B, That](head: B, stream: Stream[A], pf: PartialFunction[A, B], bf: CanBuildFrom[Stream[A], B, That]) = {
+ cons(head, stream.tail.collect(pf)(bf).asInstanceOf[Stream[B]])
}
}
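
The buffered `dropRight` above also pays off for unbounded streams: because the recursion happens inside the lazy tail, the call returns immediately and a downstream `take` still terminates, whereas the generic linear-sequence version it replaces walks the whole stream. A quick usage check with plain standard-library calls:

    object DropRightSketch {
      def main(args: Array[String]): Unit = {
        // Finite case: the n-element buffer is exactly what gets dropped.
        println(Stream(1, 2, 3, 4, 5).dropRight(2).toList)  // List(1, 2, 3)

        // Unbounded case: dropRight returns lazily, so this terminates.
        println(Stream.from(1).dropRight(2).take(5).toList) // List(1, 2, 3, 4, 5)
      }
    }
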
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index edea89b555..389e1579f2 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -9,7 +9,6 @@
package scala.collection
package immutable
-import generic._
import mutable.Builder
import scala.util.matching.Regex
import scala.math.ScalaNumber
@@ -19,12 +18,11 @@ import scala.reflect.ClassTag
* @since 2.8
*/
object StringLike {
-
// just statics for companion class.
- private final val LF: Char = 0x0A
- private final val FF: Char = 0x0C
- private final val CR: Char = 0x0D
- private final val SU: Char = 0x1A
+ private final val LF = 0x0A
+ private final val FF = 0x0C
+ private final val CR = 0x0D
+ private final val SU = 0x1A
}
import StringLike._
@@ -60,8 +58,8 @@ self =>
val start = from max 0
val end = until min length
- if (start >= end) newBuilder.result
- else (newBuilder ++= toString.substring(start, end)).result
+ if (start >= end) newBuilder.result()
+ else (newBuilder ++= toString.substring(start, end)).result()
}
/** Return the current string concatenated `n` times.
diff --git a/src/library/scala/collection/immutable/StringOps.scala b/src/library/scala/collection/immutable/StringOps.scala
index a650d98697..16c1f96cc2 100644
--- a/src/library/scala/collection/immutable/StringOps.scala
+++ b/src/library/scala/collection/immutable/StringOps.scala
@@ -6,8 +6,6 @@
** |/ **
\* */
-
-
package scala.collection
package immutable
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index 5b4db2686a..5085039da5 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -51,9 +51,6 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
with MapLike[A, B, TreeMap[A, B]]
with Serializable {
- @deprecated("use `ordering.lt` instead", "2.10.0")
- def isSmaller(x: A, y: A) = ordering.lt(x, y)
-
override protected[this] def newBuilder : Builder[(A, B), TreeMap[A, B]] =
TreeMap.newBuilder[A, B]
@@ -111,7 +108,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
private[this] def countWhile(p: ((A, B)) => Boolean): Int = {
var result = 0
val it = iterator
- while (it.hasNext && p(it.next)) result += 1
+ while (it.hasNext && p(it.next())) result += 1
result
}
override def dropWhile(p: ((A, B)) => Boolean) = drop(countWhile(p))
@@ -131,7 +128,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
* @param value the value to be associated with `key`
* @return a new $coll with the updated binding
*/
- override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(RB.update(tree, key, value, true))
+ override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(RB.update(tree, key, value, overwrite = true))
/** Add a key/value pair to this map.
* @tparam B1 type of the value of the new binding, a supertype of `B`
@@ -171,7 +168,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
*/
def insert [B1 >: B](key: A, value: B1): TreeMap[A, B1] = {
assert(!RB.contains(tree, key))
- new TreeMap(RB.update(tree, key, value, true))
+ new TreeMap(RB.update(tree, key, value, overwrite = true))
}
def - (key:A): TreeMap[A, B] =
@@ -192,9 +189,13 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
* @return the new iterator
*/
override def iterator: Iterator[(A, B)] = RB.iterator(tree)
+ override def iteratorFrom(start: A): Iterator[(A, B)] = RB.iterator(tree, Some(start))
override def keysIterator: Iterator[A] = RB.keysIterator(tree)
+ override def keysIteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start))
+
override def valuesIterator: Iterator[B] = RB.valuesIterator(tree)
+ override def valuesIteratorFrom(start: A): Iterator[B] = RB.valuesIterator(tree, Some(start))
override def contains(key: A): Boolean = RB.contains(tree, key)
override def isDefinedAt(key: A): Boolean = RB.contains(tree, key)
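
The new `*IteratorFrom` overrides expose RedBlackTree's seeded iterator directly, so iteration can resume at a key without first building a ranged copy of the map. A small usage sketch:

    import scala.collection.immutable.TreeMap

    object IteratorFromUsage {
      def main(args: Array[String]): Unit = {
        val m = TreeMap(1 -> "a", 3 -> "b", 5 -> "c", 7 -> "d")

        // Each iterator starts at the first key >= the argument.
        println(m.iteratorFrom(4).toList)       // List((5,c), (7,d))
        println(m.keysIteratorFrom(3).toList)   // List(3, 5, 7)
        println(m.valuesIteratorFrom(6).toList) // List(d)
      }
    }
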
diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala
index 494776587d..e25d16408a 100644
--- a/src/library/scala/collection/immutable/TreeSet.scala
+++ b/src/library/scala/collection/immutable/TreeSet.scala
@@ -89,16 +89,13 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
private[this] def countWhile(p: A => Boolean): Int = {
var result = 0
val it = iterator
- while (it.hasNext && p(it.next)) result += 1
+ while (it.hasNext && p(it.next())) result += 1
result
}
override def dropWhile(p: A => Boolean) = drop(countWhile(p))
override def takeWhile(p: A => Boolean) = take(countWhile(p))
override def span(p: A => Boolean) = splitAt(countWhile(p))
- @deprecated("use `ordering.lt` instead", "2.10.0")
- def isSmaller(x: A, y: A) = compare(x,y) < 0
-
def this()(implicit ordering: Ordering[A]) = this(null)(ordering)
private def newSet(t: RB.Tree[A, Unit]) = new TreeSet[A](t)
@@ -112,7 +109,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
* @param elem a new element to add.
* @return a new $coll containing `elem` and all the elements of this $coll.
*/
- def + (elem: A): TreeSet[A] = newSet(RB.update(tree, elem, (), false))
+ def + (elem: A): TreeSet[A] = newSet(RB.update(tree, elem, (), overwrite = false))
/** A new `TreeSet` with the entry added is returned,
* assuming that elem is <em>not</em> in the TreeSet.
@@ -122,7 +119,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
*/
def insert(elem: A): TreeSet[A] = {
assert(!RB.contains(tree, elem))
- newSet(RB.update(tree, elem, (), false))
+ newSet(RB.update(tree, elem, (), overwrite = false))
}
/** Creates a new `TreeSet` with the entry removed.
@@ -147,6 +144,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
* @return the new iterator
*/
def iterator: Iterator[A] = RB.keysIterator(tree)
+ override def keysIteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start))
override def foreach[U](f: A => U) = RB.foreachKey(tree, f)
diff --git a/src/library/scala/collection/immutable/TrieIterator.scala b/src/library/scala/collection/immutable/TrieIterator.scala
index ae427852d4..dbe013d6e8 100644
--- a/src/library/scala/collection/immutable/TrieIterator.scala
+++ b/src/library/scala/collection/immutable/TrieIterator.scala
@@ -46,7 +46,7 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e
case x: HashSetCollision1[_] => x.ks.map(x => HashSet(x)).toArray
}).asInstanceOf[Array[Iterable[T]]]
- private type SplitIterators = ((Iterator[T], Int), Iterator[T])
+ private[this] type SplitIterators = ((Iterator[T], Int), Iterator[T])
private def isTrie(x: AnyRef) = x match {
case _: HashTrieMap[_,_] | _: HashTrieSet[_] => true
@@ -94,7 +94,7 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e
def hasNext = (subIter ne null) || depth >= 0
def next(): T = {
if (subIter ne null) {
- val el = subIter.next
+ val el = subIter.next()
if (!subIter.hasNext)
subIter = null
el
@@ -135,7 +135,7 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e
}
else {
subIter = m.iterator
- next
+ next()
}
// The much slower version:
//
@@ -177,7 +177,6 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e
if (depth > 0) {
// 2) topmost comes before (is not) arrayD
// steal a portion of top to create a new iterator
- val topmost = arrayStack(0)
if (posStack(0) == arrayStack(0).length - 1) {
// 2a) only a single entry left on top
// this means we have to modify this iterator - pop topmost
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index bcce4a99bd..571e6775c8 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -18,16 +18,7 @@ import scala.collection.parallel.immutable.ParVector
/** Companion object to the Vector class
*/
-object Vector extends SeqFactory[Vector] {
- // left lying around for binary compatibility check
- private[collection] class VectorReusableCBF extends GenericCanBuildFrom[Nothing] {
- override def apply() = newBuilder[Nothing]
- }
- // left lying around for binary compatibility check
- private val VectorReusableCBF: GenericCanBuildFrom[Nothing] = new VectorReusableCBF
-
- override lazy val ReusableCBF =
- scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[Nothing]]
+object Vector extends IndexedSeqFactory[Vector] {
def newBuilder[A]: Builder[A, Vector[A]] = new VectorBuilder[A]
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Vector[A]] =
ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
@@ -113,7 +104,7 @@ override def companion: GenericCompanion[Vector] = Vector
if (0 < i) {
i -= 1
self(i)
- } else Iterator.empty.next
+ } else Iterator.empty.next()
}
// TODO: reverse
@@ -148,7 +139,7 @@ override def companion: GenericCompanion[Vector] = Vector
if (bf eq IndexedSeq.ReusableCBF) appendFront(elem).asInstanceOf[That] // just ignore bf
else super.+:(elem)(bf)
- override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That =
+ override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That =
if (bf eq IndexedSeq.ReusableCBF) appendBack(elem).asInstanceOf[That] // just ignore bf
else super.:+(elem)(bf)
@@ -251,8 +242,8 @@ override def companion: GenericCompanion[Vector] = Vector
private[immutable] def appendFront[B>:A](value: B): Vector[B] = {
if (endIndex != startIndex) {
- var blockIndex = (startIndex - 1) & ~31
- var lo = (startIndex - 1) & 31
+ val blockIndex = (startIndex - 1) & ~31
+ val lo = (startIndex - 1) & 31
if (startIndex != blockIndex + 32) {
val s = new Vector(startIndex - 1, endIndex, blockIndex)
@@ -270,7 +261,7 @@ override def companion: GenericCompanion[Vector] = Vector
//println("----- appendFront " + value + " at " + (startIndex - 1) + " reached block start")
if (shift != 0) {
// case A: we can shift right on the top level
- debug
+ debug()
//println("shifting right by " + shiftBlocks + " at level " + (depth-1) + " (had "+freeSpace+" free space)")
if (depth > 1) {
@@ -280,7 +271,7 @@ override def companion: GenericCompanion[Vector] = Vector
s.initFrom(this)
s.dirty = dirty
s.shiftTopLevel(0, shiftBlocks) // shift right by n blocks
- s.debug
+ s.debug()
s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // maybe create pos; prepare for writing
s.display0(lo) = value.asInstanceOf[AnyRef]
//assert(depth == s.depth)
@@ -298,7 +289,7 @@ override def companion: GenericCompanion[Vector] = Vector
s.shiftTopLevel(0, shiftBlocks) // shift right by n elements
s.gotoPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // prepare for writing
s.display0(shift-1) = value.asInstanceOf[AnyRef]
- s.debug
+ s.debug()
s
}
} else if (blockIndex < 0) {
@@ -313,10 +304,10 @@ override def companion: GenericCompanion[Vector] = Vector
val s = new Vector(startIndex - 1 + move, endIndex + move, newBlockIndex)
s.initFrom(this)
s.dirty = dirty
- s.debug
+ s.debug()
s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // could optimize: we know it will create a whole branch
s.display0(lo) = value.asInstanceOf[AnyRef]
- s.debug
+ s.debug()
//assert(s.depth == depth+1)
s
} else {
@@ -348,8 +339,8 @@ override def companion: GenericCompanion[Vector] = Vector
// //println("------- append " + value)
// debug()
if (endIndex != startIndex) {
- var blockIndex = endIndex & ~31
- var lo = endIndex & 31
+ val blockIndex = endIndex & ~31
+ val lo = endIndex & 31
if (endIndex != blockIndex) {
//println("will make writable block (from "+focus+") at: " + blockIndex)
@@ -366,7 +357,7 @@ override def companion: GenericCompanion[Vector] = Vector
//println("----- appendBack " + value + " at " + endIndex + " reached block end")
if (shift != 0) {
- debug
+ debug()
//println("shifting left by " + shiftBlocks + " at level " + (depth-1) + " (had "+startIndex+" free space)")
if (depth > 1) {
val newBlockIndex = blockIndex - shift
@@ -375,10 +366,10 @@ override def companion: GenericCompanion[Vector] = Vector
s.initFrom(this)
s.dirty = dirty
s.shiftTopLevel(shiftBlocks, 0) // shift left by n blocks
- s.debug
+ s.debug()
s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex)
s.display0(lo) = value.asInstanceOf[AnyRef]
- s.debug
+ s.debug()
//assert(depth == s.depth)
s
} else {
@@ -394,7 +385,7 @@ override def companion: GenericCompanion[Vector] = Vector
s.shiftTopLevel(shiftBlocks, 0) // shift right by n elements
s.gotoPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex)
s.display0(32 - shift) = value.asInstanceOf[AnyRef]
- s.debug
+ s.debug()
s
}
} else {
@@ -409,7 +400,7 @@ override def companion: GenericCompanion[Vector] = Vector
//assert(s.depth == depth+1) might or might not create new level!
if (s.depth == depth+1) {
//println("creating new level " + s.depth + " (had "+0+" free space)")
- s.debug
+ s.debug()
}
s
}
@@ -583,9 +574,7 @@ override def companion: GenericCompanion[Vector] = Vector
}
private def dropFront0(cutIndex: Int): Vector[A] = {
- var blockIndex = cutIndex & ~31
- var lo = cutIndex & 31
-
+ val blockIndex = cutIndex & ~31
val xor = cutIndex ^ (endIndex - 1)
val d = requiredDepth(xor)
val shift = (cutIndex & ~((1 << (5*d))-1))
@@ -615,9 +604,7 @@ override def companion: GenericCompanion[Vector] = Vector
}
private def dropBack0(cutIndex: Int): Vector[A] = {
- var blockIndex = (cutIndex - 1) & ~31
- var lo = ((cutIndex - 1) & 31) + 1
-
+ val blockIndex = (cutIndex - 1) & ~31
val xor = startIndex ^ (cutIndex - 1)
val d = requiredDepth(xor)
val shift = (startIndex & ~((1 << (5*d))-1))
@@ -639,14 +626,13 @@ override def companion: GenericCompanion[Vector] = Vector
}
-class VectorIterator[+A](_startIndex: Int, _endIndex: Int)
+class VectorIterator[+A](_startIndex: Int, endIndex: Int)
extends AbstractIterator[A]
with Iterator[A]
with VectorPointer[A @uncheckedVariance] {
private var blockIndex: Int = _startIndex & ~31
private var lo: Int = _startIndex & 31
- private var endIndex: Int = _endIndex
private var endLo = math.min(endIndex - blockIndex, 32)
@@ -676,13 +662,13 @@ extends AbstractIterator[A]
res
}
- private[collection] def remainingElementCount: Int = (_endIndex - (blockIndex + lo)) max 0
+ private[collection] def remainingElementCount: Int = (endIndex - (blockIndex + lo)) max 0
/** Creates a new vector which consists of elements remaining in this iterator.
* Such a vector can then be split into several vectors using methods like `take` and `drop`.
*/
private[collection] def remainingVector: Vector[A] = {
- val v = new Vector(blockIndex + lo, _endIndex, blockIndex + lo)
+ val v = new Vector(blockIndex + lo, endIndex, blockIndex + lo)
v.initFrom(this)
v
}
diff --git a/src/library/scala/collection/immutable/package.scala b/src/library/scala/collection/immutable/package.scala
deleted file mode 100644
index ed0c1b3736..0000000000
--- a/src/library/scala/collection/immutable/package.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-
-package immutable {
- /** It looks like once upon a time this was used by ParRange, but
- * since December 2010 in r23721 it is not used by anything. We
- * should not have public API traits with seductive names like
- * "RangeUtils" which are neither documented nor used.
- */
- @deprecated("this class will be removed", "2.10.0")
- trait RangeUtils[+Repr <: RangeUtils[Repr]] {
- def start: Int
- def end: Int
- def step: Int
- def inclusive: Boolean
- def create(_start: Int, _end: Int, _step: Int, _inclusive: Boolean): Repr
-
- private final def inclusiveLast: Int = {
- val size = end.toLong - start.toLong
- (size / step.toLong * step.toLong + start.toLong).toInt
- }
-
- final def _last: Int = (
- if (!inclusive) {
- if (step == 1 || step == -1) end - step
- else {
- val inclast = inclusiveLast
- if ((end.toLong - start.toLong) % step == 0) inclast - step else inclast
- }
- }
- else if (step == 1 || step == -1) end
- else inclusiveLast
- )
-
- final def _foreach[U](f: Int => U) = if (_length > 0) {
- var i = start
- val last = _last
- while (i != last) {
- f(i)
- i += step
- }
- }
-
- final def _length: Int = (
- if (!inclusive) {
- if (end > start == step > 0 && start != end) {
- (_last.toLong - start.toLong) / step.toLong + 1
- } else 0
- }.toInt
- else {
- if (end > start == step > 0 || start == end) {
- (_last.toLong - start.toLong) / step.toLong + 1
- } else 0
- }.toInt
- )
-
- final def _apply(idx: Int): Int = {
- if (idx < 0 || idx >= _length) throw new IndexOutOfBoundsException(idx.toString)
- start + idx * step
- }
-
- private def locationAfterN(n: Int) = (
- if (n > 0) {
- if (step > 0)
- scala.math.min(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
- else
- scala.math.max(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
- }
- else start
- )
-
- final def _take(n: Int) = (
- if (n > 0 && _length > 0)
- create(start, locationAfterN(n), step, true)
- else
- create(start, start, step, false)
- )
-
- final def _drop(n: Int) = create(locationAfterN(n), end, step, inclusive)
- final def _slice(from: Int, until: Int) = _drop(from)._take(until - from)
- }
-}
-
-package object immutable {
- /** Nothing left after I promoted RangeUtils to the package. */
-}
diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala
index 157e5dae62..dd7a94d677 100644
--- a/src/library/scala/collection/mutable/AVLTree.scala
+++ b/src/library/scala/collection/mutable/AVLTree.scala
@@ -15,7 +15,7 @@ package mutable
* An immutable AVL Tree implementation used by mutable.TreeSet
*
* @author Lucien Pereira
- *
+ * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11")
*/
private[mutable] sealed trait AVLTree[+A] extends Serializable {
def balance: Int
@@ -65,13 +65,19 @@ private[mutable] sealed trait AVLTree[+A] extends Serializable {
def doubleRightRotation[B >: A]: Node[B] = sys.error("Should not happen.")
}
+/**
+ * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11")
+ */
private case object Leaf extends AVLTree[Nothing] {
override val balance: Int = 0
override val depth: Int = -1
}
-private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree[A]) extends AVLTree[A] {
+/**
+ * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11")
+ */
+private case class Node[A](data: A, left: AVLTree[A], right: AVLTree[A]) extends AVLTree[A] {
override val balance: Int = right.depth - left.depth
override val depth: Int = math.max(left.depth, right.depth) + 1
@@ -205,6 +211,9 @@ private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree
}
}
+/**
+ * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11")
+ */
private class AVLIterator[A](root: Node[A]) extends Iterator[A] {
val stack = mutable.ArrayStack[Node[A]](root)
diveLeft()
@@ -220,11 +229,11 @@ private class AVLIterator[A](root: Node[A]) extends Iterator[A] {
private def engageRight(): Unit = {
if (Leaf != stack.head.right) {
val right: Node[A] = stack.head.right.asInstanceOf[Node[A]]
- stack.pop
+ stack.pop()
stack.push(right)
diveLeft()
} else
- stack.pop
+ stack.pop()
}
override def hasNext: Boolean = !stack.isEmpty
diff --git a/src/library/scala/collection/mutable/ArrayBuilder.scala b/src/library/scala/collection/mutable/ArrayBuilder.scala
index 0ce2cda32c..2fe3e91d68 100644
--- a/src/library/scala/collection/mutable/ArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/ArrayBuilder.scala
@@ -6,12 +6,9 @@
** |/ **
\* */
-
-
package scala.collection
package mutable
-import generic._
import scala.reflect.ClassTag
import scala.runtime.ScalaRunTime
diff --git a/src/library/scala/collection/mutable/ArrayLike.scala b/src/library/scala/collection/mutable/ArrayLike.scala
index 31f3d2a497..40017aa08e 100644
--- a/src/library/scala/collection/mutable/ArrayLike.scala
+++ b/src/library/scala/collection/mutable/ArrayLike.scala
@@ -6,11 +6,8 @@
** |/ **
\* */
-
-
package scala.collection
package mutable
-import generic._
/** A common supertrait of `ArrayOps` and `WrappedArray` that factors out most
* operations on arrays and wrapped arrays.
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 25ba7e4ce6..4c996bfb88 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -52,6 +52,20 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
super.toArray[U]
}
+ def :+[B >: T: scala.reflect.ClassTag](elem: B): Array[B] = {
+ val result = Array.ofDim[B](repr.length + 1)
+ Array.copy(repr, 0, result, 0, repr.length)
+ result(repr.length) = elem
+ result
+ }
+
+ def +:[B >: T: scala.reflect.ClassTag](elem: B): Array[B] = {
+ val result = Array.ofDim[B](repr.length + 1)
+ result(0) = elem
+ Array.copy(repr, 0, result, 1, repr.length)
+ result
+ }
+
override def par = ParArray.handoff(repr)
/** Flattens a two-dimensional array by concatenating all its rows
@@ -66,7 +80,7 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
b.sizeHint(map{case is: scala.collection.IndexedSeq[_] => is.size case _ => 0}.sum)
for (xs <- this)
b ++= asTrav(xs)
- b.result
+ b.result()
}
/** Transposes a two dimensional array.
diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala
index 33f6949662..334b26ae03 100644
--- a/src/library/scala/collection/mutable/ArraySeq.scala
+++ b/src/library/scala/collection/mutable/ArraySeq.scala
@@ -90,7 +90,7 @@ extends AbstractSeq[A]
}
override def clone(): ArraySeq[A] = {
- val cloned = array.clone.asInstanceOf[Array[AnyRef]]
+ val cloned = array.clone().asInstanceOf[Array[AnyRef]]
new ArraySeq[A](length) {
override val array = cloned
}
diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala
index 670558ab06..e05d668519 100644
--- a/src/library/scala/collection/mutable/ArrayStack.scala
+++ b/src/library/scala/collection/mutable/ArrayStack.scala
@@ -150,7 +150,7 @@ extends AbstractSeq[T]
*
* @param f The function to drain to.
*/
- def drain(f: T => Unit) = while (!isEmpty) f(pop)
+ def drain(f: T => Unit) = while (!isEmpty) f(pop())
/** Pushes all the provided elements in the traversable object onto the stack.
*
@@ -190,7 +190,7 @@ extends AbstractSeq[T]
*
* @param f The function to apply to the top two elements.
*/
- def combine(f: (T, T) => T): Unit = push(f(pop, pop))
+ def combine(f: (T, T) => T): Unit = push(f(pop(), pop()))
/** Repeatedly combine the top elements of the stack until the stack contains only
* one element.
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index 5935a2858a..322522fdd2 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -198,7 +198,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
case Remove(Index(n), x) => if (this(n) == x) remove(n)
case Remove(NoLo, x) => this -= x
- case Reset() => clear
+ case Reset() => clear()
case s: Script[_] => s.iterator foreach <<
case _ => throw new UnsupportedOperationException("message " + cmd + " not understood")
}
@@ -260,6 +260,6 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
override def clone(): This = {
val bf = newBuilder
bf ++= this
- bf.result.asInstanceOf[This]
+ bf.result().asInstanceOf[This]
}
}
diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala
index 37aa1862fa..d3f96f69ad 100644
--- a/src/library/scala/collection/mutable/BufferProxy.scala
+++ b/src/library/scala/collection/mutable/BufferProxy.scala
@@ -6,12 +6,9 @@
** |/ **
\* */
-
-
package scala.collection
package mutable
-import generic._
import script._
/** This is a simple proxy class for <a href="Buffer.html"
@@ -127,7 +124,7 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
/** Clears the buffer contents.
*/
- def clear() { self.clear }
+ def clear() { self.clear() }
/** Send a message to this scriptable object.
*
diff --git a/src/library/scala/collection/mutable/Builder.scala b/src/library/scala/collection/mutable/Builder.scala
index 5c0681df1d..75560580cc 100644
--- a/src/library/scala/collection/mutable/Builder.scala
+++ b/src/library/scala/collection/mutable/Builder.scala
@@ -121,7 +121,7 @@ trait Builder[-Elem, +To] extends Growable[Elem] {
override def ++=(xs: TraversableOnce[Elem]): this.type = { self ++= xs; this }
override def sizeHint(size: Int) = self.sizeHint(size)
override def sizeHintBounded(size: Int, boundColl: TraversableLike[_, _]) = self.sizeHintBounded(size, boundColl)
- def result: NewTo = f(self.result)
+ def result: NewTo = f(self.result())
}
}
diff --git a/src/library/scala/collection/mutable/ConcurrentMap.scala b/src/library/scala/collection/mutable/ConcurrentMap.scala
deleted file mode 100644
index 5b5d738d03..0000000000
--- a/src/library/scala/collection/mutable/ConcurrentMap.scala
+++ /dev/null
@@ -1,90 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package mutable
-
-/** A template trait for mutable maps that allow concurrent access.
- *
- * $concurrentmapinfo
- *
- * @since 2.8
- * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#concurrent_maps "Scala's Collection Library overview"]]
- * section on `Concurrent Maps` for more information.
- *
- * @tparam A the key type of the map
- * @tparam B the value type of the map
- *
- * @define Coll `ConcurrentMap`
- * @define coll concurrent map
- * @define concurrentmapinfo
- * This is a base trait for all Scala concurrent map implementations. It
- * provides all of the methods a `Map` does, with the difference that all the
- * changes are atomic. It also describes methods specific to concurrent maps.
- *
- * '''Note''': The concurrent maps do not accept `'''null'''` for keys or values.
- *
- * @define atomicop
- * This is an atomic operation.
- */
-@deprecated("Use `scala.collection.concurrent.Map` instead.", "2.10.0")
-trait ConcurrentMap[A, B] extends Map[A, B] {
-
- /**
- * Associates the given key with a given value, unless the key was already
- * associated with some other value.
- *
- * $atomicop
- *
- * @param k key with which the specified value is to be associated with
- * @param v value to be associated with the specified key
- * @return `Some(oldvalue)` if there was a value `oldvalue` previously
- * associated with the specified key, or `None` if there was no
- * mapping for the specified key
- */
- def putIfAbsent(k: A, v: B): Option[B]
-
- /**
- * Removes the entry for the specified key if its currently mapped to the
- * specified value.
- *
- * $atomicop
- *
- * @param k key for which the entry should be removed
- * @param v value expected to be associated with the specified key if
- * the removal is to take place
- * @return `true` if the removal took place, `false` otherwise
- */
- def remove(k: A, v: B): Boolean
-
- /**
- * Replaces the entry for the given key only if it was previously mapped to
- * a given value.
- *
- * $atomicop
- *
- * @param k key for which the entry should be replaced
- * @param oldvalue value expected to be associated with the specified key
- * if replacing is to happen
- * @param newvalue value to be associated with the specified key
- * @return `true` if the entry was replaced, `false` otherwise
- */
- def replace(k: A, oldvalue: B, newvalue: B): Boolean
-
- /**
- * Replaces the entry for the given key only if it was previously mapped
- * to some value.
- *
- * $atomicop
- *
- * @param k key for which the entry should be replaced
- * @param v value to be associated with the specified key
- * @return `Some(v)` if the given key was previously mapped to some value `v`, or `None` otherwise
- */
- def replace(k: A, v: B): Option[B]
-}
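
The contract of the deleted trait lives on in scala.collection.concurrent.Map, as the deprecation message says, and TrieMap is the stock implementation; migrating code mostly means switching the import. A minimal sketch of the replacement API, which keeps the same putIfAbsent/replace/remove(k, v) shapes documented above:

    import scala.collection.concurrent.TrieMap

    object ConcurrentMapMigration {
      def main(args: Array[String]): Unit = {
        val cache = TrieMap.empty[String, Int]

        // Atomic insert-if-missing: returns the previously bound value, if any.
        println(cache.putIfAbsent("a", 1)) // None
        println(cache.putIfAbsent("a", 2)) // Some(1)

        // Conditional replace and conditional remove, both atomic.
        println(cache.replace("a", 1, 3))  // true
        println(cache.remove("a", 99))     // false; the value is 3, not 99
        println(cache)                     // TrieMap(a -> 3)
      }
    }
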
diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala
index 18a1e234f6..a106794912 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedList.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala
@@ -68,7 +68,7 @@ class DoubleLinkedList[A]() extends AbstractSeq[A]
override def clone(): DoubleLinkedList[A] = {
val builder = newBuilder
builder ++= this
- builder.result
+ builder.result()
}
}
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index 91e95e039b..49a9427588 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -17,7 +17,6 @@ package mutable
* hash table as an implementation.
*
* @define coll flat hash table
- * @define cannotStoreNull '''Note''': A $coll cannot store `null` elements.
* @since 2.3
* @tparam A the type of the elements contained in the $coll.
*/
@@ -78,7 +77,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
assert(size >= 0)
table = new Array(capacity(sizeForThreshold(size, _loadFactor)))
- threshold = newThreshold(_loadFactor, table.size)
+ threshold = newThreshold(_loadFactor, table.length)
seedvalue = in.readInt()
@@ -87,9 +86,9 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
var index = 0
while (index < size) {
- val elem = in.readObject().asInstanceOf[A]
+ val elem = entryToElem(in.readObject())
f(elem)
- addEntry(elem)
+ addElem(elem)
index += 1
}
}
@@ -109,61 +108,78 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
}
/** Finds an entry in the hash table if such an element exists. */
- protected def findEntry(elem: A): Option[A] = {
- val entry = findEntryImpl(elem)
- if (null == entry) None else Some(entry.asInstanceOf[A])
- }
+ protected def findEntry(elem: A): Option[A] =
+ findElemImpl(elem) match {
+ case null => None
+ case entry => Some(entryToElem(entry))
+ }
+
/** Checks whether an element is contained in the hash table. */
- protected def containsEntry(elem: A): Boolean = {
- null != findEntryImpl(elem)
+ protected def containsElem(elem: A): Boolean = {
+ null != findElemImpl(elem)
}
- private def findEntryImpl(elem: A): AnyRef = {
- var h = index(elemHashCode(elem))
- var entry = table(h)
- while (null != entry && entry != elem) {
+ private def findElemImpl(elem: A): AnyRef = {
+ val searchEntry = elemToEntry(elem)
+ var h = index(searchEntry.hashCode)
+ var curEntry = table(h)
+ while (null != curEntry && curEntry != searchEntry) {
h = (h + 1) % table.length
- entry = table(h)
+ curEntry = table(h)
}
- entry
+ curEntry
}
- /** Add entry if not yet in table.
- * @return Returns `true` if a new entry was added, `false` otherwise.
+ /** Add elem if not yet in table.
+ * @return Returns `true` if a new elem was added, `false` otherwise.
+ */
+ protected def addElem(elem: A) : Boolean = {
+ addEntry(elemToEntry(elem))
+ }
+
+ /**
+ * Add an entry (an elem converted to an entry via elemToEntry) if not yet in
+ * table.
+ * @return Returns `true` if a new elem was added, `false` otherwise.
*/
- protected def addEntry(elem: A) : Boolean = {
- var h = index(elemHashCode(elem))
- var entry = table(h)
- while (null != entry) {
- if (entry == elem) return false
+ protected def addEntry(newEntry : AnyRef) : Boolean = {
+ var h = index(newEntry.hashCode)
+ var curEntry = table(h)
+ while (null != curEntry) {
+ if (curEntry == newEntry) return false
h = (h + 1) % table.length
- entry = table(h)
+ curEntry = table(h)
//Statistics.collisions += 1
}
- table(h) = elem.asInstanceOf[AnyRef]
+ table(h) = newEntry
tableSize = tableSize + 1
nnSizeMapAdd(h)
if (tableSize >= threshold) growTable()
true
+
}
- /** Removes an entry from the hash table, returning an option value with the element, or `None` if it didn't exist. */
- protected def removeEntry(elem: A) : Option[A] = {
+ /**
+ * Removes an elem from the hash table returning true if the element was found (and thus removed)
+ * or false if it didn't exist.
+ */
+ protected def removeElem(elem: A) : Boolean = {
if (tableDebug) checkConsistent()
def precedes(i: Int, j: Int) = {
val d = table.length >> 1
if (i <= j) j - i < d
else i - j > d
}
- var h = index(elemHashCode(elem))
- var entry = table(h)
- while (null != entry) {
- if (entry == elem) {
+ val removalEntry = elemToEntry(elem)
+ var h = index(removalEntry.hashCode)
+ var curEntry = table(h)
+ while (null != curEntry) {
+ if (curEntry == removalEntry) {
var h0 = h
var h1 = (h0 + 1) % table.length
while (null != table(h1)) {
- val h2 = index(elemHashCode(table(h1).asInstanceOf[A]))
+ val h2 = index(table(h1).hashCode)
//Console.println("shift at "+h1+":"+table(h1)+" with h2 = "+h2+"? "+(h2 != h1)+precedes(h2, h0)+table.length)
if (h2 != h1 && precedes(h2, h0)) {
//Console.println("shift "+h1+" to "+h0+"!")
@@ -176,12 +192,12 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
tableSize -= 1
nnSizeMapRemove(h0)
if (tableDebug) checkConsistent()
- return Some(entry.asInstanceOf[A])
+ return true
}
h = (h + 1) % table.length
- entry = table(h)
+ curEntry = table(h)
}
- None
+ false
}
protected def iterator: Iterator[A] = new AbstractIterator[A] {
@@ -191,8 +207,8 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
i < table.length
}
def next(): A =
- if (hasNext) { i += 1; table(i - 1).asInstanceOf[A] }
- else Iterator.empty.next
+ if (hasNext) { i += 1; entryToElem(table(i - 1)) }
+ else Iterator.empty.next()
}
private def growTable() {
@@ -205,7 +221,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
var i = 0
while (i < oldtable.length) {
val entry = oldtable(i)
- if (null != entry) addEntry(entry.asInstanceOf[A])
+ if (null != entry) addEntry(entry)
i += 1
}
if (tableDebug) checkConsistent()
@@ -213,9 +229,10 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
private def checkConsistent() {
for (i <- 0 until table.length)
- if (table(i) != null && !containsEntry(table(i).asInstanceOf[A]))
- assert(false, i+" "+table(i)+" "+table.mkString)
+ if (table(i) != null && !containsElem(entryToElem(table(i))))
+ assert(assertion = false, i+" "+table(i)+" "+table.mkString)
}
+
/* Size map handling code */
@@ -265,7 +282,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
val totalbuckets = totalSizeMapBuckets
var bucketidx = 0
var tableidx = 0
- var tbl = table
+ val tbl = table
var tableuntil = sizeMapBucketSize min tbl.length
while (bucketidx < totalbuckets) {
var currbucketsz = 0
@@ -341,7 +358,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
seedvalue = c.seedvalue
sizemap = c.sizemap
}
- if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild
+ if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild()
}
}
@@ -358,6 +375,11 @@ private[collection] object FlatHashTable {
final def seedGenerator = new ThreadLocal[scala.util.Random] {
override def initialValue = new scala.util.Random
}
+
+ private object NullSentinel {
+ override def hashCode = 0
+ override def toString = "NullSentinel"
+ }
/** The load factor for the hash table; must be < 500 (0.5)
*/
@@ -386,10 +408,6 @@ private[collection] object FlatHashTable {
// so that:
protected final def sizeMapBucketSize = 1 << sizeMapBucketBitSize
- protected def elemHashCode(elem: A) =
- if (elem == null) throw new IllegalArgumentException("Flat hash tables cannot contain null elements.")
- else elem.hashCode()
-
protected final def improve(hcode: Int, seed: Int) = {
//var h: Int = hcode + ~(hcode << 9)
//h = h ^ (h >>> 14)
@@ -404,6 +422,19 @@ private[collection] object FlatHashTable {
val rotated = (improved >>> rotation) | (improved << (32 - rotation))
rotated
}
+
+ /**
+ * Elems have type A, but we store AnyRef in the table. Plus we need to deal with
+ * null elems, which need to be stored as NullSentinel
+ */
+ protected final def elemToEntry(elem : A) : AnyRef =
+ if (null == elem) NullSentinel else elem.asInstanceOf[AnyRef]
+
+ /**
+ * Does the inverse translation of elemToEntry
+ */
+ protected final def entryToElem(entry : AnyRef) : A =
+ (if (entry.isInstanceOf[NullSentinel.type]) null else entry).asInstanceOf[A]
}
}
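
The sentinel scheme above is what lifts the old "flat hash tables cannot contain null elements" restriction: elements are boxed to AnyRef entries on the way into the table, with a private singleton standing in for null, and unboxed again on the way out, so a raw null slot still unambiguously means "empty". A standalone sketch of the encoding, with hypothetical names rather than the FlatHashTable internals:

    object NullSentinelSketch {
      // Private stand-in stored wherever the caller supplied null.
      private object NullSentinel {
        override def toString = "NullSentinel"
      }

      private def elemToEntry[A](elem: A): AnyRef =
        if (elem == null) NullSentinel else elem.asInstanceOf[AnyRef]

      private def entryToElem[A](entry: AnyRef): A =
        (if (entry.isInstanceOf[NullSentinel.type]) null else entry).asInstanceOf[A]

      def main(args: Array[String]): Unit = {
        // Toy "table": occupied slots never hold raw null, so null means an empty slot.
        val table = new Array[AnyRef](4)
        List("x", null, "y").zipWithIndex.foreach { case (e, i) => table(i) = elemToEntry(e) }

        println(table.toList)                                         // List(x, NullSentinel, y, null)
        println(table.take(3).map(e => entryToElem[String](e)).toList) // List(x, null, y)
        println(table(3) == null)                                     // true, an empty slot
      }
    }
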
diff --git a/src/library/scala/collection/mutable/GenIterable.scala.disabled b/src/library/scala/collection/mutable/GenIterable.scala.disabled
deleted file mode 100644
index 9acfccdae8..0000000000
--- a/src/library/scala/collection/mutable/GenIterable.scala.disabled
+++ /dev/null
@@ -1,37 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package mutable
-
-
-import generic._
-
-
-/** A base trait for iterable collections that can be mutated.
- *
- * $possiblyparinfo
- *
- * $iterableInfo
- */
-trait GenIterable[A] extends GenTraversable[A]
- with scala.collection.GenIterable[A]
- with scala.collection.GenIterableLike[A, GenIterable[A]]
-// with GenericTraversableTemplate[A, GenIterable]
-{
- def seq: Iterable[A]
- //override def companion: GenericCompanion[GenIterable] = GenIterable
-}
-
-
-// object GenIterable extends TraversableFactory[GenIterable] {
-// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenIterable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-// def newBuilder[A]: Builder[A, GenIterable[A]] = Iterable.newBuilder
-// }
-
-
diff --git a/src/library/scala/collection/mutable/GenMap.scala.disabled b/src/library/scala/collection/mutable/GenMap.scala.disabled
deleted file mode 100644
index e4fd1dad64..0000000000
--- a/src/library/scala/collection/mutable/GenMap.scala.disabled
+++ /dev/null
@@ -1,40 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.collection
-package mutable
-
-
-import generic._
-
-
-/** A base trait for maps that can be mutated.
- * $possiblyparinfo
- * $mapNote
- * $mapTags
- * @since 1.0
- * @author Matthias Zenger
- */
-trait GenMap[A, B]
-extends GenIterable[(A, B)]
- with scala.collection.GenMap[A, B]
- with scala.collection.GenMapLike[A, B, GenMap[A, B]]
-{
- def seq: Map[A, B]
-}
-
-
-// object GenMap extends MapFactory[GenMap] {
-// def empty[A, B]: Map[A, B] = Map.empty
-
-// /** $mapCanBuildFromInfo */
-// implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), GenMap[A, B]] = new MapCanBuildFrom[A, B]
-// }
-
diff --git a/src/library/scala/collection/mutable/GenSeq.scala.disabled b/src/library/scala/collection/mutable/GenSeq.scala.disabled
deleted file mode 100644
index ec904723a5..0000000000
--- a/src/library/scala/collection/mutable/GenSeq.scala.disabled
+++ /dev/null
@@ -1,44 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.collection
-package mutable
-
-
-import generic._
-
-
-/** A subtrait of `collection.GenSeq` which represents sequences
- * that can be mutated.
- *
- * $possiblyparinfo
- *
- * $seqInfo
- *
- * The class adds an `update` method to `collection.Seq`.
- *
- * @define Coll `mutable.Seq`
- * @define coll mutable sequence
- */
-trait GenSeq[A] extends GenIterable[A]
- with scala.collection.GenSeq[A]
- with scala.collection.GenSeqLike[A, GenSeq[A]]
-// with GenericTraversableTemplate[A, GenSeq]
-{
- //override def companion: GenericCompanion[GenSeq] = GenSeq
- def seq: Seq[A]
-}
-
-
-// object GenSeq extends SeqFactory[GenSeq] {
-// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-// def newBuilder[A]: Builder[A, GenSeq[A]] = Seq.newBuilder
-// }
-
diff --git a/src/library/scala/collection/mutable/GenSet.scala.disabled b/src/library/scala/collection/mutable/GenSet.scala.disabled
deleted file mode 100644
index dec20e2a46..0000000000
--- a/src/library/scala/collection/mutable/GenSet.scala.disabled
+++ /dev/null
@@ -1,46 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.collection
-package mutable
-
-
-
-import generic._
-
-
-/** A generic trait for mutable sets.
- *
- * $possiblyparinfo
- * $setNote
- * $setTags
- *
- * @since 1.0
- * @author Matthias Zenger
- * @define Coll `mutable.Set`
- * @define coll mutable set
- */
-trait GenSet[A] extends GenIterable[A]
- with Growable[A]
- with scala.collection.GenSet[A]
- with scala.collection.GenSetLike[A, GenSet[A]]
-// with GenericSetTemplate[A, GenSet]
-{
- //override def companion: GenericCompanion[GenSet] = GenSet
- def seq: Set[A]
-}
-
-
-// object GenSet extends TraversableFactory[GenSet] {
-// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSet[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-// def newBuilder[A]: Builder[A, GenSet[A]] = Set.newBuilder
-// }
-
-
diff --git a/src/library/scala/collection/mutable/GenTraversable.scala.disabled b/src/library/scala/collection/mutable/GenTraversable.scala.disabled
deleted file mode 100644
index 2453e2ce87..0000000000
--- a/src/library/scala/collection/mutable/GenTraversable.scala.disabled
+++ /dev/null
@@ -1,38 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.collection
-package mutable
-
-
-import generic._
-
-
-/** A trait for traversable collections that can be mutated.
- *
- * $possiblyparinfo
- *
- * $traversableInfo
- * @define mutability mutable
- */
-trait GenTraversable[A] extends scala.collection.GenTraversable[A]
- with scala.collection.GenTraversableLike[A, GenTraversable[A]]
-// with GenericTraversableTemplate[A, GenTraversable]
- with Mutable
-{
- def seq: Traversable[A]
- //override def companion: GenericCompanion[GenTraversable] = GenTraversable
-}
-
-// object GenTraversable extends TraversableFactory[GenTraversable] {
-// implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-// def newBuilder[A] = Traversable.newBuilder
-// }
-
diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala
index 3cd7f07d83..692d6b8d6a 100644
--- a/src/library/scala/collection/mutable/HashMap.scala
+++ b/src/library/scala/collection/mutable/HashMap.scala
@@ -95,7 +95,7 @@ extends AbstractMap[A, B]
def iterator = entriesIterator map {e => (e.key, e.value)}
- override def foreach[C](f: ((A, B)) => C): Unit = foreachEntry(e => f(e.key, e.value))
+ override def foreach[C](f: ((A, B)) => C): Unit = foreachEntry(e => f((e.key, e.value)))
/* Override to avoid tuple allocation in foreach */
override def keySet: scala.collection.Set[A] = new DefaultKeySet {
@@ -111,21 +111,21 @@ extends AbstractMap[A, B]
override def keysIterator: Iterator[A] = new AbstractIterator[A] {
val iter = entriesIterator
def hasNext = iter.hasNext
- def next() = iter.next.key
+ def next() = iter.next().key
}
/* Override to avoid tuple allocation */
override def valuesIterator: Iterator[B] = new AbstractIterator[B] {
val iter = entriesIterator
def hasNext = iter.hasNext
- def next() = iter.next.value
+ def next() = iter.next().value
}
/** Toggles whether a size map is used to track hash map statistics.
*/
def useSizeMap(t: Boolean) = if (t) {
- if (!isSizeMapDefined) sizeMapInitAndRebuild
- } else sizeMapDisable
+ if (!isSizeMapDefined) sizeMapInitAndRebuild()
+ } else sizeMapDisable()
protected def createNewEntry[B1](key: A, value: B1): Entry = {
new Entry(key, value.asInstanceOf[B])
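
Note: the `foreach` change in HashMap.scala above is needed because `f` has type `((A, B)) => C` — a function of a single tuple argument — so the entry's key and value must be packed into a tuple rather than passed as two arguments (which previously compiled only through auto-tupling). A minimal sketch of that calling convention, using illustrative names rather than code from this patch:

import scala.collection.mutable

object TupledForeachDemo {
  def main(args: Array[String]): Unit = {
    val m = mutable.HashMap("a" -> 1, "b" -> 2)
    // f takes one tuple argument, so it must be applied as f((k, v)).
    val f: ((String, Int)) => Unit = { case (k, v) => println(s"$k -> $v") }
    m.foreach(f)
  }
}
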
diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala
index c60e363f8f..753f7f8d01 100644
--- a/src/library/scala/collection/mutable/HashSet.scala
+++ b/src/library/scala/collection/mutable/HashSet.scala
@@ -16,8 +16,6 @@ import scala.collection.parallel.mutable.ParHashSet
/** This class implements mutable sets using a hashtable.
*
- * $cannotStoreNull
- *
* @author Matthias Zenger
* @author Martin Odersky
* @version 2.0, 31/12/2006
@@ -55,17 +53,17 @@ extends AbstractSet[A]
override def size: Int = tableSize
- def contains(elem: A): Boolean = containsEntry(elem)
+ def contains(elem: A): Boolean = containsElem(elem)
- def += (elem: A): this.type = { addEntry(elem); this }
+ def += (elem: A): this.type = { addElem(elem); this }
- def -= (elem: A): this.type = { removeEntry(elem); this }
+ def -= (elem: A): this.type = { removeElem(elem); this }
override def par = new ParHashSet(hashTableContents)
- override def add(elem: A): Boolean = addEntry(elem)
+ override def add(elem: A): Boolean = addElem(elem)
- override def remove(elem: A): Boolean = removeEntry(elem).isDefined
+ override def remove(elem: A): Boolean = removeElem(elem)
override def clear() { clearTable() }
@@ -75,8 +73,8 @@ extends AbstractSet[A]
var i = 0
val len = table.length
while (i < len) {
- val elem = table(i)
- if (elem ne null) f(elem.asInstanceOf[A])
+ val curEntry = table(i)
+ if (curEntry ne null) f(entryToElem(curEntry))
i += 1
}
}
@@ -94,8 +92,8 @@ extends AbstractSet[A]
/** Toggles whether a size map is used to track hash map statistics.
*/
def useSizeMap(t: Boolean) = if (t) {
- if (!isSizeMapDefined) sizeMapInitAndRebuild
- } else sizeMapDisable
+ if (!isSizeMapDefined) sizeMapInitAndRebuild()
+ } else sizeMapDisable()
}
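
Note: the rename from `containsEntry`/`addEntry`/`removeEntry` to `containsElem`/`addElem`/`removeElem`, together with `entryToElem` in `foreach` and the dropped `$cannotStoreNull` tag, reflects that the flat hash table now wraps elements into entries internally, which among other things lets the set represent `null`. A small usage sketch, assuming a Scala version that includes this change:

import scala.collection.mutable

object HashSetNullDemo {
  def main(args: Array[String]): Unit = {
    val s = mutable.HashSet[String]("a")
    s += null                  // stored via the element-to-entry wrapping
    println(s.contains(null))  // true
    s -= null
    println(s.contains(null))  // false
  }
}
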
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 8fef1be66b..37d2b51a91 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -96,7 +96,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
val smDefined = in.readBoolean()
table = new Array(capacity(sizeForThreshold(_loadFactor, size)))
- threshold = newThreshold(_loadFactor, table.size)
+ threshold = newThreshold(_loadFactor, table.length)
if (smDefined) sizeMapInit(table.length) else sizemap = null
@@ -365,7 +365,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
seedvalue = c.seedvalue
sizemap = c.sizemap
}
- if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild
+ if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild()
}
private[collection] def hashTableContents = new HashTable.Contents(
@@ -382,7 +382,7 @@ private[collection] object HashTable {
/** The load factor for the hash table (in 0.001 step).
*/
private[collection] final def defaultLoadFactor: Int = 750 // corresponds to 75%
- private[collection] final def loadFactorDenum = 1000;
+ private[collection] final def loadFactorDenum = 1000
private[collection] final def newThreshold(_loadFactor: Int, size: Int) = ((size.toLong * _loadFactor) / loadFactorDenum).toInt
@@ -457,13 +457,13 @@ private[collection] object HashTable {
*/
private[collection] def powerOfTwo(target: Int): Int = {
/* See http://bits.stephan-brumme.com/roundUpToNextPowerOfTwo.html */
- var c = target - 1;
- c |= c >>> 1;
- c |= c >>> 2;
- c |= c >>> 4;
- c |= c >>> 8;
- c |= c >>> 16;
- c + 1;
+ var c = target - 1
+ c |= c >>> 1
+ c |= c >>> 2
+ c |= c >>> 4
+ c |= c >>> 8
+ c |= c >>> 16
+ c + 1
}
class Contents[A, Entry >: Null <: HashEntry[A, Entry]](
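
Note: dropping the semicolons in `powerOfTwo` is purely cosmetic; the routine still rounds its argument up to the next power of two by smearing the highest set bit to the right and adding one. A standalone copy of the same computation, for reference:

object PowerOfTwoDemo {
  // Round up to the next power of two, as in HashTable.powerOfTwo.
  def powerOfTwo(target: Int): Int = {
    var c = target - 1
    c |= c >>> 1
    c |= c >>> 2
    c |= c >>> 4
    c |= c >>> 8
    c |= c >>> 16
    c + 1
  }

  def main(args: Array[String]): Unit = {
    println(Seq(1, 2, 3, 17, 1000).map(powerOfTwo)) // List(1, 2, 4, 32, 1024)
  }
}
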
diff --git a/src/library/scala/collection/mutable/History.scala b/src/library/scala/collection/mutable/History.scala
index c1d94a904c..34e8f7d5b8 100644
--- a/src/library/scala/collection/mutable/History.scala
+++ b/src/library/scala/collection/mutable/History.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -41,7 +41,7 @@ extends AbstractIterable[(Pub, Evt)]
*/
def notify(pub: Pub, event: Evt) {
if (log.length >= maxHistory)
- log.dequeue
+ log.dequeue()
log.enqueue((pub, event))
}
@@ -50,7 +50,7 @@ extends AbstractIterable[(Pub, Evt)]
def iterator: Iterator[(Pub, Evt)] = log.iterator
def events: Iterator[Evt] = log.iterator map (_._2)
- def clear() { log.clear }
+ def clear() { log.clear() }
/** Checks if two history objects are structurally identical.
*
@@ -60,5 +60,5 @@ extends AbstractIterable[(Pub, Evt)]
case that: History[_, _] => this.log equals that.log
case _ => false
}
- override def hashCode = log.hashCode
+ override def hashCode = log.hashCode()
}
diff --git a/src/library/scala/collection/mutable/IndexedSeqLike.scala b/src/library/scala/collection/mutable/IndexedSeqLike.scala
index f0c31ec7fb..7b582eb5cb 100644
--- a/src/library/scala/collection/mutable/IndexedSeqLike.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqLike.scala
@@ -6,11 +6,8 @@
** |/ **
\* */
-
-
package scala.collection
package mutable
-import generic._
/** A subtrait of scala.collection.IndexedSeq which represents sequences
* that can be mutated.
diff --git a/src/library/scala/collection/mutable/IndexedSeqOptimized.scala b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
index cb7e8efdc7..80b527a7b9 100755
--- a/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
@@ -6,11 +6,8 @@
** |/ **
\* */
-
-
package scala.collection
package mutable
-import generic._
/** A subtrait of scala.collection.IndexedSeq which represents sequences
* that can be mutated.
diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala
index cf5166eea8..a88ed8f123 100644
--- a/src/library/scala/collection/mutable/IndexedSeqView.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqView.scala
@@ -82,8 +82,6 @@ self =>
protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with TakenWhile
protected override def newReversed: Transformed[A] = new AbstractTransformed[A] with Reversed
- private implicit def asThis(xs: Transformed[A]): This = xs.asInstanceOf[This]
-
override def filter(p: A => Boolean): This = newFiltered(p)
override def init: This = newSliced(SliceInterval(0, self.length - 1))
override def drop(n: Int): This = newSliced(SliceInterval(n, self.length))
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index da2c36ac2d..14f30d74e8 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -92,7 +92,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
def hasNext = cur ne null
def next =
if (hasNext) { val res = (cur.key, cur.value); cur = cur.later; res }
- else Iterator.empty.next
+ else Iterator.empty.next()
}
protected class FilteredKeys(p: A => Boolean) extends super.FilteredKeys(p) {
@@ -118,7 +118,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
def hasNext = cur ne null
def next =
if (hasNext) { val res = cur.key; cur = cur.later; res }
- else Iterator.empty.next
+ else Iterator.empty.next()
}
override def valuesIterator: Iterator[B] = new AbstractIterator[B] {
@@ -126,7 +126,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
def hasNext = cur ne null
def next =
if (hasNext) { val res = cur.value; cur = cur.later; res }
- else Iterator.empty.next
+ else Iterator.empty.next()
}
override def foreach[U](f: ((A, B)) => U) {
diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala
index 1723258433..5641a78d46 100644
--- a/src/library/scala/collection/mutable/LinkedHashSet.scala
+++ b/src/library/scala/collection/mutable/LinkedHashSet.scala
@@ -78,7 +78,7 @@ class LinkedHashSet[A] extends AbstractSet[A]
def hasNext = cur ne null
def next =
if (hasNext) { val res = cur.key; cur = cur.later; res }
- else Iterator.empty.next
+ else Iterator.empty.next()
}
override def foreach[U](f: A => U) {
diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala
index 4f63ede7ca..3003080060 100644
--- a/src/library/scala/collection/mutable/LinkedListLike.scala
+++ b/src/library/scala/collection/mutable/LinkedListLike.scala
@@ -6,12 +6,9 @@
** |/ **
\* */
-
-
package scala.collection
package mutable
-import generic._
import scala.annotation.tailrec
/** This extensible class may be used as a basis for implementing linked
@@ -188,6 +185,6 @@ trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends Seq
override def clone(): This = {
val bf = newBuilder
bf ++= this
- bf.result
+ bf.result()
}
}
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index 67af4a6bd6..af1d7e4183 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -6,8 +6,6 @@
** |/ **
\* */
-
-
package scala.collection
package mutable
@@ -56,12 +54,18 @@ final class ListBuffer[A]
import scala.collection.Traversable
import scala.collection.immutable.ListSerializeEnd
+ /** Expected invariants:
+ * If start.isEmpty, last0 == null
+ * If start.nonEmpty, last0 != null
+ * If len == 0, start.isEmpty
+ * If len > 0, start.nonEmpty
+ */
private var start: List[A] = Nil
private var last0: ::[A] = _
private var exported: Boolean = false
private var len = 0
- protected def underlying: immutable.Seq[A] = start
+ protected def underlying: List[A] = start
private def writeObject(out: ObjectOutputStream) {
// write start
@@ -133,7 +137,7 @@ final class ListBuffer[A]
if (n < 0 || n >= len) throw new IndexOutOfBoundsException(n.toString)
if (exported) copy()
if (n == 0) {
- val newElem = new :: (x, start.tail);
+ val newElem = new :: (x, start.tail)
if (last0 eq start) {
last0 = newElem
}
@@ -160,7 +164,7 @@ final class ListBuffer[A]
*/
def += (x: A): this.type = {
if (exported) copy()
- if (start.isEmpty) {
+ if (isEmpty) {
last0 = new :: (x, Nil)
start = last0
} else {
@@ -172,8 +176,11 @@ final class ListBuffer[A]
this
}
- override def ++=(xs: TraversableOnce[A]): this.type =
- if (xs.asInstanceOf[AnyRef] eq this) ++= (this take size) else super.++=(xs)
+ override def ++=(xs: TraversableOnce[A]): this.type = xs match {
+ case x: AnyRef if x eq this => this ++= (this take size)
+ case _ => super.++=(xs)
+
+ }
override def ++=:(xs: TraversableOnce[A]): this.type =
if (xs.asInstanceOf[AnyRef] eq this) ++=: (this take size) else super.++=:(xs)
@@ -182,6 +189,7 @@ final class ListBuffer[A]
*/
def clear() {
start = Nil
+ last0 = null
exported = false
len = 0
}
@@ -195,7 +203,7 @@ final class ListBuffer[A]
def +=: (x: A): this.type = {
if (exported) copy()
val newElem = new :: (x, start)
- if (start.isEmpty) last0 = newElem
+ if (isEmpty) last0 = newElem
start = newElem
len += 1
this
@@ -238,6 +246,15 @@ final class ListBuffer[A]
}
}
+ /** Reduce the length of the buffer, and null out last0
+ * if this reduces the length to 0.
+ */
+ private def reduceLengthBy(num: Int) {
+ len -= num
+ if (len <= 0) // obviously shouldn't be < 0, but still better not to leak
+ last0 = null
+ }
+
/** Removes a given number of elements on a given index position. May take
* time linear in the buffer size.
*
@@ -253,7 +270,6 @@ final class ListBuffer[A]
if (exported) copy()
val n1 = n max 0
val count1 = count min (len - n1)
- var old = start.head
if (n1 == 0) {
var c = count1
while (c > 0) {
@@ -274,7 +290,7 @@ final class ListBuffer[A]
c -= 1
}
}
- len -= count1
+ reduceLengthBy(count1)
}
// Implementation of abstract method in Builder
@@ -285,7 +301,7 @@ final class ListBuffer[A]
* copied lazily, the first time it is mutated.
*/
override def toList: List[A] = {
- exported = !start.isEmpty
+ exported = !isEmpty
start
}
@@ -296,7 +312,7 @@ final class ListBuffer[A]
* @param xs the list to which elements are prepended
*/
def prependToList(xs: List[A]): List[A] = {
- if (start.isEmpty) xs
+ if (isEmpty) xs
else {
if (exported) copy()
last0.tl = xs
@@ -331,7 +347,7 @@ final class ListBuffer[A]
if (last0 eq cursor.tail) last0 = cursor.asInstanceOf[::[A]]
cursor.asInstanceOf[::[A]].tl = cursor.tail.tail
}
- len -= 1
+ reduceLengthBy(1)
old
}
@@ -343,11 +359,12 @@ final class ListBuffer[A]
*/
override def -= (elem: A): this.type = {
if (exported) copy()
- if (start.isEmpty) {}
+ if (isEmpty) {}
else if (start.head == elem) {
start = start.tail
- len -= 1
- } else {
+ reduceLengthBy(1)
+ }
+ else {
var cursor = start
while (!cursor.tail.isEmpty && cursor.tail.head != elem) {
cursor = cursor.tail
@@ -357,7 +374,7 @@ final class ListBuffer[A]
if (z.tl == last0)
last0 = z
z.tl = cursor.tail.tail
- len -= 1
+ reduceLengthBy(1)
}
}
this
@@ -397,6 +414,7 @@ final class ListBuffer[A]
/** Copy contents of this buffer */
private def copy() {
+ if (isEmpty) return
var cursor = start
val limit = last0.tail
clear()
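
Note: the rewritten `++=` keeps the established self-append behaviour — when a buffer is appended to itself, a snapshot (`this take size`) is appended instead, so the operation terminates — while `reduceLengthBy` and the nulling of `last0` in `clear()` maintain the invariants documented at the top of the class. A quick illustration of the self-append case (ordinary library usage, not code from the patch):

import scala.collection.mutable.ListBuffer

object ListBufferSelfAppend {
  def main(args: Array[String]): Unit = {
    val buf = ListBuffer(1, 2, 3)
    buf ++= buf          // appends a snapshot of the current contents
    println(buf)         // ListBuffer(1, 2, 3, 1, 2, 3)
    buf.clear()          // resets start, last0 and len together
    println(buf.isEmpty) // true
  }
}
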
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index a53aa3b76a..49d1e039f0 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -11,7 +11,7 @@ package scala.collection
package mutable
import generic._
-import scala.annotation.{migration, bridge}
+import scala.annotation.migration
import parallel.mutable.ParMap
/** A template trait for mutable maps.
@@ -50,8 +50,6 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
with Parallelizable[(A, B), ParMap[A, B]]
{ self =>
- import scala.collection.Traversable
-
/** A common implementation of `newBuilder` for all mutable maps
* in terms of `empty`.
*
diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala
index bc6272bfdb..03110569c4 100644
--- a/src/library/scala/collection/mutable/MutableList.scala
+++ b/src/library/scala/collection/mutable/MutableList.scala
@@ -61,8 +61,7 @@ extends AbstractSeq[A]
tl
}
- // this method must be private for binary compatibility
- private final def tailImpl(tl: MutableList[A]) {
+ protected final def tailImpl(tl: MutableList[A]) {
require(nonEmpty, "tail of empty list")
tl.first0 = first0.tail
tl.len = len - 1
@@ -149,7 +148,7 @@ extends AbstractSeq[A]
override def clone(): MutableList[A] = {
val bf = newBuilder
bf ++= seq
- bf.result
+ bf.result()
}
}
diff --git a/src/library/scala/collection/mutable/ObservableBuffer.scala b/src/library/scala/collection/mutable/ObservableBuffer.scala
index bcaf977727..7a2fce9128 100644
--- a/src/library/scala/collection/mutable/ObservableBuffer.scala
+++ b/src/library/scala/collection/mutable/ObservableBuffer.scala
@@ -65,7 +65,7 @@ trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoa
}
abstract override def clear(): Unit = {
- super.clear
+ super.clear()
publish(new Reset with Undoable {
def undo() { throw new UnsupportedOperationException("cannot undo") }
})
diff --git a/src/library/scala/collection/mutable/ObservableMap.scala b/src/library/scala/collection/mutable/ObservableMap.scala
index d81c90bf4c..3544275300 100644
--- a/src/library/scala/collection/mutable/ObservableMap.scala
+++ b/src/library/scala/collection/mutable/ObservableMap.scala
@@ -60,7 +60,7 @@ trait ObservableMap[A, B] extends Map[A, B] with Publisher[Message[(A, B)] with
}
abstract override def clear(): Unit = {
- super.clear
+ super.clear()
publish(new Reset with Undoable {
def undo(): Unit = throw new UnsupportedOperationException("cannot undo")
})
diff --git a/src/library/scala/collection/mutable/ObservableSet.scala b/src/library/scala/collection/mutable/ObservableSet.scala
index 3e79506413..81580316ff 100644
--- a/src/library/scala/collection/mutable/ObservableSet.scala
+++ b/src/library/scala/collection/mutable/ObservableSet.scala
@@ -44,7 +44,7 @@ trait ObservableSet[A] extends Set[A] with Publisher[Message[A] with Undoable]
}
abstract override def clear(): Unit = {
- super.clear
+ super.clear()
publish(new Reset with Undoable {
def undo(): Unit = throw new UnsupportedOperationException("cannot undo")
})
diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala
index 8b3e52470a..a0aea43121 100644
--- a/src/library/scala/collection/mutable/OpenHashMap.scala
+++ b/src/library/scala/collection/mutable/OpenHashMap.scala
@@ -27,7 +27,7 @@ object OpenHashMap {
var value: Option[Value])
extends HashEntry[Key, OpenEntry[Key, Value]]
- private[mutable] def nextPowerOfTwo(i : Int) = highestOneBit(i) << 1;
+ private[mutable] def nextPowerOfTwo(i : Int) = highestOneBit(i) << 1
}
/** A mutable hash map based on an open hashing scheme. The precise scheme is
@@ -78,8 +78,8 @@ extends AbstractMap[Key, Value]
/** Returns a mangled hash code of the provided key. */
protected def hashOf(key: Key) = {
var h = key.##
- h ^= ((h >>> 20) ^ (h >>> 12));
- h ^ (h >>> 7) ^ (h >>> 4);
+ h ^= ((h >>> 20) ^ (h >>> 12))
+ h ^ (h >>> 7) ^ (h >>> 4)
}
private[this] def growTable() = {
@@ -89,7 +89,7 @@ extends AbstractMap[Key, Value]
table = new Array[Entry](newSize)
mask = newSize - 1
oldTable.foreach( entry =>
- if (entry != null && entry.value != None) addEntry(entry));
+ if (entry != null && entry.value != None) addEntry(entry))
deleted = 0
}
@@ -124,18 +124,18 @@ extends AbstractMap[Key, Value]
put(key, hashOf(key), value)
private def put(key: Key, hash: Int, value: Value): Option[Value] = {
- if (2 * (size + deleted) > mask) growTable
+ if (2 * (size + deleted) > mask) growTable()
val index = findIndex(key, hash)
val entry = table(index)
if (entry == null) {
- table(index) = new OpenEntry(key, hash, Some(value));
+ table(index) = new OpenEntry(key, hash, Some(value))
modCount += 1
size += 1
None
} else {
val res = entry.value
if (entry.value == None) { size += 1; modCount += 1 }
- entry.value = Some(value);
+ entry.value = Some(value)
res
}
}
@@ -161,13 +161,13 @@ extends AbstractMap[Key, Value]
while(entry != null){
if (entry.hash == hash &&
entry.key == key){
- return entry.value;
+ return entry.value
}
- j = 5 * j + 1 + perturb;
- perturb >>= 5;
- index = j & mask;
- entry = table(index);
+ j = 5 * j + 1 + perturb
+ perturb >>= 5
+ index = j & mask
+ entry = table(index)
}
None
}
@@ -182,8 +182,8 @@ extends AbstractMap[Key, Value]
val initialModCount = modCount
private[this] def advance() {
- if (initialModCount != modCount) sys.error("Concurrent modification");
- while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1;
+ if (initialModCount != modCount) sys.error("Concurrent modification")
+ while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1
}
def hasNext = {advance(); index <= mask }
@@ -198,7 +198,7 @@ extends AbstractMap[Key, Value]
override def clone() = {
val it = new OpenHashMap[Key, Value]
- foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get));
+ foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get))
it
}
@@ -213,24 +213,24 @@ extends AbstractMap[Key, Value]
* @param f The function to apply to each key, value mapping.
*/
override def foreach[U](f : ((Key, Value)) => U) {
- val startModCount = modCount;
+ val startModCount = modCount
foreachUndeletedEntry(entry => {
if (modCount != startModCount) sys.error("Concurrent Modification")
f((entry.key, entry.value.get))}
- );
+ )
}
private[this] def foreachUndeletedEntry(f : Entry => Unit){
- table.foreach(entry => if (entry != null && entry.value != None) f(entry));
+ table.foreach(entry => if (entry != null && entry.value != None) f(entry))
}
override def transform(f : (Key, Value) => Value) = {
- foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get)));
+ foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get)))
this
}
override def retain(f : (Key, Value) => Boolean) = {
- foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) {entry.value = None; size -= 1; deleted += 1} );
+ foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) {entry.value = None; size -= 1; deleted += 1} )
this
}
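
Note: the probing loop in `findIndex` follows the Python-dictionary scheme — on each collision it advances `j = 5 * j + 1 + perturb`, shifts `perturb` right by five bits, and masks `j` back into the table. The initialisation of `j` and `perturb` lies outside this hunk, so the sketch below only demonstrates the update rule with assumed starting values:

object ProbeSequenceDemo {
  def main(args: Array[String]): Unit = {
    val mask = 15          // table of size 16
    val hash = 0x2AF1      // assumed mangled hash of some key
    var j = hash & mask
    var perturb = hash     // assumed initial perturbation
    for (_ <- 0 until 6) { // print the first few probe slots
      print(s"${j & mask} ")
      j = 5 * j + 1 + perturb
      perturb >>= 5
    }
    println()
  }
}
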
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index 84257c6e97..4e8b923155 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -134,11 +134,11 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
throw new NoSuchElementException("no element to remove from heap")
def dequeueAll[A1 >: A, That](implicit bf: CanBuildFrom[_, A1, That]): That = {
- val b = bf.apply
+ val b = bf.apply()
while (nonEmpty) {
b += dequeue()
}
- b.result
+ b.result()
}
/** Returns the element with the highest priority in the queue,
@@ -146,14 +146,6 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
*
* @return the element with the highest priority.
*/
- @deprecated("Use `head` instead.", "2.9.0")
- def max: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty")
-
- /** Returns the element with the highest priority in the queue,
- * or throws an error if there is no element contained in the queue.
- *
- * @return the element with the highest priority.
- */
override def head: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty")
/** Removes all elements from the queue. After this operation is completed,
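
Note: with the long-deprecated `max` removed here (and in the proxy and synchronized wrappers below), `head` is the remaining way to peek at the highest-priority element without dequeuing it. A minimal usage sketch, not taken from the patch:

import scala.collection.mutable.PriorityQueue

object PriorityQueueHeadDemo {
  def main(args: Array[String]): Unit = {
    val pq = PriorityQueue(3, 1, 4, 1, 5)
    println(pq.head)      // 5 -- peek at the highest-priority element
    println(pq.dequeue()) // 5 -- remove and return it
    println(pq.head)      // 4
  }
}
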
diff --git a/src/library/scala/collection/mutable/PriorityQueueProxy.scala b/src/library/scala/collection/mutable/PriorityQueueProxy.scala
index 3bb5d32cf8..ee54370731 100644
--- a/src/library/scala/collection/mutable/PriorityQueueProxy.scala
+++ b/src/library/scala/collection/mutable/PriorityQueueProxy.scala
@@ -66,7 +66,7 @@ abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends Priority
*
* @return the element with the highest priority.
*/
- override def dequeue(): A = self.dequeue
+ override def dequeue(): A = self.dequeue()
/** Returns the element with the highest priority in the queue,
* or throws an error if there is no element contained in the queue.
@@ -75,18 +75,10 @@ abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends Priority
*/
override def head: A = self.head
- /** Returns the element with the highest priority in the queue,
- * or throws an error if there is no element contained in the queue.
- *
- * @return the element with the highest priority.
- */
- @deprecated("Use `head` instead.", "2.9.0")
- override def max: A = self.max
-
/** Removes all elements from the queue. After this operation is completed,
* the queue will be empty.
*/
- override def clear(): Unit = self.clear
+ override def clear(): Unit = self.clear()
/** Returns a regular queue containing the same elements.
*/
diff --git a/src/library/scala/collection/mutable/Publisher.scala b/src/library/scala/collection/mutable/Publisher.scala
index e31205b477..8c2ef0d3a3 100644
--- a/src/library/scala/collection/mutable/Publisher.scala
+++ b/src/library/scala/collection/mutable/Publisher.scala
@@ -45,7 +45,7 @@ trait Publisher[Evt] {
def suspendSubscription(sub: Sub) { suspended += sub }
def activateSubscription(sub: Sub) { suspended -= sub }
def removeSubscription(sub: Sub) { filters -= sub }
- def removeSubscriptions() { filters.clear }
+ def removeSubscriptions() { filters.clear() }
protected def publish(event: Evt) {
filters.keys.foreach(sub =>
diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala
index 8ef5f6aeb7..f1a5723818 100644
--- a/src/library/scala/collection/mutable/Queue.scala
+++ b/src/library/scala/collection/mutable/Queue.scala
@@ -167,13 +167,6 @@ extends MutableList[A]
*/
def front: A = head
- // this method (duplicated from MutableList) must be private for binary compatibility
- private final def tailImpl(tl: Queue[A]) {
- require(nonEmpty, "tail of empty list")
- tl.first0 = first0.tail
- tl.len = len - 1
- tl.last0 = if (tl.len == 0) tl.first0 else last0
- }
// TODO - Don't override this just for new to create appropriate type....
override def tail: Queue[A] = {
@@ -185,7 +178,7 @@ extends MutableList[A]
override def clone(): Queue[A] = {
val bf = newBuilder
bf ++= seq
- bf.result
+ bf.result()
}
private[this] def decrementLength() {
diff --git a/src/library/scala/collection/mutable/QueueProxy.scala b/src/library/scala/collection/mutable/QueueProxy.scala
index c286a340e3..051b1219cd 100644
--- a/src/library/scala/collection/mutable/QueueProxy.scala
+++ b/src/library/scala/collection/mutable/QueueProxy.scala
@@ -67,7 +67,7 @@ trait QueueProxy[A] extends Queue[A] with Proxy {
*
* @return the first element of the queue.
*/
- override def dequeue(): A = self.dequeue
+ override def dequeue(): A = self.dequeue()
/** Returns the first element in the queue, or throws an error if there
* is no element contained in the queue.
@@ -79,7 +79,7 @@ trait QueueProxy[A] extends Queue[A] with Proxy {
/** Removes all elements from the queue. After this operation is completed,
* the queue will be empty.
*/
- override def clear(): Unit = self.clear
+ override def clear(): Unit = self.clear()
/** Returns an iterator over all elements on the queue.
*
diff --git a/src/library/scala/collection/mutable/RevertibleHistory.scala b/src/library/scala/collection/mutable/RevertibleHistory.scala
index 5544a21a55..9b8554669b 100644
--- a/src/library/scala/collection/mutable/RevertibleHistory.scala
+++ b/src/library/scala/collection/mutable/RevertibleHistory.scala
@@ -30,7 +30,7 @@ class RevertibleHistory[Evt <: Undoable, Pub] extends History[Evt, Pub] with Und
*/
def undo(): Unit = {
val old = log.toList.reverse
- clear
- old.foreach { case (sub, event) => event.undo }
+ clear()
+ old.foreach { case (sub, event) => event.undo() }
}
}
diff --git a/src/library/scala/collection/mutable/SeqLike.scala b/src/library/scala/collection/mutable/SeqLike.scala
index 447100cf4c..ddfde536c9 100644
--- a/src/library/scala/collection/mutable/SeqLike.scala
+++ b/src/library/scala/collection/mutable/SeqLike.scala
@@ -9,7 +9,6 @@
package scala.collection
package mutable
-import generic._
import parallel.mutable.ParSeq
/** A template trait for mutable sequences of type `mutable.Seq[A]`.
diff --git a/src/library/scala/collection/mutable/SetBuilder.scala b/src/library/scala/collection/mutable/SetBuilder.scala
index 42fd651d41..40f0b8932c 100644
--- a/src/library/scala/collection/mutable/SetBuilder.scala
+++ b/src/library/scala/collection/mutable/SetBuilder.scala
@@ -6,12 +6,9 @@
** |/ **
\* */
-
package scala.collection
package mutable
-import generic._
-
/** The canonical builder for mutable Sets.
*
* @tparam A The type of the elements that will be contained in this set.
diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala
index 01f87447ae..8dfcde16ce 100644
--- a/src/library/scala/collection/mutable/SetLike.scala
+++ b/src/library/scala/collection/mutable/SetLike.scala
@@ -11,7 +11,7 @@ package mutable
import generic._
import script._
-import scala.annotation.{ migration, bridge }
+import scala.annotation.migration
import parallel.mutable.ParSet
/** A template trait for mutable sets of type `mutable.Set[A]`.
@@ -210,7 +210,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
def <<(cmd: Message[A]): Unit = cmd match {
case Include(_, x) => this += x
case Remove(_, x) => this -= x
- case Reset() => clear
+ case Reset() => clear()
case s: Script[_] => s.iterator foreach <<
case _ => throw new UnsupportedOperationException("message " + cmd + " not understood")
}
diff --git a/src/library/scala/collection/mutable/StackProxy.scala b/src/library/scala/collection/mutable/StackProxy.scala
index 16f13ff42c..8792738339 100644
--- a/src/library/scala/collection/mutable/StackProxy.scala
+++ b/src/library/scala/collection/mutable/StackProxy.scala
@@ -69,13 +69,13 @@ trait StackProxy[A] extends Stack[A] with Proxy {
/** Removes the top element from the stack.
*/
- override def pop(): A = self.pop
+ override def pop(): A = self.pop()
/**
* Removes all elements from the stack. After this operation completed,
* the stack will be empty.
*/
- override def clear(): Unit = self.clear
+ override def clear(): Unit = self.clear()
/** Returns an iterator over all elements on the stack. This iterator
* is stable with respect to state changes in the stack object; i.e.
diff --git a/src/library/scala/collection/mutable/SynchronizedBuffer.scala b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
index bf9a70c5b7..14ec85b906 100644
--- a/src/library/scala/collection/mutable/SynchronizedBuffer.scala
+++ b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
@@ -157,7 +157,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
/** Clears the buffer contents.
*/
abstract override def clear(): Unit = synchronized {
- super.clear
+ super.clear()
}
override def <<(cmd: Message[A]): Unit = synchronized {
diff --git a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
index 8dfc40b9c8..52e55677bd 100644
--- a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
+++ b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
@@ -64,7 +64,7 @@ class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQu
*
* @return the element with the highest priority.
*/
- override def dequeue(): A = synchronized { super.dequeue }
+ override def dequeue(): A = synchronized { super.dequeue() }
/** Returns the element with the highest priority in the queue,
* or throws an error if there is no element contained in the queue.
@@ -73,18 +73,10 @@ class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQu
*/
override def head: A = synchronized { super.head }
- /** Returns the element with the highest priority in the queue,
- * or throws an error if there is no element contained in the queue.
- *
- * @return the element with the highest priority.
- */
- @deprecated("Use `head` instead.", "2.9.0")
- override def max: A = synchronized { super.max }
-
/** Removes all elements from the queue. After this operation is completed,
* the queue will be empty.
*/
- override def clear(): Unit = synchronized { super.clear }
+ override def clear(): Unit = synchronized { super.clear() }
/** Returns an iterator which yield all the elements of the priority
* queue in descending priority order.
diff --git a/src/library/scala/collection/mutable/SynchronizedQueue.scala b/src/library/scala/collection/mutable/SynchronizedQueue.scala
index 9559d5eaa5..57beab39b6 100644
--- a/src/library/scala/collection/mutable/SynchronizedQueue.scala
+++ b/src/library/scala/collection/mutable/SynchronizedQueue.scala
@@ -25,8 +25,6 @@ package mutable
* @define coll synchronized queue
*/
class SynchronizedQueue[A] extends Queue[A] {
- import scala.collection.Traversable
-
/** Checks if the queue is empty.
*
* @return true, iff there is no element in the queue.
@@ -58,7 +56,7 @@ class SynchronizedQueue[A] extends Queue[A] {
*
* @return the first element of the queue.
*/
- override def dequeue(): A = synchronized { super.dequeue }
+ override def dequeue(): A = synchronized { super.dequeue() }
/** Returns the first element in the queue which satisfies the
* given predicate, and removes this element from the queue.
@@ -87,7 +85,7 @@ class SynchronizedQueue[A] extends Queue[A] {
/** Removes all elements from the queue. After this operation is completed,
* the queue will be empty.
*/
- override def clear(): Unit = synchronized { super.clear }
+ override def clear(): Unit = synchronized { super.clear() }
/** Checks if two queues are structurally identical.
*
diff --git a/src/library/scala/collection/mutable/SynchronizedSet.scala b/src/library/scala/collection/mutable/SynchronizedSet.scala
index e4a44993ff..27a696895d 100644
--- a/src/library/scala/collection/mutable/SynchronizedSet.scala
+++ b/src/library/scala/collection/mutable/SynchronizedSet.scala
@@ -24,8 +24,6 @@ import script._
* @define coll synchronized set
*/
trait SynchronizedSet[A] extends Set[A] {
- import scala.collection.Traversable
-
abstract override def size: Int = synchronized {
super.size
}
@@ -71,7 +69,7 @@ trait SynchronizedSet[A] extends Set[A] {
}
abstract override def clear(): Unit = synchronized {
- super.clear
+ super.clear()
}
override def subsetOf(that: scala.collection.GenSet[A]) = synchronized {
diff --git a/src/library/scala/collection/mutable/SynchronizedStack.scala b/src/library/scala/collection/mutable/SynchronizedStack.scala
index 5d7c9f6073..09cdcca99e 100644
--- a/src/library/scala/collection/mutable/SynchronizedStack.scala
+++ b/src/library/scala/collection/mutable/SynchronizedStack.scala
@@ -67,13 +67,13 @@ class SynchronizedStack[A] extends Stack[A] {
/** Removes the top element from the stack.
*/
- override def pop(): A = synchronized { super.pop }
+ override def pop(): A = synchronized { super.pop() }
/**
* Removes all elements from the stack. After this operation completed,
* the stack will be empty.
*/
- override def clear(): Unit = synchronized { super.clear }
+ override def clear(): Unit = synchronized { super.clear() }
/** Returns an iterator over all elements on the stack. This iterator
* is stable with respect to state changes in the stack object; i.e.
diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala
index 5197af1b04..147bc85383 100644
--- a/src/library/scala/collection/mutable/TreeSet.scala
+++ b/src/library/scala/collection/mutable/TreeSet.scala
@@ -10,6 +10,8 @@ package scala.collection
package mutable
import generic._
+import scala.collection.immutable.{RedBlackTree => RB}
+import scala.runtime.ObjectRef
/**
* @define Coll `mutable.TreeSet`
@@ -29,95 +31,81 @@ object TreeSet extends MutableSortedSetFactory[TreeSet] {
}
/**
- * A mutable SortedSet using an immutable AVL Tree as underlying data structure.
+ * A mutable SortedSet using an immutable RedBlack Tree as underlying data structure.
*
* @author Lucien Pereira
*
*/
-class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with SetLike[A, TreeSet[A]]
+class TreeSet[A] private (treeRef: ObjectRef[RB.Tree[A, Null]], from: Option[A], until: Option[A])(implicit val ordering: Ordering[A])
+ extends SortedSet[A] with SetLike[A, TreeSet[A]]
with SortedSetLike[A, TreeSet[A]] with Set[A] with Serializable {
- // Projection constructor
- private def this(base: Option[TreeSet[A]], from: Option[A], until: Option[A])(implicit ordering: Ordering[A]) {
- this();
- this.base = base
- this.from = from
- this.until = until
- }
-
- private var base: Option[TreeSet[A]] = None
-
- private var from: Option[A] = None
-
- private var until: Option[A] = None
-
- private var avl: AVLTree[A] = Leaf
-
- private var cardinality: Int = 0
+ def this()(implicit ordering: Ordering[A]) = this(new ObjectRef(null), None, None)
- def resolve: TreeSet[A] = base.getOrElse(this)
-
- private def isLeftAcceptable(from: Option[A], ordering: Ordering[A])(a: A): Boolean =
- from.map(x => ordering.gteq(a, x)).getOrElse(true)
-
- private def isRightAcceptable(until: Option[A], ordering: Ordering[A])(a: A): Boolean =
- until.map(x => ordering.lt(a, x)).getOrElse(true)
-
- /**
- * Cardinality store the set size, unfortunately a
- * set view (given by rangeImpl)
- * cannot take advantage of this optimisation
- *
- */
- override def size: Int = base.map(_ => super.size).getOrElse(cardinality)
+ override def size: Int = RB.countInRange(treeRef.elem, from, until)
override def stringPrefix = "TreeSet"
override def empty: TreeSet[A] = TreeSet.empty
- override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = new TreeSet(Some(this), from, until)
+ private def pickBound(comparison: (A, A) => A, oldBound: Option[A], newBound: Option[A]) = (newBound, oldBound) match {
+ case (Some(newB), Some(oldB)) => Some(comparison(newB, oldB))
+ case (None, _) => oldBound
+ case _ => newBound
+ }
+
+ override def rangeImpl(fromArg: Option[A], untilArg: Option[A]): TreeSet[A] = {
+ val newFrom = pickBound(ordering.max, fromArg, from)
+ val newUntil = pickBound(ordering.min, untilArg, until)
+
+ new TreeSet(treeRef, newFrom, newUntil)
+ }
override def -=(elem: A): this.type = {
- try {
- resolve.avl = resolve.avl.remove(elem, ordering)
- resolve.cardinality = resolve.cardinality - 1
- } catch {
- case e: NoSuchElementException => ()
- }
+ treeRef.elem = RB.delete(treeRef.elem, elem)
this
}
override def +=(elem: A): this.type = {
- try {
- resolve.avl = resolve.avl.insert(elem, ordering)
- resolve.cardinality = resolve.cardinality + 1
- } catch {
- case e: IllegalArgumentException => ()
- }
+ treeRef.elem = RB.update(treeRef.elem, elem, null, overwrite = false)
this
}
/**
* Thanks to the immutable nature of the
- * underlying AVL Tree, we can share it with
+ * underlying Tree, we can share it with
* the clone. So clone complexity in time is O(1).
*
*/
- override def clone(): TreeSet[A] = {
- val clone = new TreeSet[A](base, from, until)
- clone.avl = resolve.avl
- clone.cardinality = resolve.cardinality
- clone
- }
+ override def clone(): TreeSet[A] =
+ new TreeSet[A](new ObjectRef(treeRef.elem), from, until)
+
+ private val notProjection = !(from.isDefined || until.isDefined)
override def contains(elem: A): Boolean = {
- isLeftAcceptable(from, ordering)(elem) &&
- isRightAcceptable(until, ordering)(elem) &&
- resolve.avl.contains(elem, ordering)
- }
+ def leftAcceptable: Boolean = from match {
+ case Some(lb) => ordering.gteq(elem, lb)
+ case _ => true
+ }
- override def iterator: Iterator[A] = resolve.avl.iterator
- .dropWhile(e => !isLeftAcceptable(from, ordering)(e))
- .takeWhile(e => isRightAcceptable(until, ordering)(e))
+ def rightAcceptable: Boolean = until match {
+ case Some(ub) => ordering.lt(elem, ub)
+ case _ => true
+ }
+
+ (notProjection || (leftAcceptable && rightAcceptable)) &&
+ RB.contains(treeRef.elem, elem)
+ }
+ override def iterator: Iterator[A] = iteratorFrom(None)
+
+ override def keysIteratorFrom(start: A) = iteratorFrom(Some(start))
+
+ private def iteratorFrom(start: Option[A]) = {
+ val it = RB.keysIterator(treeRef.elem, pickBound(ordering.max, from, start))
+ until match {
+ case None => it
+ case Some(ub) => it takeWhile (k => ordering.lt(k, ub))
+ }
+ }
}
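
Note: the new `mutable.TreeSet` keeps its red-black tree in an `ObjectRef` that is shared between a set and the projections created by `rangeImpl`, so a ranged view stays in sync with later mutations of the original set (and vice versa, within its bounds). A small sketch of that behaviour, assuming a Scala version that includes this change:

import scala.collection.mutable.TreeSet

object TreeSetRangeDemo {
  def main(args: Array[String]): Unit = {
    val s = TreeSet(1, 3, 5, 7, 9)
    val mid = s.range(3, 8) // projection over 3 <= x < 8, sharing the tree
    println(mid)            // TreeSet(3, 5, 7)
    s += 4                  // mutate the original set
    println(mid)            // TreeSet(3, 4, 5, 7) -- the view sees it
  }
}
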
diff --git a/src/library/scala/collection/mutable/UnrolledBuffer.scala b/src/library/scala/collection/mutable/UnrolledBuffer.scala
index 9b48c8f24f..ac634f43aa 100644
--- a/src/library/scala/collection/mutable/UnrolledBuffer.scala
+++ b/src/library/scala/collection/mutable/UnrolledBuffer.scala
@@ -87,7 +87,7 @@ extends scala.collection.mutable.AbstractBuffer[T]
// `that` is no longer usable, so clear it
// here we rely on the fact that `clear` allocates
// new nodes instead of modifying the previous ones
- that.clear
+ that.clear()
// return a reference to this
this
@@ -123,7 +123,7 @@ extends scala.collection.mutable.AbstractBuffer[T]
val r = node.array(pos)
scan()
r
- } else Iterator.empty.next
+ } else Iterator.empty.next()
}
// this should be faster than the iterator
diff --git a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
index 7e0210311c..55328a5d3d 100644
--- a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
@@ -11,7 +11,6 @@
package scala.collection
package mutable
-import generic._
import scala.reflect.ClassTag
import scala.runtime.ScalaRunTime._
diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala
index 00993c09ff..00e20e7616 100644
--- a/src/library/scala/collection/parallel/Combiner.scala
+++ b/src/library/scala/collection/parallel/Combiner.scala
@@ -86,7 +86,7 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
* if this is applicable.
*/
def resultWithTaskSupport: To = {
- val res = result
+ val res = result()
setTaskSupport(res, combinerTaskSupport)
}
diff --git a/src/library/scala/collection/parallel/ParIterable.scala b/src/library/scala/collection/parallel/ParIterable.scala
index 2b24c88139..f170b944eb 100644
--- a/src/library/scala/collection/parallel/ParIterable.scala
+++ b/src/library/scala/collection/parallel/ParIterable.scala
@@ -11,7 +11,6 @@ package scala.collection.parallel
import scala.collection.GenIterable
import scala.collection.generic._
import scala.collection.parallel.mutable.ParArrayCombiner
-import scala.collection.parallel.mutable.ParArray
/** A template trait for parallel iterable collections.
*
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 0f06ff37af..961556faff 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -171,9 +171,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
/** The task support object which is responsible for scheduling and
* load-balancing tasks to processors.
- *
+ *
* @see [[scala.collection.parallel.TaskSupport]]
- */
+ */
def tasksupport = {
val ts = _tasksupport
if (ts eq null) {
@@ -188,18 +188,18 @@ self: ParIterableLike[T, Repr, Sequential] =>
* A task support object can be changed in a parallel collection after it
* has been created, but only during a quiescent period, i.e. while there
* are no concurrent invocations to parallel collection methods.
- *
- * Here is a way to change the task support of a parallel collection:
- *
- * {{{
- * import scala.collection.parallel._
- * val pc = mutable.ParArray(1, 2, 3)
- * pc.tasksupport = new ForkJoinTaskSupport(
- * new scala.concurrent.forkjoin.ForkJoinPool(2))
- * }}}
+ *
+ * Here is a way to change the task support of a parallel collection:
+ *
+ * {{{
+ * import scala.collection.parallel._
+ * val pc = mutable.ParArray(1, 2, 3)
+ * pc.tasksupport = new ForkJoinTaskSupport(
+ * new scala.concurrent.forkjoin.ForkJoinPool(2))
+ * }}}
*
* @see [[scala.collection.parallel.TaskSupport]]
- */
+ */
def tasksupport_=(ts: TaskSupport) = _tasksupport = ts
def seq: Sequential
@@ -214,7 +214,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def nonEmpty = size != 0
- def head = iterator.next
+ def head = iterator.next()
def headOption = if (nonEmpty) Some(head) else None
@@ -433,12 +433,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @tparam S the type of accumulated results
* @param z the initial value for the accumulated result of the partition - this
* will typically be the neutral element for the `seqop` operator (e.g.
- * `Nil` for list concatenation or `0` for summation)
+ * `Nil` for list concatenation or `0` for summation) and may be evaluated
+ * more than once
* @param seqop an operator used to accumulate results within a partition
* @param combop an associative operator used to combine results from different partitions
*/
- def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = {
- tasksupport.executeAndWaitResult(new Aggregate(z, seqop, combop, splitter))
+ def aggregate[S](z: =>S)(seqop: (S, T) => S, combop: (S, S) => S): S = {
+ tasksupport.executeAndWaitResult(new Aggregate(() => z, seqop, combop, splitter))
}
def foldLeft[S](z: S)(op: (S, T) => S): S = seq.foldLeft(z)(op)
@@ -453,7 +454,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def reduceRightOption[U >: T](op: (T, U) => U): Option[U] = seq.reduceRightOption(op)
- /** Applies a function `f` to all the elements of $coll in a undefined order.
+ /** Applies a function `f` to all the elements of $coll in an undefined order.
*
* @tparam U the result type of the function applied to each element, which is always discarded
* @param f function applied to each element
@@ -626,7 +627,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
val b = bf(repr)
this.splitter.copy2builder[U, That, Builder[U, That]](b)
for (elem <- that.seq) b += elem
- setTaskSupport(b.result, tasksupport)
+ setTaskSupport(b.result(), tasksupport)
}
}
@@ -727,7 +728,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
tree => tasksupport.executeAndWaitResult(new FromScanTree(tree, z, op, combinerFactory(() => bf(repr).asCombiner)) mapResult {
cb => cb.resultWithTaskSupport
})
- }) else setTaskSupport((bf(repr) += z).result, tasksupport)
+ }) else setTaskSupport((bf(repr) += z).result(), tasksupport)
} else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport)
} else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport)
@@ -819,10 +820,10 @@ self: ParIterableLike[T, Repr, Sequential] =>
def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) {
val thatseq = that.asParSeq
- tasksupport.executeAndWaitResult(new Zip(combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport });
+ tasksupport.executeAndWaitResult(new Zip(combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport })
} else setTaskSupport(seq.zip(that)(bf2seq(bf)), tasksupport)
- def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, false)
+ def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, inclusive = false)
def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) {
val thatseq = that.asParSeq
@@ -830,11 +831,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
new ZipAll(size max thatseq.length, thisElem, thatElem, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult {
_.resultWithTaskSupport
}
- );
+ )
} else setTaskSupport(seq.zipAll(that, thisElem, thatElem)(bf2seq(bf)), tasksupport)
protected def toParCollection[U >: T, That](cbf: () => Combiner[U, That]): That = {
- tasksupport.executeAndWaitResult(new ToParCollection(combinerFactory(cbf), splitter) mapResult { _.resultWithTaskSupport });
+ tasksupport.executeAndWaitResult(new ToParCollection(combinerFactory(cbf), splitter) mapResult { _.resultWithTaskSupport })
}
protected def toParMap[K, V, That](cbf: () => Combiner[(K, V), That])(implicit ev: T <:< (K, V)): That = {
@@ -877,13 +878,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def toSet[U >: T]: immutable.ParSet[U] = toParCollection[U, immutable.ParSet[U]](() => immutable.ParSet.newCombiner[U])
override def toMap[K, V](implicit ev: T <:< (K, V)): immutable.ParMap[K, V] = toParMap[K, V, immutable.ParMap[K, V]](() => immutable.ParMap.newCombiner[K, V])
-
+
override def toVector: Vector[T] = to[Vector]
override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = if (cbf().isCombiner) {
toParCollection[T, Col[T]](() => cbf().asCombiner)
} else seq.to(cbf)
-
+
/* tasks */
protected trait StrictSplitterCheckTask[R, Tp] extends Task[R, Tp] {
@@ -903,7 +904,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
protected[this] def newSubtask(p: IterableSplitter[T]): Accessor[R, Tp]
def shouldSplitFurther = pit.shouldSplitFurther(self.repr, tasksupport.parallelismLevel)
def split = pit.splitWithSignalling.map(newSubtask(_)) // default split procedure
- private[parallel] override def signalAbort = pit.abort
+ private[parallel] override def signalAbort = pit.abort()
override def toString = this.getClass.getSimpleName + "(" + pit.toString + ")(" + result + ")(supername: " + super.toString + ")"
}
@@ -920,8 +921,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
def combineResults(fr: FR, sr: SR): R
@volatile var result: R = null.asInstanceOf[R]
private[parallel] override def signalAbort() {
- ft.signalAbort
- st.signalAbort
+ ft.signalAbort()
+ st.signalAbort()
}
protected def mergeSubtasks() {
ft mergeThrowables st
@@ -935,9 +936,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
(f: First, s: Second)
extends Composite[FR, SR, R, First, Second](f, s) {
def leaf(prevr: Option[R]) = {
- tasksupport.executeAndWaitResult(ft)
- tasksupport.executeAndWaitResult(st)
- mergeSubtasks
+ tasksupport.executeAndWaitResult(ft) : Any
+ tasksupport.executeAndWaitResult(st) : Any
+ mergeSubtasks()
}
}
@@ -946,10 +947,10 @@ self: ParIterableLike[T, Repr, Sequential] =>
(f: First, s: Second)
extends Composite[FR, SR, R, First, Second](f, s) {
def leaf(prevr: Option[R]) = {
- val ftfuture = tasksupport.execute(ft)
- tasksupport.executeAndWaitResult(st)
+ val ftfuture: () => Any = tasksupport.execute(ft)
+ tasksupport.executeAndWaitResult(st) : Any
ftfuture()
- mergeSubtasks
+ mergeSubtasks()
}
}
@@ -962,7 +963,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
result = map(initialResult)
}
private[parallel] override def signalAbort() {
- inner.signalAbort
+ inner.signalAbort()
}
override def requiresStrictSplitters = inner.requiresStrictSplitters
}
@@ -1005,10 +1006,10 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: Fold[U]) = result = op(result, that.result)
}
- protected[this] class Aggregate[S](z: S, seqop: (S, T) => S, combop: (S, S) => S, protected[this] val pit: IterableSplitter[T])
+ protected[this] class Aggregate[S](z: () => S, seqop: (S, T) => S, combop: (S, S) => S, protected[this] val pit: IterableSplitter[T])
extends Accessor[S, Aggregate[S]] {
@volatile var result: S = null.asInstanceOf[S]
- def leaf(prevr: Option[S]) = result = pit.foldLeft(z)(seqop)
+ def leaf(prevr: Option[S]) = result = pit.foldLeft(z())(seqop)
protected[this] def newSubtask(p: IterableSplitter[T]) = new Aggregate(z, seqop, combop, p)
override def merge(that: Aggregate[S]) = result = combop(result, that.result)
}
@@ -1084,7 +1085,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
protected[this] class Forall(pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
extends Accessor[Boolean, Forall] {
@volatile var result: Boolean = true
- def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort }
+ def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort() }
protected[this] def newSubtask(p: IterableSplitter[T]) = new Forall(pred, p)
override def merge(that: Forall) = result = result && that.result
}
@@ -1092,7 +1093,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
protected[this] class Exists(pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
extends Accessor[Boolean, Exists] {
@volatile var result: Boolean = false
- def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort }
+ def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort() }
protected[this] def newSubtask(p: IterableSplitter[T]) = new Exists(pred, p)
override def merge(that: Exists) = result = result || that.result
}
@@ -1100,7 +1101,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
extends Accessor[Option[U], Find[U]] {
@volatile var result: Option[U] = None
- def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort }
+ def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort() }
protected[this] def newSubtask(p: IterableSplitter[T]) = new Find(pred, p)
override def merge(that: Find[U]) = if (this.result == None) result = that.result
}
@@ -1152,7 +1153,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
// note: HashMapCombiner doesn't merge same keys until evaluation
val cb = mcf()
while (pit.hasNext) {
- val elem = pit.next
+ val elem = pit.next()
cb += f(elem) -> elem
}
result = cb
@@ -1473,9 +1474,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
/* alias methods */
- def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op);
+ def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op)
- def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op);
+ def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op)
/* debug information */
@@ -1488,7 +1489,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def debugBuffer: ArrayBuffer[String] = null
private[parallel] def debugclear() = synchronized {
- debugBuffer.clear
+ debugBuffer.clear()
}
private[parallel] def debuglog(s: String) = synchronized {
@@ -1504,31 +1505,3 @@ self: ParIterableLike[T, Repr, Sequential] =>
})
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
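Aside: most of the mechanical churn in this commit — `pit.abort` becoming `pit.abort()`, `next` becoming `next()`, `b.result` becoming `b.result()` — follows the Scala convention that side-effecting parameterless methods are declared and called with an empty parameter list, while pure accessors drop the parens. A small self-contained sketch of the convention (my own example, not code from the patch):

class Counter {
  private var n = 0
  def increment(): Unit = { n += 1 } // mutates state, so declared and called with ()
  def value: Int = n                 // pure accessor, so no parens
}

object ParenConventionDemo {
  def main(args: Array[String]): Unit = {
    val c = new Counter
    c.increment()    // call side-effecting methods with ()
    println(c.value) // read accessors without them
  }
}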
diff --git a/src/library/scala/collection/parallel/ParIterableViewLike.scala b/src/library/scala/collection/parallel/ParIterableViewLike.scala
index 0ecd6bd9ec..0567e7b396 100644
--- a/src/library/scala/collection/parallel/ParIterableViewLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableViewLike.scala
@@ -50,7 +50,8 @@ extends GenIterableView[T, Coll]
self =>
override def foreach[U](f: T => U): Unit = super[ParIterableLike].foreach(f)
- override protected[this] def newCombiner: Combiner[T, This] = throw new UnsupportedOperationException(this + ".newCombiner");
+ override protected[this] def newCombiner: Combiner[T, This] = throw new UnsupportedOperationException(this + ".newCombiner")
+
protected[this] def viewIdentifier: String
protected[this] def viewIdString: String
@@ -130,7 +131,7 @@ self =>
override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[This, (U, S), That]): That = newZippedTryParSeq(that).asInstanceOf[That]
override def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[This, (U, Int), That]): That =
- newZipped(ParRange(0, splitter.remaining, 1, false)).asInstanceOf[That]
+ newZipped(ParRange(0, splitter.remaining, 1, inclusive = false)).asInstanceOf[That]
override def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[This, (U, S), That]): That =
newZippedAllTryParSeq(that, thisElem, thatElem).asInstanceOf[That]
@@ -139,7 +140,7 @@ self =>
} otherwise {
val b = bf(underlying)
b ++= this.iterator
- b.result
+ b.result()
}
/* wrapper virtual ctors */
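Aside: spelling the literal out as `inclusive = false` above is the named-argument idiom for boolean flags; nothing changes at runtime, the call site simply documents itself. A tiny illustration with names of my own choosing:

object NamedArgDemo {
  def count(from: Int, until: Int, inclusive: Boolean): Int =
    if (inclusive) until - from + 1 else until - from

  def main(args: Array[String]): Unit =
    println(count(0, 10, inclusive = false)) // 10 — the flag's meaning is clear at the call site
}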
diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala
index 56594bec96..798ba71b95 100644
--- a/src/library/scala/collection/parallel/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/ParMapLike.scala
@@ -67,7 +67,7 @@ self =>
i =>
val iter = s
def hasNext = iter.hasNext
- def next() = iter.next._1
+ def next() = iter.next()._1
def split = {
val ss = iter.split.map(keysIterator(_))
ss.foreach { _.signalDelegate = i.signalDelegate }
@@ -84,7 +84,7 @@ self =>
i =>
val iter = s
def hasNext = iter.hasNext
- def next() = iter.next._2
+ def next() = iter.next()._2
def split = {
val ss = iter.split.map(valuesIterator(_))
ss.foreach { _.signalDelegate = i.signalDelegate }
diff --git a/src/library/scala/collection/parallel/ParSeq.scala b/src/library/scala/collection/parallel/ParSeq.scala
index b905d1d41f..dee523ad89 100644
--- a/src/library/scala/collection/parallel/ParSeq.scala
+++ b/src/library/scala/collection/parallel/ParSeq.scala
@@ -18,9 +18,6 @@ import scala.collection.generic.ParFactory
import scala.collection.generic.CanCombineFrom
import scala.collection.GenSeq
import scala.collection.parallel.mutable.ParArrayCombiner
-import scala.collection.parallel.mutable.ParArray
-
-
/** A template trait for parallel sequences.
*
diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala
index 201b624c72..68bc1bc12c 100644
--- a/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -44,8 +44,8 @@ trait ParSeqLike[+T, +Repr <: ParSeq[T], +Sequential <: Seq[T] with SeqLike[T, S
extends scala.collection.GenSeqLike[T, Repr]
with ParIterableLike[T, Repr, Sequential] {
self =>
-
- type SuperParIterator = IterableSplitter[T]
+
+ protected[this] type SuperParIterator = IterableSplitter[T]
/** A more refined version of the iterator found in the `ParallelIterable` trait,
* this iterator can be split into arbitrary subsets of iterators.
@@ -68,7 +68,7 @@ self =>
val x = self(i)
i += 1
x
- } else Iterator.empty.next
+ } else Iterator.empty.next()
def head = self(i)
@@ -228,7 +228,7 @@ self =>
b ++= pits(0)
b ++= patch
b ++= pits(2)
- setTaskSupport(b.result, tasksupport)
+ setTaskSupport(b.result(), tasksupport)
}
def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf(repr).isCombiner) {
@@ -252,7 +252,7 @@ self =>
def padTo[U >: T, That](len: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (length < len) {
patch(length, new immutable.Repetition(elem, len - length), 0)
- } else patch(length, Nil, 0);
+ } else patch(length, Nil, 0)
override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) {
val thatseq = that.asParSeq
@@ -260,7 +260,7 @@ self =>
new Zip(length min thatseq.length, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult {
_.resultWithTaskSupport
}
- );
+ )
} else super.zip(that)(bf)
/** Tests whether every element of this $coll relates to the
@@ -423,7 +423,7 @@ self =>
@volatile var result: Boolean = true
def leaf(prev: Option[Boolean]) = if (!pit.isAborted) {
result = pit.sameElements(otherpit)
- if (!result) pit.abort
+ if (!result) pit.abort()
}
protected[this] def newSubtask(p: SuperParIterator) = unsupported
override def split = {
@@ -471,7 +471,7 @@ self =>
@volatile var result: Boolean = true
def leaf(prev: Option[Boolean]) = if (!pit.isAborted) {
result = pit.corresponds(corr)(otherpit)
- if (!result) pit.abort
+ if (!result) pit.abort()
}
protected[this] def newSubtask(p: SuperParIterator) = unsupported
override def split = {
diff --git a/src/library/scala/collection/parallel/ParSeqView.scala b/src/library/scala/collection/parallel/ParSeqView.scala
index 3e3c497352..9acc4b0b73 100644
--- a/src/library/scala/collection/parallel/ParSeqView.scala
+++ b/src/library/scala/collection/parallel/ParSeqView.scala
@@ -6,10 +6,9 @@
**                          |/                                          **
\*                                                                      */
-
package scala.collection.parallel
-import scala.collection.{ TraversableView, SeqView, Parallel, Iterator }
+import scala.collection.{ SeqView, Parallel, Iterator }
import scala.collection.generic.CanCombineFrom
/** A template view of a non-strict view of a parallel sequence.
diff --git a/src/library/scala/collection/parallel/ParSeqViewLike.scala b/src/library/scala/collection/parallel/ParSeqViewLike.scala
index 04369d8fde..f3dbe20e67 100644
--- a/src/library/scala/collection/parallel/ParSeqViewLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqViewLike.scala
@@ -125,8 +125,8 @@ self =>
}
protected def newReversed: Transformed[T] = new Reversed { }
protected def newPatched[U >: T](_from: Int, _patch: GenSeq[U], _replaced: Int): Transformed[U] = new {
- val from = _from;
- val patch = _patch;
+ val from = _from
+ val patch = _patch
val replaced = _replaced
} with Patched[U]
@@ -147,7 +147,7 @@ self =>
override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[This, S, That]): That = newMapped(f).asInstanceOf[That]
override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[This, (U, S), That]): That = newZippedTryParSeq(that).asInstanceOf[That]
override def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[This, (U, Int), That]): That =
- newZipped(ParRange(0, splitter.remaining, 1, false)).asInstanceOf[That]
+ newZipped(ParRange(0, splitter.remaining, 1, inclusive = false)).asInstanceOf[That]
override def reverse: This = newReversed.asInstanceOf[This]
override def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[This, S, That]): That = reverse.map(f)
@@ -173,7 +173,7 @@ self =>
} otherwise {
val b = bf(underlying)
b ++= this.iterator
- b.result
+ b.result()
}
/* tasks */
diff --git a/src/library/scala/collection/parallel/ParSet.scala b/src/library/scala/collection/parallel/ParSet.scala
index 6e5e9b4387..bc6d5c6245 100644
--- a/src/library/scala/collection/parallel/ParSet.scala
+++ b/src/library/scala/collection/parallel/ParSet.scala
@@ -17,14 +17,8 @@ package scala.collection.parallel
import scala.collection.Set
import scala.collection.GenSet
-import scala.collection.mutable.Builder
import scala.collection.generic._
-
-
-
-
-
/** A template trait for parallel sets.
*
* $sideeffects
diff --git a/src/library/scala/collection/parallel/ParSetLike.scala b/src/library/scala/collection/parallel/ParSetLike.scala
index c80b5ded26..20a5f693ce 100644
--- a/src/library/scala/collection/parallel/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/ParSetLike.scala
@@ -15,14 +15,6 @@ import scala.collection.SetLike
import scala.collection.GenSetLike
import scala.collection.GenSet
import scala.collection.Set
-import scala.collection.mutable.Builder
-
-
-
-
-
-
-
/** A template trait for parallel sets. This trait is mixed in with concrete
* parallel sets to override the representation type.
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index 3150b0d763..a3a47e2e40 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -47,47 +47,47 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
override def count(p: T => Boolean): Int = {
var i = 0
- while (hasNext) if (p(next)) i += 1
+ while (hasNext) if (p(next())) i += 1
i
}
override def reduce[U >: T](op: (U, U) => U): U = {
- var r: U = next
- while (hasNext) r = op(r, next)
+ var r: U = next()
+ while (hasNext) r = op(r, next())
r
}
override def fold[U >: T](z: U)(op: (U, U) => U): U = {
var r = z
- while (hasNext) r = op(r, next)
+ while (hasNext) r = op(r, next())
r
}
override def sum[U >: T](implicit num: Numeric[U]): U = {
var r: U = num.zero
- while (hasNext) r = num.plus(r, next)
+ while (hasNext) r = num.plus(r, next())
r
}
override def product[U >: T](implicit num: Numeric[U]): U = {
var r: U = num.one
- while (hasNext) r = num.times(r, next)
+ while (hasNext) r = num.times(r, next())
r
}
override def min[U >: T](implicit ord: Ordering[U]): T = {
- var r = next
+ var r = next()
while (hasNext) {
- val curr = next
+ val curr = next()
if (ord.lteq(curr, r)) r = curr
}
r
}
override def max[U >: T](implicit ord: Ordering[U]): T = {
- var r = next
+ var r = next()
while (hasNext) {
- val curr = next
+ val curr = next()
if (ord.gteq(curr, r)) r = curr
}
r
@@ -97,16 +97,16 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
var i = from
val until = from + len
while (i < until && hasNext) {
- array(i) = next
+ array(i) = next()
i += 1
}
}
def reduceLeft[U >: T](howmany: Int, op: (U, U) => U): U = {
var i = howmany - 1
- var u: U = next
+ var u: U = next()
while (i > 0 && hasNext) {
- u = op(u, next)
+ u = op(u, next())
i -= 1
}
u
@@ -117,15 +117,16 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = {
//val cb = pbf(repr)
if (isRemainingCheap) cb.sizeHint(remaining)
- while (hasNext) cb += f(next)
+ while (hasNext) cb += f(next())
cb
}
def collect2combiner[S, That](pf: PartialFunction[T, S], cb: Combiner[S, That]): Combiner[S, That] = {
//val cb = pbf(repr)
+ val runWith = pf.runWith(cb += _)
while (hasNext) {
- val curr = next
- if (pf.isDefinedAt(curr)) cb += pf(curr)
+ val curr = next()
+ runWith(curr)
}
cb
}
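Aside: the rewrite above replaces the `isDefinedAt`/`apply` pair with `PartialFunction.runWith`, which pattern-matches each element once instead of twice. A minimal sketch of `runWith` (my own example; the method exists since Scala 2.10):

object RunWithDemo {
  def main(args: Array[String]): Unit = {
    val evens: PartialFunction[Int, Int] = { case n if n % 2 == 0 => n * 10 }
    val out = scala.collection.mutable.ListBuffer.empty[Int]
    val step = evens.runWith(out += _) // Int => Boolean; true when the pf matched
    (1 to 6).foreach(step)
    println(out) // ListBuffer(20, 40, 60)
  }
}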
@@ -133,7 +134,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def flatmap2combiner[S, That](f: T => GenTraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = {
//val cb = pbf(repr)
while (hasNext) {
- val traversable = f(next).seq
+ val traversable = f(next()).seq
if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator
else cb ++= traversable
}
@@ -148,7 +149,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def filter2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]): Combiner[U, This] = {
while (hasNext) {
- val curr = next
+ val curr = next()
if (pred(curr)) cb += curr
}
cb
@@ -156,7 +157,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def filterNot2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]): Combiner[U, This] = {
while (hasNext) {
- val curr = next
+ val curr = next()
if (!pred(curr)) cb += curr
}
cb
@@ -164,7 +165,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def partition2combiners[U >: T, This](pred: T => Boolean, btrue: Combiner[U, This], bfalse: Combiner[U, This]) = {
while (hasNext) {
- val curr = next
+ val curr = next()
if (pred(curr)) btrue += curr
else bfalse += curr
}
@@ -214,7 +215,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def takeWhile2combiner[U >: T, This](p: T => Boolean, cb: Combiner[U, This]) = {
var loop = true
while (hasNext && loop) {
- val curr = next
+ val curr = next()
if (p(curr)) cb += curr
else loop = false
}
@@ -224,7 +225,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def span2combiners[U >: T, This](p: T => Boolean, before: Combiner[U, This], after: Combiner[U, This]) = {
var isBefore = true
while (hasNext && isBefore) {
- val curr = next
+ val curr = next()
if (p(curr)) before += curr
else {
if (isRemainingCheap) after.sizeHint(remaining + 1)
@@ -240,7 +241,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
var last = z
var i = from
while (hasNext) {
- last = op(last, next)
+ last = op(last, next())
array(i) = last
i += 1
}
@@ -249,7 +250,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def scanToCombiner[U >: T, That](startValue: U, op: (U, U) => U, cb: Combiner[U, That]) = {
var curr = startValue
while (hasNext) {
- curr = op(curr, next)
+ curr = op(curr, next())
cb += curr
}
cb
@@ -259,7 +260,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
var curr = startValue
var left = howmany
while (left > 0) {
- curr = op(curr, next)
+ curr = op(curr, next())
cb += curr
left -= 1
}
@@ -269,16 +270,16 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def zip2combiner[U >: T, S, That](otherpit: RemainsIterator[S], cb: Combiner[(U, S), That]): Combiner[(U, S), That] = {
if (isRemainingCheap && otherpit.isRemainingCheap) cb.sizeHint(remaining min otherpit.remaining)
while (hasNext && otherpit.hasNext) {
- cb += ((next, otherpit.next))
+ cb += ((next(), otherpit.next()))
}
cb
}
def zipAll2combiner[U >: T, S, That](that: RemainsIterator[S], thiselem: U, thatelem: S, cb: Combiner[(U, S), That]): Combiner[(U, S), That] = {
if (isRemainingCheap && that.isRemainingCheap) cb.sizeHint(remaining max that.remaining)
- while (this.hasNext && that.hasNext) cb += ((this.next, that.next))
- while (this.hasNext) cb += ((this.next, thatelem))
- while (that.hasNext) cb += ((thiselem, that.next))
+ while (this.hasNext && that.hasNext) cb += ((this.next(), that.next()))
+ while (this.hasNext) cb += ((this.next(), thatelem))
+ while (that.hasNext) cb += ((thiselem, that.next()))
cb
}
@@ -298,7 +299,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
var total = 0
var loop = true
while (hasNext && loop) {
- if (pred(next)) total += 1
+ if (pred(next())) total += 1
else loop = false
}
total
@@ -308,7 +309,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
var i = 0
var loop = true
while (hasNext && loop) {
- if (pred(next)) loop = false
+ if (pred(next())) loop = false
else i += 1
}
if (loop) -1 else i
@@ -318,7 +319,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
var pos = -1
var i = 0
while (hasNext) {
- if (pred(next)) pos = i
+ if (pred(next())) pos = i
i += 1
}
pos
@@ -326,7 +327,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
def corresponds[S](corr: (T, S) => Boolean)(that: Iterator[S]): Boolean = {
while (hasNext && that.hasNext) {
- if (!corr(next, that.next)) return false
+ if (!corr(next(), that.next())) return false
}
hasNext == that.hasNext
}
@@ -348,7 +349,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
//val cb = cbf(repr)
if (isRemainingCheap) cb.sizeHint(remaining)
var lst = List[S]()
- while (hasNext) lst ::= f(next)
+ while (hasNext) lst ::= f(next())
while (lst != Nil) {
cb += lst.head
lst = lst.tail
@@ -363,7 +364,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
while (hasNext) {
if (j == index) {
cb += elem
- next
+ next()
} else cb += next
j += 1
}
@@ -438,7 +439,7 @@ self =>
class Taken(taken: Int) extends IterableSplitter[T] {
var remaining = taken min self.remaining
def hasNext = remaining > 0
- def next = { remaining -= 1; self.next }
+ def next = { remaining -= 1; self.next() }
def dup: IterableSplitter[T] = self.dup.take(taken)
def split: Seq[IterableSplitter[T]] = takeSeq(self.split) { (p, n) => p.take(n) }
protected[this] def takeSeq[PI <: IterableSplitter[T]](sq: Seq[PI])(taker: (PI, Int) => PI) = {
@@ -466,7 +467,7 @@ self =>
class Mapped[S](f: T => S) extends IterableSplitter[S] {
signalDelegate = self.signalDelegate
def hasNext = self.hasNext
- def next = f(self.next)
+ def next = f(self.next())
def remaining = self.remaining
def dup: IterableSplitter[S] = self.dup map f
def split: Seq[IterableSplitter[S]] = self.split.map { _ map f }
@@ -483,8 +484,8 @@ self =>
} else false
def next = if (curr eq self) {
hasNext
- curr.next
- } else curr.next
+ curr.next()
+ } else curr.next()
def remaining = if (curr eq self) curr.remaining + that.remaining else curr.remaining
protected def firstNonEmpty = (curr eq self) && curr.hasNext
def dup: IterableSplitter[U] = self.dup.appendParIterable[U, PI](that)
@@ -496,7 +497,7 @@ self =>
class Zipped[S](protected val that: SeqSplitter[S]) extends IterableSplitter[(T, S)] {
signalDelegate = self.signalDelegate
def hasNext = self.hasNext && that.hasNext
- def next = (self.next, that.next)
+ def next = (self.next(), that.next())
def remaining = self.remaining min that.remaining
def dup: IterableSplitter[(T, S)] = self.dup.zipParSeq(that)
def split: Seq[IterableSplitter[(T, S)]] = {
@@ -514,9 +515,10 @@ self =>
signalDelegate = self.signalDelegate
def hasNext = self.hasNext || that.hasNext
def next = if (self.hasNext) {
- if (that.hasNext) (self.next, that.next)
- else (self.next, thatelem)
- } else (thiselem, that.next);
+ if (that.hasNext) (self.next(), that.next())
+ else (self.next(), thatelem)
+ } else (thiselem, that.next())
+
def remaining = self.remaining max that.remaining
def dup: IterableSplitter[(U, S)] = self.dup.zipAllParSeq(that, thiselem, thatelem)
def split: Seq[IterableSplitter[(U, S)]] = {
@@ -605,7 +607,7 @@ self =>
} else Seq(sz)
}
val (selfszfrom, thatszfrom) = splitsizes.zip(szcum.init).span(_._2 < selfrem)
- val (selfsizes, thatsizes) = (selfszfrom map { _._1 }, thatszfrom map { _._1 });
+ val (selfsizes, thatsizes) = (selfszfrom map { _._1 }, thatszfrom map { _._1 })
// split iterators
val selfs = self.psplit(selfsizes: _*)
diff --git a/src/library/scala/collection/parallel/Splitter.scala b/src/library/scala/collection/parallel/Splitter.scala
index dc49bcf9d7..458742df96 100644
--- a/src/library/scala/collection/parallel/Splitter.scala
+++ b/src/library/scala/collection/parallel/Splitter.scala
@@ -52,7 +52,7 @@ trait Splitter[+T] extends Iterator[T] {
object Splitter {
def empty[T]: Splitter[T] = new Splitter[T] {
def hasNext = false
- def next = Iterator.empty.next
+ def next = Iterator.empty.next()
def split = Seq(this)
}
}
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index cec9e294c1..441c4269c3 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -54,32 +54,22 @@ trait Task[R, +Tp] {
leaf(lastres)
result = result // ensure that effects of `leaf` are visible to readers of `result`
} catchBreak {
- signalAbort
+ signalAbort()
}
} catch {
case thr: Exception =>
result = result // ensure that effects of `leaf` are visible
throwable = thr
- signalAbort
+ signalAbort()
}
}
private[parallel] def tryMerge(t: Tp @uncheckedVariance) {
val that = t.asInstanceOf[Task[R, Tp]]
- val local = result // ensure that any effects of modifying `result` are detected
- // checkMerge(that)
if (this.throwable == null && that.throwable == null) merge(t)
mergeThrowables(that)
}
- private def checkMerge(that: Task[R, Tp] @uncheckedVariance) {
- if (this.throwable == null && that.throwable == null && (this.result == null || that.result == null)) {
- println("This: " + this + ", thr=" + this.throwable + "; merged with " + that + ", thr=" + that.throwable)
- } else if (this.throwable != null || that.throwable != null) {
- println("merging this: " + this + " with thr: " + this.throwable + " with " + that + ", thr=" + that.throwable)
- }
- }
-
private[parallel] def mergeThrowables(that: Task[_, _]) {
if (this.throwable != null && that.throwable != null) {
// merge exceptions, since there were multiple exceptions
@@ -176,7 +166,6 @@ trait AdaptiveWorkStealingTasks extends Tasks {
while (last.next != null) {
// val lastresult = Option(last.body.result)
- val beforelast = last
last = last.next
if (last.tryCancel()) {
// println("Done with " + beforelast.body + ", next direct is " + last.body)
@@ -202,7 +191,7 @@ trait AdaptiveWorkStealingTasks extends Tasks {
last = t
t.start()
}
- } while (head.body.shouldSplitFurther);
+ } while (head.body.shouldSplitFurther)
head.next = last
head
}
@@ -313,7 +302,7 @@ trait ThreadPoolTasks extends Tasks {
() => {
t.sync()
- t.body.forwardThrowable
+ t.body.forwardThrowable()
t.body.result
}
}
@@ -325,7 +314,7 @@ trait ThreadPoolTasks extends Tasks {
t.start()
t.sync()
- t.body.forwardThrowable
+ t.body.forwardThrowable()
t.body.result
}
@@ -357,60 +346,6 @@ object ThreadPoolTasks {
)
}
-
-/** An implementation of tasks objects based on the Java thread pooling API and synchronization using futures. */
-@deprecated("This implementation is not used.", "2.10.0")
-trait FutureThreadPoolTasks extends Tasks {
- import java.util.concurrent._
-
- trait WrappedTask[R, +Tp] extends Runnable with super.WrappedTask[R, Tp] {
- @volatile var future: Future[_] = null
-
- def start() = {
- executor.synchronized {
- future = executor.submit(this)
- }
- }
- def sync() = future.get
- def tryCancel = false
- def run = {
- compute()
- }
- }
-
- protected def newWrappedTask[R, Tp](b: Task[R, Tp]): WrappedTask[R, Tp]
-
- val environment: AnyRef = FutureThreadPoolTasks.defaultThreadPool
- def executor = environment.asInstanceOf[ThreadPoolExecutor]
-
- def execute[R, Tp](task: Task[R, Tp]): () => R = {
- val t = newWrappedTask(task)
-
- // debuglog("-----------> Executing without wait: " + task)
- t.start
-
- () => {
- t.sync
- t.body.forwardThrowable
- t.body.result
- }
- }
-
- def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = {
- val t = newWrappedTask(task)
-
- // debuglog("-----------> Executing with wait: " + task)
- t.start
-
- t.sync
- t.body.forwardThrowable
- t.body.result
- }
-
- def parallelismLevel = FutureThreadPoolTasks.numCores
-
-}
-
object FutureThreadPoolTasks {
import java.util.concurrent._
@@ -467,8 +402,8 @@ trait ForkJoinTasks extends Tasks with HavingForkJoinPool {
}
() => {
- fjtask.sync
- fjtask.body.forwardThrowable
+ fjtask.sync()
+ fjtask.body.forwardThrowable()
fjtask.body.result
}
}
@@ -489,9 +424,9 @@ trait ForkJoinTasks extends Tasks with HavingForkJoinPool {
forkJoinPool.execute(fjtask)
}
- fjtask.sync
+ fjtask.sync()
// if (fjtask.body.throwable != null) println("throwing: " + fjtask.body.throwable + " at " + fjtask.body)
- fjtask.body.forwardThrowable
+ fjtask.body.forwardThrowable()
fjtask.body.result
}
diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
index b25230bbeb..f3be47ea03 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
@@ -109,7 +109,7 @@ self =>
}
def next(): (K, V) = {
i += 1
- val r = triter.next
+ val r = triter.next()
r
}
def hasNext: Boolean = {
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index e7e64eb2ad..4f34993b85 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -106,7 +106,7 @@ self =>
}
def next(): T = {
i += 1
- triter.next
+ triter.next()
}
def hasNext: Boolean = {
i < sz
diff --git a/src/library/scala/collection/parallel/immutable/ParIterable.scala b/src/library/scala/collection/parallel/immutable/ParIterable.scala
index 142f07ff26..ec07e44c4d 100644
--- a/src/library/scala/collection/parallel/immutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/immutable/ParIterable.scala
@@ -15,8 +15,6 @@ import scala.collection.generic._
import scala.collection.parallel.ParIterableLike
import scala.collection.parallel.Combiner
-import scala.collection.GenIterable
-
/** A template trait for immutable parallel iterable collections.
*
diff --git a/src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled b/src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled
deleted file mode 100644
index 5f9c9c3d3d..0000000000
--- a/src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled
+++ /dev/null
@@ -1,128 +0,0 @@
-/*                     __                                               *\
-**     ________ ___   / /  ___     Scala API                            **
-**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
-**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
-** /____/\___/_/ |_/____/_/ | |                                         **
-**                          |/                                          **
-\*                                                                      */
-
-
-package scala.collection.parallel.immutable
-
-
-
-import scala.collection.immutable.NumericRange
-import scala.collection.parallel.Combiner
-import scala.collection.generic.CanCombineFrom
-import scala.collection.parallel.ParIterableIterator
-
-
-
-/** Parallel ranges for numeric types.
- *
- * $paralleliterableinfo
- *
- * $sideeffects
- *
- * @param range the sequential range this parallel range was obtained from
- *
- * @author Aleksandar Prokopec
- * @since 2.9
- *
- * @define Coll `immutable.ParRange`
- * @define coll immutable parallel range
- */
-@SerialVersionUID(1L)
-class ParNumericRange[T](val range: NumericRange[T])(implicit num: Integral[T])
-extends ParSeq[T]
- with Serializable
-{
-self =>
-
- def seq = range
-
- @inline final def length = range.length
-
- @inline final def apply(idx: Int) = range.apply(idx);
-
- def parallelIterator = new ParNumericRangeIterator with SCPI
-
- type SCPI = SignalContextPassingIterator[ParNumericRangeIterator]
-
- class ParNumericRangeIterator(range: NumericRange[T] = self.range, num: Integral[T] = self.num)
- extends ParIterator {
- me: SignalContextPassingIterator[ParNumericRangeIterator] =>
- override def toString = "ParNumericRangeIterator(over: " + range + ")"
- private var ind = 0
- private val len = range.length
-
- final def remaining = len - ind
-
- final def hasNext = ind < len
-
- final def next = if (hasNext) {
- val r = range.apply(ind)
- ind += 1
- r
- } else Iterator.empty.next
-
- private def rangeleft: NumericRange[T] = range.drop(ind)
-
- def dup = new ParNumericRangeIterator(rangeleft) with SCPI
-
- def split = {
- val rleft = rangeleft
- val elemleft = rleft.length
- if (elemleft < 2) Seq(new ParNumericRangeIterator(rleft) with SCPI)
- else Seq(
- new ParNumericRangeIterator(rleft.take(elemleft / 2)) with SCPI,
- new ParNumericRangeIterator(rleft.drop(elemleft / 2)) with SCPI
- )
- }
-
- def psplit(sizes: Int*) = {
- var rleft = rangeleft
- for (sz <- sizes) yield {
- val fronttaken = rleft.take(sz)
- rleft = rleft.drop(sz)
- new ParNumericRangeIterator(fronttaken) with SCPI
- }
- }
-
- /* accessors */
-
- override def foreach[U](f: T => U): Unit = {
- rangeleft.foreach(f)
- ind = len
- }
-
- override def reduce[U >: T](op: (U, U) => U): U = {
- val r = rangeleft.reduceLeft(op)
- ind = len
- r
- }
-
- /* transformers */
-
- override def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = {
- while (hasNext) {
- cb += f(next)
- }
- cb
- }
- }
-
-}
-
-
-object ParNumericRange {
- def apply[T](start: T, end: T, step: T, inclusive: Boolean)(implicit num: Integral[T]) = new ParNumericRange[T](
- if (inclusive) NumericRange.inclusive(start, end, step)(num)
- else NumericRange.apply(start, end, step)(num)
- )
-}
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala
index 0c9f82ba2a..78bbad5933 100644
--- a/src/library/scala/collection/parallel/immutable/ParRange.scala
+++ b/src/library/scala/collection/parallel/immutable/ParRange.scala
@@ -12,7 +12,6 @@ import scala.collection.immutable.Range
import scala.collection.parallel.Combiner
import scala.collection.parallel.SeqSplitter
import scala.collection.generic.CanCombineFrom
-import scala.collection.parallel.IterableSplitter
import scala.collection.Iterator
/** Parallel ranges.
@@ -42,7 +41,7 @@ self =>
@inline final def length = range.length
- @inline final def apply(idx: Int) = range.apply(idx);
+ @inline final def apply(idx: Int) = range.apply(idx)
def splitter = new ParRangeIterator
@@ -60,7 +59,7 @@ self =>
val r = range.apply(ind)
ind += 1
r
- } else Iterator.empty.next
+ } else Iterator.empty.next()
private def rangeleft = range.drop(ind)
diff --git a/src/library/scala/collection/parallel/immutable/ParSeq.scala b/src/library/scala/collection/parallel/immutable/ParSeq.scala
index aa19307387..b54a5f0205 100644
--- a/src/library/scala/collection/parallel/immutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSeq.scala
@@ -18,9 +18,6 @@ import scala.collection.generic.CanCombineFrom
import scala.collection.generic.ParFactory
import scala.collection.parallel.ParSeqLike
import scala.collection.parallel.Combiner
-import scala.collection.GenSeq
-
-
/** An immutable variant of `ParSeq`.
*
diff --git a/src/library/scala/collection/parallel/immutable/ParSet.scala b/src/library/scala/collection/parallel/immutable/ParSet.scala
index 3622377a55..aba8486ab5 100644
--- a/src/library/scala/collection/parallel/immutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSet.scala
@@ -9,7 +9,6 @@
package scala.collection
package parallel.immutable
-import scala.collection.GenSet
import scala.collection.generic._
import scala.collection.parallel.ParSetLike
import scala.collection.parallel.Combiner
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index 0a4f30131f..f9563cacc7 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -181,10 +181,10 @@ self =>
override def fold[U >: T](z: U)(op: (U, U) => U): U = foldLeft[U](z)(op)
- override def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop)
+ override def aggregate[S](z: =>S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop)
override def sum[U >: T](implicit num: Numeric[U]): U = {
- var s = sum_quick(num, arr, until, i, num.zero)
+ val s = sum_quick(num, arr, until, i, num.zero)
i = until
s
}
@@ -200,7 +200,7 @@ self =>
}
override def product[U >: T](implicit num: Numeric[U]): U = {
- var p = product_quick(num, arr, until, i, num.one)
+ val p = product_quick(num, arr, until, i, num.one)
i = until
p
}
@@ -226,7 +226,7 @@ self =>
if (all) i = nextuntil
else {
i = until
- abort
+ abort()
}
if (isAborted) return false
@@ -241,7 +241,7 @@ self =>
if (p(a(j).asInstanceOf[T])) j += 1
else return false
}
- return true
+ true
}
override def exists(p: T => Boolean): Boolean = {
@@ -254,7 +254,7 @@ self =>
some = exists_quick(p, array, nextuntil, i)
if (some) {
i = until
- abort
+ abort()
} else i = nextuntil
if (isAborted) return true
@@ -269,7 +269,7 @@ self =>
if (p(a(j).asInstanceOf[T])) return true
else j += 1
}
- return false
+ false
}
override def find(p: T => Boolean): Option[T] = {
@@ -283,7 +283,7 @@ self =>
if (r != None) {
i = until
- abort
+ abort()
} else i = nextuntil
if (isAborted) return r
@@ -298,7 +298,7 @@ self =>
if (p(elem)) return Some(elem)
else j += 1
}
- return None
+ None
}
override def drop(n: Int): ParArrayIterator = {
@@ -405,9 +405,10 @@ self =>
private def collect2combiner_quick[S, That](pf: PartialFunction[T, S], a: Array[Any], cb: Builder[S, That], ntil: Int, from: Int) {
var j = from
+ val runWith = pf.runWith(b => cb += b)
while (j < ntil) {
val curr = a(j).asInstanceOf[T]
- if (pf.isDefinedAt(curr)) cb += pf(curr)
+ runWith(curr)
j += 1
}
}
@@ -432,7 +433,7 @@ self =>
private def filter2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int) {
var j = i
while(j < ntil) {
- var curr = a(j).asInstanceOf[T]
+ val curr = a(j).asInstanceOf[T]
if (pred(curr)) cb += curr
j += 1
}
@@ -447,7 +448,7 @@ self =>
private def filterNot2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int) {
var j = i
while(j < ntil) {
- var curr = a(j).asInstanceOf[T]
+ val curr = a(j).asInstanceOf[T]
if (!pred(curr)) cb += curr
j += 1
}
@@ -576,8 +577,6 @@ self =>
/* operations */
- private def asTask[R, Tp](t: Any) = t.asInstanceOf[Task[R, Tp]]
-
private def buildsArray[S, That](c: Builder[S, That]) = c.isInstanceOf[ParArrayCombiner[_]]
override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[ParArray[T], S, That]) = if (buildsArray(bf(repr))) {
@@ -612,7 +611,8 @@ self =>
class ScanToArray[U >: T](tree: ScanTree[U], z: U, op: (U, U) => U, targetarr: Array[Any])
extends Task[Unit, ScanToArray[U]] {
- var result = ();
+ var result = ()
+
def leaf(prev: Option[Unit]) = iterate(tree)
private def iterate(tree: ScanTree[U]): Unit = tree match {
case ScanNode(left, right) =>
@@ -648,7 +648,8 @@ self =>
}
class Map[S](f: T => S, targetarr: Array[Any], offset: Int, howmany: Int) extends Task[Unit, Map[S]] {
- var result = ();
+ var result = ()
+
def leaf(prev: Option[Unit]) = {
val tarr = targetarr
val sarr = array
diff --git a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
index 8bc108a738..aa790dd548 100644
--- a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
@@ -38,10 +38,6 @@ trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] {
}
}
- private def checkbounds() = if (idx >= itertable.length) {
- throw new IndexOutOfBoundsException(idx.toString)
- }
-
def newIterator(index: Int, until: Int, totalsize: Int): IterableSplitter[T]
def remaining = totalsize - traversed
@@ -52,7 +48,7 @@ trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] {
idx += 1
if (hasNext) scan()
r
- } else Iterator.empty.next
+ } else Iterator.empty.next()
def dup = newIterator(idx, until, totalsize)
def split = if (remaining > 1) {
val divpt = (until + idx) / 2
@@ -102,11 +98,5 @@ trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] {
}
count
}
-
- private def check() = if (table.slice(idx, until).count(_ != null) != remaining) {
- println("Invariant broken: " + debugInformation)
- assert(false)
- }
}
-
}
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index 11588e555b..e94db89865 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -97,7 +97,8 @@ self =>
class ParHashMapIterator(start: Int, untilIdx: Int, totalSize: Int, e: DefaultEntry[K, V])
extends EntryIterator[(K, V), ParHashMapIterator](start, untilIdx, totalSize, e) {
- def entry2item(entry: DefaultEntry[K, V]) = (entry.key, entry.value);
+ def entry2item(entry: DefaultEntry[K, V]) = (entry.key, entry.value)
+
def newIterator(idxFrom: Int, idxUntil: Int, totalSz: Int, es: DefaultEntry[K, V]) =
new ParHashMapIterator(idxFrom, idxUntil, totalSz, es)
}
@@ -166,9 +167,8 @@ private[mutable] abstract class ParHashMapCombiner[K, V](private val tableLoadFa
extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntry[K, V], ParHashMapCombiner[K, V]](ParHashMapCombiner.numblocks)
with scala.collection.mutable.HashTable.HashUtils[K]
{
- private var mask = ParHashMapCombiner.discriminantmask
- private var nonmasklen = ParHashMapCombiner.nonmasklength
- private var seedvalue = 27
+ private val nonmasklen = ParHashMapCombiner.nonmasklength
+ private val seedvalue = 27
def +=(elem: (K, V)) = {
sz += 1
@@ -232,8 +232,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
def setSize(sz: Int) = tableSize = sz
def insertEntry(/*block: Int, */e: DefaultEntry[K, V]) = {
var h = index(elemHashCode(e.key))
- // assertCorrectBlock(h, block)
- var olde = table(h).asInstanceOf[DefaultEntry[K, V]]
+ val olde = table(h).asInstanceOf[DefaultEntry[K, V]]
// check if key already exists
var ce = olde
@@ -252,13 +251,6 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
true
} else false
}
- private def assertCorrectBlock(h: Int, block: Int) {
- val blocksize = table.length / (1 << ParHashMapCombiner.discriminantbits)
- if (!(h >= block * blocksize && h < (block + 1) * blocksize)) {
- println("trying to put " + h + " into block no.: " + block + ", range: [" + block * blocksize + ", " + (block + 1) * blocksize + ">")
- assert(h >= block * blocksize && h < (block + 1) * blocksize)
- }
- }
protected def createNewEntry[X](key: K, x: X) = ???
}
@@ -288,7 +280,6 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
val chunksz = unrolled.size
while (i < chunksz) {
val elem = chunkarr(i)
- // assertCorrectBlock(block, elem.key)
if (t.insertEntry(elem)) insertcount += 1
i += 1
}
@@ -297,13 +288,6 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
}
insertcount
}
- private def assertCorrectBlock(block: Int, k: K) {
- val hc = improve(elemHashCode(k), seedvalue)
- if ((hc >>> nonmasklen) != block) {
- println(hc + " goes to " + (hc >>> nonmasklen) + ", while expected block is " + block)
- assert((hc >>> nonmasklen) == block)
- }
- }
def split = {
val fp = howmany / 2
List(new FillBlocks(buckets, table, offset, fp), new FillBlocks(buckets, table, offset + fp, howmany - fp))
@@ -320,7 +304,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
private[parallel] object ParHashMapCombiner {
private[mutable] val discriminantbits = 5
private[mutable] val numblocks = 1 << discriminantbits
- private[mutable] val discriminantmask = ((1 << discriminantbits) - 1);
+ private[mutable] val discriminantmask = ((1 << discriminantbits) - 1)
private[mutable] val nonmasklength = 32 - discriminantbits
def apply[K, V] = new ParHashMapCombiner[K, V](HashTable.defaultLoadFactor) {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]]
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 57fab57348..0287171369 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -60,18 +60,18 @@ extends ParSet[T]
override def seq = new scala.collection.mutable.HashSet(hashTableContents)
def +=(elem: T) = {
- addEntry(elem)
+ addElem(elem)
this
}
def -=(elem: T) = {
- removeEntry(elem)
+ removeElem(elem)
this
}
override def stringPrefix = "ParHashSet"
- def contains(elem: T) = containsEntry(elem)
+ def contains(elem: T) = containsElem(elem)
def splitter = new ParHashSetIterator(0, table.length, size)
@@ -117,23 +117,23 @@ object ParHashSet extends ParSetFactory[ParHashSet] {
private[mutable] abstract class ParHashSetCombiner[T](private val tableLoadFactor: Int)
-extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks)
+extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], AnyRef, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks)
with scala.collection.mutable.FlatHashTable.HashUtils[T] {
//self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
- private var mask = ParHashSetCombiner.discriminantmask
- private var nonmasklen = ParHashSetCombiner.nonmasklength
- private var seedvalue = 27
+ private val nonmasklen = ParHashSetCombiner.nonmasklength
+ private val seedvalue = 27
def +=(elem: T) = {
+ val entry = elemToEntry(elem)
sz += 1
- val hc = improve(elemHashCode(elem), seedvalue)
+ val hc = improve(entry.hashCode, seedvalue)
val pos = hc >>> nonmasklen
if (buckets(pos) eq null) {
// initialize bucket
- buckets(pos) = new UnrolledBuffer[Any]
+ buckets(pos) = new UnrolledBuffer[AnyRef]
}
// add to bucket
- buckets(pos) += elem
+ buckets(pos) += entry
this
}
@@ -147,7 +147,7 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
val table = new AddingFlatHashTable(size, tableLoadFactor, seedvalue)
val (inserted, leftovers) = combinerTaskSupport.executeAndWaitResult(new FillBlocks(buckets, table, 0, buckets.length))
var leftinserts = 0
- for (elem <- leftovers) leftinserts += table.insertEntry(0, table.tableLength, elem.asInstanceOf[T])
+ for (entry <- leftovers) leftinserts += table.insertEntry(0, table.tableLength, entry)
table.setSize(leftinserts + inserted)
table.hashTableContents
}
@@ -159,10 +159,10 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
sizeMapInit(table.length)
seedvalue = ParHashSetCombiner.this.seedvalue
for {
- buffer <- buckets;
- if buffer ne null;
- elem <- buffer
- } addEntry(elem.asInstanceOf[T])
+ buffer <- buckets
+ if buffer ne null
+ entry <- buffer
+ } addEntry(entry)
}
tbl.hashTableContents
}
@@ -189,12 +189,12 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
def setSize(sz: Int) = tableSize = sz
/**
- * The elements are added using the `insertEntry` method. This method accepts three
+ * The elements are added using the `insertElem` method. This method accepts three
* arguments:
*
* @param insertAt where to add the element (set to -1 to use its hashcode)
* @param comesBefore the position before which the element should be added to
- * @param elem the element to be added
+ * @param newEntry the element to be added
*
* If the element is to be inserted at the position corresponding to its hash code,
* the table will try to add the element in such a position if possible. Collisions are resolved
@@ -206,17 +206,17 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
* If the element is already present in the hash table, it is not added, and this method
* returns 0. If the element is added, it returns 1.
*/
- def insertEntry(insertAt: Int, comesBefore: Int, elem: T): Int = {
+ def insertEntry(insertAt: Int, comesBefore: Int, newEntry : AnyRef): Int = {
var h = insertAt
- if (h == -1) h = index(elemHashCode(elem))
- var entry = table(h)
- while (null != entry) {
- if (entry == elem) return 0
+ if (h == -1) h = index(newEntry.hashCode)
+ var curEntry = table(h)
+ while (null != curEntry) {
+ if (curEntry == newEntry) return 0
h = h + 1 // we *do not* do `(h + 1) % table.length` here, because we'll never overflow!!
if (h >= comesBefore) return -1
- entry = table(h)
+ curEntry = table(h)
}
- table(h) = elem.asInstanceOf[AnyRef]
+ table(h) = newEntry
// this is incorrect since we set size afterwards anyway and a counter
// like this would not even work:
@@ -233,13 +233,14 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
/* tasks */
- class FillBlocks(buckets: Array[UnrolledBuffer[Any]], table: AddingFlatHashTable, val offset: Int, val howmany: Int)
- extends Task[(Int, UnrolledBuffer[Any]), FillBlocks] {
- var result = (Int.MinValue, new UnrolledBuffer[Any]);
- def leaf(prev: Option[(Int, UnrolledBuffer[Any])]) {
+ class FillBlocks(buckets: Array[UnrolledBuffer[AnyRef]], table: AddingFlatHashTable, val offset: Int, val howmany: Int)
+ extends Task[(Int, UnrolledBuffer[AnyRef]), FillBlocks] {
+ var result = (Int.MinValue, new UnrolledBuffer[AnyRef])
+
+ def leaf(prev: Option[(Int, UnrolledBuffer[AnyRef])]) {
var i = offset
var totalinserts = 0
- var leftover = new UnrolledBuffer[Any]()
+ var leftover = new UnrolledBuffer[AnyRef]()
while (i < (offset + howmany)) {
val (inserted, intonextblock) = fillBlock(i, buckets(i), leftover)
totalinserts += inserted
@@ -251,11 +252,11 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
private val blocksize = table.tableLength >> ParHashSetCombiner.discriminantbits
private def blockStart(block: Int) = block * blocksize
private def nextBlockStart(block: Int) = (block + 1) * blocksize
- private def fillBlock(block: Int, elems: UnrolledBuffer[Any], leftovers: UnrolledBuffer[Any]): (Int, UnrolledBuffer[Any]) = {
+ private def fillBlock(block: Int, elems: UnrolledBuffer[AnyRef], leftovers: UnrolledBuffer[AnyRef]): (Int, UnrolledBuffer[AnyRef]) = {
val beforePos = nextBlockStart(block)
// store the elems
- val (elemsIn, elemsLeft) = if (elems != null) insertAll(-1, beforePos, elems) else (0, UnrolledBuffer[Any]())
+ val (elemsIn, elemsLeft) = if (elems != null) insertAll(-1, beforePos, elems) else (0, UnrolledBuffer[AnyRef]())
// store the leftovers
val (leftoversIn, leftoversLeft) = insertAll(blockStart(block), beforePos, leftovers)
@@ -263,21 +264,21 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
// return the no. of stored elements tupled with leftovers
(elemsIn + leftoversIn, elemsLeft concat leftoversLeft)
}
- private def insertAll(atPos: Int, beforePos: Int, elems: UnrolledBuffer[Any]): (Int, UnrolledBuffer[Any]) = {
- var leftovers = new UnrolledBuffer[Any]
+ private def insertAll(atPos: Int, beforePos: Int, elems: UnrolledBuffer[AnyRef]): (Int, UnrolledBuffer[AnyRef]) = {
+ val leftovers = new UnrolledBuffer[AnyRef]
var inserted = 0
var unrolled = elems.headPtr
var i = 0
- var t = table
+ val t = table
while (unrolled ne null) {
val chunkarr = unrolled.array
val chunksz = unrolled.size
while (i < chunksz) {
- val elem = chunkarr(i)
- val res = t.insertEntry(atPos, beforePos, elem.asInstanceOf[T])
+ val entry = chunkarr(i)
+ val res = t.insertEntry(atPos, beforePos, entry)
if (res >= 0) inserted += res
- else leftovers += elem
+ else leftovers += entry
i += 1
}
i = 0
@@ -319,7 +320,7 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
private[parallel] object ParHashSetCombiner {
private[mutable] val discriminantbits = 5
private[mutable] val numblocks = 1 << discriminantbits
- private[mutable] val discriminantmask = ((1 << discriminantbits) - 1);
+ private[mutable] val discriminantmask = ((1 << discriminantbits) - 1)
private[mutable] val nonmasklength = 32 - discriminantbits
def apply[T] = new ParHashSetCombiner[T](FlatHashTable.defaultLoadFactor) {} //with EnvironmentPassingCombiner[T, ParHashSet[T]]
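Aside: the combiner above now buckets `elemToEntry(elem)` (an `AnyRef` entry) instead of the raw element. In a flat, open-addressed table `null` marks an empty slot, so an element that is itself `null` has to be boxed behind a sentinel before it can be stored; `elemToEntry`/`entryToElem` in `FlatHashTable.HashUtils` perform that wrapping. A minimal re-implementation of the idea (my own names, not the library helpers):

object SentinelDemo {
  private object NullSentinel

  def elemToEntry(x: Any): AnyRef =
    if (x == null) NullSentinel else x.asInstanceOf[AnyRef]

  def entryToElem(e: AnyRef): Any =
    if (e eq NullSentinel) null else e

  def main(args: Array[String]): Unit = {
    val stored = elemToEntry(null)
    println(stored ne null)              // true: the slot now looks occupied
    println(entryToElem(stored) == null) // true: the original element round-trips
  }
}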
diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
index 66ddef6a1e..5aa1dba17c 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
@@ -110,7 +110,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collec
} else Seq(this.asInstanceOf[IterRepr])
private def convertToArrayBuffer(chainhead: Entry): mutable.ArrayBuffer[T] = {
- var buff = mutable.ArrayBuffer[Entry]()
+ val buff = mutable.ArrayBuffer[Entry]()
var curr = chainhead
while (curr ne null) {
buff += curr
diff --git a/src/library/scala/collection/parallel/mutable/ParIterable.scala b/src/library/scala/collection/parallel/mutable/ParIterable.scala
index 7090c510a0..d76e4b1745 100644
--- a/src/library/scala/collection/parallel/mutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParIterable.scala
@@ -12,8 +12,6 @@ package scala.collection.parallel.mutable
import scala.collection.generic._
import scala.collection.parallel.ParIterableLike
import scala.collection.parallel.Combiner
-import scala.collection.GenIterable
-
/** A template trait for mutable parallel iterable collections.
*
diff --git a/src/library/scala/collection/parallel/mutable/ParMapLike.scala b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
index cdcfc59f8f..08bc706c8a 100644
--- a/src/library/scala/collection/parallel/mutable/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
@@ -12,13 +12,10 @@ package mutable
import scala.collection.generic._
-import scala.collection.mutable.Builder
import scala.collection.mutable.Cloneable
import scala.collection.generic.Growable
import scala.collection.generic.Shrinkable
-
-
/** A template trait for mutable parallel maps. This trait is to be mixed in
* with concrete parallel maps to override the representation type.
*
diff --git a/src/library/scala/collection/parallel/mutable/ParSeq.scala b/src/library/scala/collection/parallel/mutable/ParSeq.scala
index 95a4d4a13a..8a55ab83f1 100644
--- a/src/library/scala/collection/parallel/mutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSeq.scala
@@ -17,12 +17,6 @@ import scala.collection.generic.CanCombineFrom
import scala.collection.generic.ParFactory
import scala.collection.parallel.ParSeqLike
import scala.collection.parallel.Combiner
-import scala.collection.GenSeq
-
-
-
-
-
/** A mutable variant of `ParSeq`.
*
diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala
index d8f821746c..ca41852512 100644
--- a/src/library/scala/collection/parallel/mutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSet.scala
@@ -13,11 +13,6 @@ package scala.collection.parallel.mutable
import scala.collection.generic._
import scala.collection.parallel.Combiner
-import scala.collection.GenSet
-
-
-
-
/** A mutable variant of `ParSet`.
*
diff --git a/src/library/scala/collection/parallel/mutable/ParSetLike.scala b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
index 609888f1a9..0941229124 100644
--- a/src/library/scala/collection/parallel/mutable/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
@@ -10,17 +10,11 @@
package scala.collection
package parallel.mutable
-
-
-import scala.collection.mutable.Set
-import scala.collection.mutable.Builder
import scala.collection.mutable.Cloneable
import scala.collection.GenSetLike
import scala.collection.generic.Growable
import scala.collection.generic.Shrinkable
-
-
/** A template trait for mutable parallel sets. This trait is mixed in with concrete
* parallel sets to override the representation type.
*
diff --git a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
index 61a50a124d..60f4709a8c 100644
--- a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
@@ -136,7 +136,7 @@ extends TrieMapIterator[K, V](lev, ct, mustInit)
}
def dup = {
- val it = newIterator(0, ct, false)
+ val it = newIterator(0, ct, _mustInit = false)
dupTo(it)
it.iterated = this.iterated
it
diff --git a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
index dc31d1bc25..0b9b51bc5b 100644
--- a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
@@ -26,7 +26,7 @@ trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedA
override def sizeHint(sz: Int) = if (chain.length == 1) chain(0).sizeHint(sz)
// public method with private[mutable] type ExposedArrayBuffer in parameter type; cannot be overridden.
- def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c)
+ final def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c)
def allocateAndCopy = if (chain.size > 1) {
val arrayseq = new ArraySeq[T](size)
diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
index c3a379485d..7766f07e23 100644
--- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
@@ -47,7 +47,7 @@ extends Combiner[T, ParArray[T]] {
}
def clear() {
- buff.clear
+ buff.clear()
}
override def sizeHint(sz: Int) = {
@@ -69,7 +69,8 @@ extends Combiner[T, ParArray[T]] {
class CopyUnrolledToArray(array: Array[Any], offset: Int, howmany: Int)
extends Task[Unit, CopyUnrolledToArray] {
- var result = ();
+ var result = ()
+
def leaf(prev: Option[Unit]) = if (howmany > 0) {
var totalleft = howmany
val (startnode, startpos) = findStart(offset)
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index 83aa99ad11..d91f70da75 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -139,7 +139,7 @@ package parallel {
/** Composite throwable - thrown when multiple exceptions are thrown at the same time. */
final case class CompositeThrowable(
- val throwables: Set[Throwable]
+ throwables: Set[Throwable]
) extends Exception(
"Multiple exceptions thrown during a parallel computation: " +
throwables.map(t => t + "\n" + t.getStackTrace.take(10).++("...").mkString("\n")).mkString("\n\n")
diff --git a/src/library/scala/collection/script/Message.scala b/src/library/scala/collection/script/Message.scala
index 2ab7ea726a..7428cd2b81 100644
--- a/src/library/scala/collection/script/Message.scala
+++ b/src/library/scala/collection/script/Message.scala
@@ -69,7 +69,7 @@ class Script[A] extends ArrayBuffer[Message[A]] with Message[A] {
override def toString(): String = {
var res = "Script("
- var it = this.iterator
+ val it = this.iterator
var i = 1
while (it.hasNext) {
if (i > 1)
diff --git a/src/library/scala/concurrent/BatchingExecutor.scala b/src/library/scala/concurrent/BatchingExecutor.scala
new file mode 100644
index 0000000000..a0d7aaea47
--- /dev/null
+++ b/src/library/scala/concurrent/BatchingExecutor.scala
@@ -0,0 +1,117 @@
+/*                     __                                               *\
+**     ________ ___   / /  ___     Scala API                            **
+**    / __/ __// _ | / /  / _ |    (c) 2003-2013, LAMP/EPFL             **
+**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
+** /____/\___/_/ |_/____/_/ | |                                         **
+**                          |/                                          **
+\*                                                                      */
+
+package scala.concurrent
+
+import java.util.concurrent.Executor
+import scala.annotation.tailrec
+
+/**
+ * Mixin trait for an Executor
+ * which groups multiple nested `Runnable.run()` calls
+ * into a single Runnable passed to the original
+ * Executor. This can be a useful optimization
+ * because it bypasses the original context's task
+ * queue and keeps related (nested) code on a single
+ * thread which may improve CPU affinity. However,
+ * if tasks passed to the Executor are blocking
+ * or expensive, this optimization can prevent work-stealing
+ * and make performance worse. Also, some ExecutionContext
+ * may be fast enough natively that this optimization just
+ * adds overhead.
+ * The default ExecutionContext.global is already batching
+ * or fast enough not to benefit from it; while
+ * `fromExecutor` and `fromExecutorService` do NOT add
+ * this optimization since they don't know whether the underlying
+ * executor will benefit from it.
+ * A batching executor can create deadlocks if code does
+ * not use `scala.concurrent.blocking` when it should,
+ * because tasks created within other tasks will block
+ * on the outer task completing.
+ * This executor may run tasks in any order, including LIFO order.
+ * There are no ordering guarantees.
+ *
+ * WARNING: The underlying Executor's execute-method must not execute the submitted Runnable
+ * in the calling thread synchronously. It must enqueue/handoff the Runnable.
+ */
+private[concurrent] trait BatchingExecutor extends Executor {
+
+ // invariant: if "_tasksLocal.get ne null" then we are inside BatchingRunnable.run; if it is null, we are outside
+ private val _tasksLocal = new ThreadLocal[List[Runnable]]()
+
+ private class Batch(val initial: List[Runnable]) extends Runnable with BlockContext {
+ private var parentBlockContext: BlockContext = _
+ // this method runs in the delegate ExecutionContext's thread
+ override def run(): Unit = {
+ require(_tasksLocal.get eq null)
+
+ val prevBlockContext = BlockContext.current
+ BlockContext.withBlockContext(this) {
+ try {
+ parentBlockContext = prevBlockContext
+
+ @tailrec def processBatch(batch: List[Runnable]): Unit = batch match {
+ case Nil => ()
+ case head :: tail =>
+ _tasksLocal set tail
+ try {
+ head.run()
+ } catch {
+ case t: Throwable =>
+ // if one task throws, move the
+ // remaining tasks to another thread
+ // so we can throw the exception
+ // up to the invoking executor
+ val remaining = _tasksLocal.get
+ _tasksLocal set Nil
+ unbatchedExecute(new Batch(remaining)) //TODO what if this submission fails?
+ throw t // rethrow
+ }
+ processBatch(_tasksLocal.get) // since head.run() can add entries, always do _tasksLocal.get here
+ }
+
+ processBatch(initial)
+ } finally {
+ _tasksLocal.remove()
+ parentBlockContext = null
+ }
+ }
+ }
+
+ override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = {
+ // if we know there will be blocking, we don't want to keep tasks queued up because it could deadlock.
+ {
+ val tasks = _tasksLocal.get
+ _tasksLocal set Nil
+ if ((tasks ne null) && tasks.nonEmpty)
+ unbatchedExecute(new Batch(tasks))
+ }
+
+ // now delegate the blocking to the previous BC
+ require(parentBlockContext ne null)
+ parentBlockContext.blockOn(thunk)
+ }
+ }
+
+ protected def unbatchedExecute(r: Runnable): Unit
+
+ override def execute(runnable: Runnable): Unit = {
+ if (batchable(runnable)) { // If we can batch the runnable
+ _tasksLocal.get match {
+ case null => unbatchedExecute(new Batch(List(runnable))) // If we aren't in batching mode yet, enqueue batch
+ case some => _tasksLocal.set(runnable :: some) // If we are already in batching mode, add to batch
+ }
+ } else unbatchedExecute(runnable) // If not batchable, just delegate to underlying
+ }
+
+ /** Override this to define which runnables will be batched. */
+ def batchable(runnable: Runnable): Boolean = runnable match {
+ case _: OnCompleteRunnable => true
+ case _ => false
+ }
+}
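
The trait above is private[concurrent], so it cannot be mixed in from user code; as a rough illustration of the batching idea its documentation describes, the following standalone sketch (hypothetical names, no BlockContext/blocking handling) collects nested execute() calls into a thread-local list and drains them on the worker thread that started the batch.

import java.util.concurrent.{ Executor, Executors }

// Simplified illustration only: nested execute() calls made while a batch is
// running are appended to a thread-local list instead of being resubmitted.
object BatchingSketch {
  private val underlying = Executors.newFixedThreadPool(2)

  object batching extends Executor {
    private val local = new ThreadLocal[List[Runnable]]

    def execute(r: Runnable): Unit = local.get match {
      case null =>
        // not inside a batch yet: hand a single Runnable to the real executor
        underlying.execute(new Runnable {
          def run(): Unit = {
            local.set(r :: Nil)
            try {
              while (local.get.nonEmpty) {
                val batch = local.get
                local.set(batch.tail)
                batch.head.run() // may call execute() again, growing the batch
              }
            } finally local.remove()
          }
        })
      case batch =>
        local.set(r :: batch) // already batching on this thread: just enqueue
    }
  }

  def main(args: Array[String]): Unit = {
    batching.execute(new Runnable {
      def run(): Unit = {
        println("outer on " + Thread.currentThread.getName)
        batching.execute(new Runnable {
          def run(): Unit = println("nested on " + Thread.currentThread.getName)
        })
      }
    })
    underlying.shutdown()
  }
}

Every nested submission stays on the thread already running the batch, which is exactly the CPU-affinity benefit and the deadlock risk the documentation above warns about.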
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 6b6ad29074..95b393dd0e 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -576,7 +576,7 @@ object Future {
def sequence[A, M[_] <: TraversableOnce[_]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = {
in.foldLeft(Promise.successful(cbf(in)).future) {
(fr, fa) => for (r <- fr; a <- fa.asInstanceOf[Future[A]]) yield (r += a)
- } map (_.result)
+ } map (_.result())
}
/** Returns a `Future` to the result of the first future in the list that is completed.
@@ -638,7 +638,7 @@ object Future {
* }}}
*/
def reduce[T, R >: T](futures: TraversableOnce[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
- if (futures.isEmpty) Promise[R].failure(new NoSuchElementException("reduce attempted on empty collection")).future
+ if (futures.isEmpty) Promise[R]().failure(new NoSuchElementException("reduce attempted on empty collection")).future
else sequence(futures).map(_ reduceLeft op)
}
@@ -654,7 +654,7 @@ object Future {
in.foldLeft(Promise.successful(cbf(in)).future) { (fr, a) =>
val fb = fn(a.asInstanceOf[A])
for (r <- fr; b <- fb) yield (r += b)
- }.map(_.result)
+ }.map(_.result())
// This is used to run callbacks which are internal
// to scala.concurrent; our own callbacks are only
@@ -675,111 +675,11 @@ object Future {
// by just not ever using it itself. scala.concurrent
// doesn't need to create defaultExecutionContext as
// a side effect.
- private[concurrent] object InternalCallbackExecutor extends ExecutionContext with java.util.concurrent.Executor {
+ private[concurrent] object InternalCallbackExecutor extends ExecutionContext with BatchingExecutor {
+ override protected def unbatchedExecute(r: Runnable): Unit =
+ r.run()
override def reportFailure(t: Throwable): Unit =
throw new IllegalStateException("problem in scala.concurrent internal callback", t)
-
- /**
- * The BatchingExecutor trait had to be inlined into InternalCallbackExecutor for binary compatibility.
- *
- * BatchingExecutor is a trait for an Executor
- * which groups multiple nested `Runnable.run()` calls
- * into a single Runnable passed to the original
- * Executor. This can be a useful optimization
- * because it bypasses the original context's task
- * queue and keeps related (nested) code on a single
- * thread which may improve CPU affinity. However,
- * if tasks passed to the Executor are blocking
- * or expensive, this optimization can prevent work-stealing
- * and make performance worse. Also, some ExecutionContext
- * may be fast enough natively that this optimization just
- * adds overhead.
- * The default ExecutionContext.global is already batching
- * or fast enough not to benefit from it; while
- * `fromExecutor` and `fromExecutorService` do NOT add
- * this optimization since they don't know whether the underlying
- * executor will benefit from it.
- * A batching executor can create deadlocks if code does
- * not use `scala.concurrent.blocking` when it should,
- * because tasks created within other tasks will block
- * on the outer task completing.
- * This executor may run tasks in any order, including LIFO order.
- * There are no ordering guarantees.
- *
- * WARNING: The underlying Executor's execute-method must not execute the submitted Runnable
- * in the calling thread synchronously. It must enqueue/handoff the Runnable.
- */
- // invariant: if "_tasksLocal.get ne null" then we are inside BatchingRunnable.run; if it is null, we are outside
- private val _tasksLocal = new ThreadLocal[List[Runnable]]()
-
- private class Batch(val initial: List[Runnable]) extends Runnable with BlockContext {
- private[this] var parentBlockContext: BlockContext = _
- // this method runs in the delegate ExecutionContext's thread
- override def run(): Unit = {
- require(_tasksLocal.get eq null)
-
- val prevBlockContext = BlockContext.current
- BlockContext.withBlockContext(this) {
- try {
- parentBlockContext = prevBlockContext
-
- @tailrec def processBatch(batch: List[Runnable]): Unit = batch match {
- case Nil => ()
- case head :: tail =>
- _tasksLocal set tail
- try {
- head.run()
- } catch {
- case t: Throwable =>
- // if one task throws, move the
- // remaining tasks to another thread
- // so we can throw the exception
- // up to the invoking executor
- val remaining = _tasksLocal.get
- _tasksLocal set Nil
- unbatchedExecute(new Batch(remaining)) //TODO what if this submission fails?
- throw t // rethrow
- }
- processBatch(_tasksLocal.get) // since head.run() can add entries, always do _tasksLocal.get here
- }
-
- processBatch(initial)
- } finally {
- _tasksLocal.remove()
- parentBlockContext = null
- }
- }
- }
-
- override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = {
- // if we know there will be blocking, we don't want to keep tasks queued up because it could deadlock.
- {
- val tasks = _tasksLocal.get
- _tasksLocal set Nil
- if ((tasks ne null) && tasks.nonEmpty)
- unbatchedExecute(new Batch(tasks))
- }
-
- // now delegate the blocking to the previous BC
- require(parentBlockContext ne null)
- parentBlockContext.blockOn(thunk)
- }
- }
-
- override def execute(runnable: Runnable): Unit = runnable match {
- // If we can batch the runnable
- case _: OnCompleteRunnable =>
- _tasksLocal.get match {
- case null => unbatchedExecute(new Batch(List(runnable))) // If we aren't in batching mode yet, enqueue batch
- case some => _tasksLocal.set(runnable :: some) // If we are already in batching mode, add to batch
- }
-
- // If not batchable, just delegate to underlying
- case _ =>
- unbatchedExecute(runnable)
- }
-
- private def unbatchedExecute(r: Runnable): Unit = r.run()
}
}
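
The changes above are empty-paren cleanups (`_.result()`, `Promise[R]()`) plus the switch of the internal callback executor to the new BatchingExecutor trait; for orientation, a minimal usage sketch of the combinators touched here (`Future.sequence` and `Future.reduce`), assuming the default global execution context:

import scala.concurrent.{ Await, Future }
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

object SequenceReduceDemo extends App {
  val futures = List(Future(1), Future(2), Future(3))

  // Future.sequence turns M[Future[A]] into Future[M[A]]
  val all: Future[List[Int]] = Future.sequence(futures)

  // Future.reduce combines the results; on an empty collection it fails
  // with NoSuchElementException, as the code above shows
  val sum: Future[Int] = Future.reduce(futures)(_ + _)

  println(Await.result(all, 1.second)) // List(1, 2, 3)
  println(Await.result(sum, 1.second)) // 6
}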
diff --git a/src/library/scala/concurrent/FutureTaskRunner.scala b/src/library/scala/concurrent/FutureTaskRunner.scala
index eeadaddb5e..9e27ce65b9 100644
--- a/src/library/scala/concurrent/FutureTaskRunner.scala
+++ b/src/library/scala/concurrent/FutureTaskRunner.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2009-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2009-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,7 +10,7 @@ package scala.concurrent
import scala.language.{implicitConversions, higherKinds}
-/** The `FutureTaskRunner</code> trait is a base trait of task runners
+/** The `FutureTaskRunner` trait is a base trait of task runners
* that provide some sort of future abstraction.
*
* @author Philipp Haller
diff --git a/src/library/scala/concurrent/JavaConversions.scala b/src/library/scala/concurrent/JavaConversions.scala
index d6a7c1f1bb..3d0597ca22 100644
--- a/src/library/scala/concurrent/JavaConversions.scala
+++ b/src/library/scala/concurrent/JavaConversions.scala
@@ -18,38 +18,6 @@ import scala.language.implicitConversions
*/
object JavaConversions {
- @deprecated("Use `asExecutionContext` instead.", "2.10.0")
- implicit def asTaskRunner(exec: ExecutorService): FutureTaskRunner =
- new ThreadPoolRunner {
- override protected def executor =
- exec
-
- def shutdown() =
- exec.shutdown()
- }
-
- @deprecated("Use `asExecutionContext` instead.", "2.10.0")
- implicit def asTaskRunner(exec: Executor): TaskRunner =
- new TaskRunner {
- type Task[T] = Runnable
-
- implicit def functionAsTask[T](fun: () => T): Task[T] = new Runnable {
- def run() { fun() }
- }
-
- def execute[S](task: Task[S]) {
- exec.execute(task)
- }
-
- def managedBlock(blocker: ManagedBlocker) {
- blocker.block()
- }
-
- def shutdown() {
- // do nothing
- }
- }
-
/**
* Creates a new `ExecutionContext` which uses the provided `ExecutorService`.
*/
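
With the deprecated `asTaskRunner` conversions gone, the remaining migration path is to wrap the Java executor in an `ExecutionContext`; a hedged sketch (hypothetical object name, fixed thread pool chosen arbitrarily):

import java.util.concurrent.Executors
import scala.concurrent.{ Await, ExecutionContext, Future }
import scala.concurrent.duration._

object ExecutorMigration extends App {
  val pool = Executors.newFixedThreadPool(4)

  // Where the removed asTaskRunner implicit used to produce a TaskRunner,
  // an ExecutionContext is now built explicitly from the same ExecutorService.
  implicit val ec: ExecutionContext = ExecutionContext.fromExecutorService(pool)

  val f = Future { 21 * 2 }
  println(Await.result(f, 1.second)) // 42

  pool.shutdown()
}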
diff --git a/src/library/scala/concurrent/TaskRunners.scala b/src/library/scala/concurrent/TaskRunners.scala
deleted file mode 100644
index e109a8abf9..0000000000
--- a/src/library/scala/concurrent/TaskRunners.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent
-
-import java.util.concurrent.{ThreadPoolExecutor, LinkedBlockingQueue, TimeUnit}
-
-/** The `TaskRunners` object...
- *
- * @author Philipp Haller
- */
-@deprecated("Use `ExecutionContext` instead.", "2.10.0")
-object TaskRunners {
-
- implicit val threadRunner: FutureTaskRunner =
- new ThreadRunner
-
- implicit val threadPoolRunner: FutureTaskRunner = {
- val numCores = Runtime.getRuntime().availableProcessors()
- val keepAliveTime = 60000L
- val workQueue = new LinkedBlockingQueue[Runnable]
- val exec = new ThreadPoolExecutor(numCores,
- numCores,
- keepAliveTime,
- TimeUnit.MILLISECONDS,
- workQueue,
- new ThreadPoolExecutor.CallerRunsPolicy)
- JavaConversions.asTaskRunner(exec)
- }
-
-}
diff --git a/src/library/scala/concurrent/ThreadRunner.scala b/src/library/scala/concurrent/ThreadRunner.scala
deleted file mode 100644
index cd92db9486..0000000000
--- a/src/library/scala/concurrent/ThreadRunner.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent
-
-import java.lang.Thread
-import scala.language.implicitConversions
-
-/** The `ThreadRunner` trait...
- *
- * @author Philipp Haller
- */
-@deprecated("Use `ExecutionContext` instead.", "2.10.0")
-class ThreadRunner extends FutureTaskRunner {
-
- type Task[T] = () => T
- type Future[T] = () => T
-
- implicit def functionAsTask[S](fun: () => S): Task[S] = fun
- implicit def futureAsFunction[S](x: Future[S]): () => S = x
-
- /* If expression computed successfully return it in `Right`,
- * otherwise return exception in `Left`.
- */
- private def tryCatch[A](body: => A): Either[Exception, A] =
- try Right(body) catch {
- case ex: Exception => Left(ex)
- }
-
- def execute[S](task: Task[S]) {
- val runnable = new Runnable {
- def run() { tryCatch(task()) }
- }
- (new Thread(runnable)).start()
- }
-
- def submit[S](task: Task[S]): Future[S] = {
- val result = new SyncVar[Either[Exception, S]]
- val runnable = new Runnable {
- def run() { result set tryCatch(task()) }
- }
- (new Thread(runnable)).start()
- () => result.get.fold[S](throw _, identity _)
- }
-
- @deprecated("Use `blocking` instead.", "2.10.0")
- def managedBlock(blocker: ManagedBlocker) {
- blocker.block()
- }
-
- def shutdown() {
- // do nothing
- }
-
-}
diff --git a/src/library/scala/concurrent/duration/Duration.scala b/src/library/scala/concurrent/duration/Duration.scala
index 0353d61b22..6c6155279d 100644
--- a/src/library/scala/concurrent/duration/Duration.scala
+++ b/src/library/scala/concurrent/duration/Duration.scala
@@ -103,7 +103,7 @@ object Duration {
* Extract length and time unit out of a duration, if it is finite.
*/
def unapply(d: Duration): Option[(Long, TimeUnit)] =
- if (d.isFinite) Some((d.length, d.unit)) else None
+ if (d.isFinite()) Some((d.length, d.unit)) else None
/**
* Construct a possibly infinite or undefined Duration from the given number of nanoseconds.
@@ -623,7 +623,7 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio
// if this is made a constant, then scalac will elide the conditional and always return +0.0, SI-6331
private[this] def minusZero = -0d
def /(divisor: Duration): Double =
- if (divisor.isFinite) toNanos.toDouble / divisor.toNanos
+ if (divisor.isFinite()) toNanos.toDouble / divisor.toNanos
else if (divisor eq Undefined) Double.NaN
else if ((length < 0) ^ (divisor > Zero)) 0d
else minusZero
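
The `isFinite()` calls above are empty-paren adjustments; as a small sketch of the finite/infinite distinction they guard, using the extractor and the `/` operator from this file:

import scala.concurrent.duration._

object DurationDemo extends App {
  val d: Duration = 3.seconds

  d match {
    case Duration(length, unit) => println(s"finite: $length $unit") // finite: 3 SECONDS
    case _                      => println("infinite or undefined")
  }

  println(6.seconds / 2.seconds)    // 3.0
  println(6.seconds / Duration.Inf) // 0.0, per the sign logic above
}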
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index 0aa6b37ffc..e4a0f464f9 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -96,12 +96,26 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
}
}
-
def execute(runnable: Runnable): Unit = executor match {
case fj: ForkJoinPool =>
val fjt = runnable match {
case t: ForkJoinTask[_] => t
- case runnable => new ForkJoinTask[Unit] {
+ case r => new ExecutionContextImpl.AdaptedForkJoinTask(r)
+ }
+ Thread.currentThread match {
+ case fjw: ForkJoinWorkerThread if fjw.getPool eq fj => fjt.fork()
+ case _ => fj execute fjt
+ }
+ case generic => generic execute runnable
+ }
+
+ def reportFailure(t: Throwable) = reporter(t)
+}
+
+
+private[concurrent] object ExecutionContextImpl {
+
+ final class AdaptedForkJoinTask(runnable: Runnable) extends ForkJoinTask[Unit] {
final override def setRawResult(u: Unit): Unit = ()
final override def getRawResult(): Unit = ()
final override def exec(): Boolean = try { runnable.run(); true } catch {
@@ -114,18 +128,7 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
throw anything
}
}
- }
- Thread.currentThread match {
- case fjw: ForkJoinWorkerThread if fjw.getPool eq fj => fjt.fork()
- case _ => fj execute fjt
- }
- case generic => generic execute runnable
- }
-
- def reportFailure(t: Throwable) = reporter(t)
-}
-private[concurrent] object ExecutionContextImpl {
def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl = new ExecutionContextImpl(e, reporter)
def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl with ExecutionContextExecutorService =
new ExecutionContextImpl(es, reporter) with ExecutionContextExecutorService {
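
The refactoring above pulls the Runnable-to-ForkJoinTask adapter into a companion object; observable behaviour is unchanged: under the ForkJoin-backed default context, tasks submitted from a worker thread are fork()ed rather than re-enqueued. A hedged sketch that merely exercises that path (thread names shown are illustrative):

import scala.concurrent.{ Await, Future }
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

object GlobalPoolDemo extends App {
  // The outer body runs on a pool worker thread; the Future created inside the
  // flatMap callback is submitted from a worker thread and so takes the
  // ForkJoinWorkerThread/fork() branch shown above.
  val f: Future[(String, String)] =
    Future { Thread.currentThread.getName }.flatMap { outer =>
      Future { Thread.currentThread.getName }.map(inner => (outer, inner))
    }

  println(Await.result(f, 1.second)) // two ForkJoinPool worker thread names
}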
diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala
index 8c2a77c75f..055ce6e4fa 100644
--- a/src/library/scala/concurrent/impl/Future.scala
+++ b/src/library/scala/concurrent/impl/Future.scala
@@ -12,7 +12,7 @@ package scala.concurrent.impl
import scala.concurrent.ExecutionContext
import scala.util.control.NonFatal
-import scala.util.{Try, Success, Failure}
+import scala.util.{ Success, Failure }
private[concurrent] object Future {
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index 52f1075137..7af70400ef 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -83,7 +83,7 @@ private[concurrent] object Promise {
import Duration.Undefined
atMost match {
case u if u eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period")
- case Duration.Inf => awaitUnbounded
+ case Duration.Inf => awaitUnbounded()
case Duration.MinusInf => isCompleted
case f: FiniteDuration => if (f > Duration.Zero) awaitUnsafe(f.fromNow, f) else isCompleted
}
@@ -135,7 +135,7 @@ private[concurrent] object Promise {
}
def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = {
- val preparedEC = executor.prepare
+ val preparedEC = executor.prepare()
val runnable = new CallbackRunnable[T](preparedEC, func)
@tailrec //Tries to add the callback, if already completed, it dispatches the callback to be executed
@@ -162,7 +162,7 @@ private[concurrent] object Promise {
def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = {
val completedAs = value.get
- val preparedEC = executor.prepare
+ val preparedEC = executor.prepare()
(new CallbackRunnable(preparedEC, func)).executeWithValue(completedAs)
}
diff --git a/src/library/scala/concurrent/ops.scala b/src/library/scala/concurrent/ops.scala
deleted file mode 100644
index 4c91e78dc7..0000000000
--- a/src/library/scala/concurrent/ops.scala
+++ /dev/null
@@ -1,73 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent
-
-import java.lang.Thread
-import scala.util.control.Exception.allCatch
-
-/** The object `ops` ...
- *
- * @author Martin Odersky, Stepan Koltsov, Philipp Haller
- */
-@deprecated("Use `Future` instead.", "2.10.0")
-object ops
-{
- val defaultRunner: FutureTaskRunner = TaskRunners.threadRunner
-
- /**
- * If expression computed successfully return it in `Right`,
- * otherwise return exception in `Left`.
- */
- private def tryCatch[A](body: => A): Either[Throwable, A] =
- allCatch[A] either body
-
- private def getOrThrow[T <: Throwable, A](x: Either[T, A]): A =
- x.fold[A](throw _, identity _)
-
- /** Evaluates an expression asynchronously.
- *
- * @param p the expression to evaluate
- */
- def spawn(p: => Unit)(implicit runner: TaskRunner = defaultRunner): Unit = {
- runner execute runner.functionAsTask(() => p)
- }
-
- /** Evaluates an expression asynchronously, and returns a closure for
- * retrieving the result.
- *
- * @param p the expression to evaluate
- * @return a closure which returns the result once it has been computed
- */
- def future[A](p: => A)(implicit runner: FutureTaskRunner = defaultRunner): () => A = {
- runner.futureAsFunction(runner submit runner.functionAsTask(() => p))
- }
-
- /** Evaluates two expressions in parallel. Invoking `par` blocks the current
- * thread until both expressions have been evaluated.
- *
- * @param xp the first expression to evaluate
- * @param yp the second expression to evaluate
- *
- * @return a pair holding the evaluation results
- */
- def par[A, B](xp: => A, yp: => B)(implicit runner: TaskRunner = defaultRunner): (A, B) = {
- val y = new SyncVar[Either[Throwable, B]]
- spawn { y set tryCatch(yp) }
- (xp, getOrThrow(y.get))
- }
-
-/*
- def parMap[a,b](f: a => b, xs: Array[a]): Array[b] = {
- val results = new Array[b](xs.length);
- replicate(0, xs.length) { i => results(i) = f(xs(i)) }
- results
- }
-*/
-
-}
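
The deleted `ops` object (and the TaskRunner machinery removed above it) is superseded by `scala.concurrent.Future`, as its deprecation notice says; a rough migration sketch for `spawn`, `future` and `par` (hypothetical object name, default global context):

import scala.concurrent.{ Await, Future }
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

object OpsMigration extends App {
  // ops.spawn { ... }  ->  a Future used purely for its side effect
  val spawned = Future { println("runs asynchronously") }

  // ops.future(p)      ->  a Future whose result is composed or awaited
  val fx = Future { 6 * 7 }

  // ops.par(xp, yp)    ->  two Futures zipped together
  val pair = Future { "left" } zip Future { "right" }

  println(Await.result(fx, 1.second))   // 42
  println(Await.result(pair, 1.second)) // (left,right)
  Await.ready(spawned, 1.second)
}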
diff --git a/src/library/scala/io/AnsiColor.scala b/src/library/scala/io/AnsiColor.scala
new file mode 100644
index 0000000000..6b00eb283f
--- /dev/null
+++ b/src/library/scala/io/AnsiColor.scala
@@ -0,0 +1,53 @@
+package scala
+package io
+
+trait AnsiColor {
+ /** Foreground color for ANSI black */
+ final val BLACK = "\033[30m"
+ /** Foreground color for ANSI red */
+ final val RED = "\033[31m"
+ /** Foreground color for ANSI green */
+ final val GREEN = "\033[32m"
+ /** Foreground color for ANSI yellow */
+ final val YELLOW = "\033[33m"
+ /** Foreground color for ANSI blue */
+ final val BLUE = "\033[34m"
+ /** Foreground color for ANSI magenta */
+ final val MAGENTA = "\033[35m"
+ /** Foreground color for ANSI cyan */
+ final val CYAN = "\033[36m"
+ /** Foreground color for ANSI white */
+ final val WHITE = "\033[37m"
+
+ /** Background color for ANSI black */
+ final val BLACK_B = "\033[40m"
+ /** Background color for ANSI red */
+ final val RED_B = "\033[41m"
+ /** Background color for ANSI green */
+ final val GREEN_B = "\033[42m"
+ /** Background color for ANSI yellow */
+ final val YELLOW_B = "\033[43m"
+ /** Background color for ANSI blue */
+ final val BLUE_B = "\033[44m"
+ /** Background color for ANSI magenta */
+ final val MAGENTA_B = "\033[45m"
+ /** Background color for ANSI cyan */
+ final val CYAN_B = "\033[46m"
+ /** Background color for ANSI white */
+ final val WHITE_B = "\033[47m"
+
+ /** Reset ANSI styles */
+ final val RESET = "\033[0m"
+ /** ANSI bold */
+ final val BOLD = "\033[1m"
+ /** ANSI underlines */
+ final val UNDERLINED = "\033[4m"
+ /** ANSI blink */
+ final val BLINK = "\033[5m"
+ /** ANSI reversed */
+ final val REVERSED = "\033[7m"
+ /** ANSI invisible */
+ final val INVISIBLE = "\033[8m"
+}
+
+object AnsiColor extends AnsiColor { }
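
A short usage sketch of the new `AnsiColor` constants, assuming a terminal that interprets ANSI escape sequences (object name is illustrative):

import scala.io.AnsiColor

object ColorDemo extends App with AnsiColor {
  // Each constant is just an escape sequence, so they compose by concatenation
  // and should be followed by RESET to restore the terminal's default style.
  println(s"${RED}error:${RESET} something went wrong")
  println(s"${BOLD}${GREEN_B}${BLACK}ok${RESET} (bold black on green)")
  println(AnsiColor.BLUE + "plain object access works too" + AnsiColor.RESET)
}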
diff --git a/src/library/scala/io/BufferedSource.scala b/src/library/scala/io/BufferedSource.scala
index 767f06fd3f..e250da27c3 100644
--- a/src/library/scala/io/BufferedSource.scala
+++ b/src/library/scala/io/BufferedSource.scala
@@ -73,7 +73,7 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod
if (nextLine == null) lineReader.readLine
else try nextLine finally nextLine = null
}
- if (result == null) Iterator.empty.next
+ if (result == null) Iterator.empty.next()
else result
}
}
diff --git a/src/library/scala/io/BytePickle.scala b/src/library/scala/io/BytePickle.scala
deleted file mode 100644
index 2c4a0bd2da..0000000000
--- a/src/library/scala/io/BytePickle.scala
+++ /dev/null
@@ -1,318 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.io
-
-import scala.collection.mutable
-
-/**
- * Pickler combinators.
- * Based on a Haskell library by Andrew Kennedy,
- * see <a href="http://research.microsoft.com/~akenn/fun/"
- * target="_top">http://research.microsoft.com/~akenn/fun/</a>.
- *
- * @author Philipp Haller
- * @version 1.1
- */
-@deprecated("This class will be removed.", "2.10.0")
-object BytePickle {
- abstract class SPU[T] {
- def appP(a: T, state: PicklerState): PicklerState
- def appU(state: UnPicklerState): (T, UnPicklerState)
- }
-
- def pickle[T](p: SPU[T], a: T): Array[Byte] =
- p.appP(a, new PicklerState(new Array[Byte](0), new PicklerEnv)).stream
-
- def unpickle[T](p: SPU[T], stream: Array[Byte]): T =
- p.appU(new UnPicklerState(stream, new UnPicklerEnv))._1
-
- abstract class PU[T] {
- def appP(a: T, state: Array[Byte]): Array[Byte]
- def appU(state: Array[Byte]): (T, Array[Byte])
- }
-
- def upickle[T](p: PU[T], a: T): Array[Byte] =
- p.appP(a, new Array[Byte](0))
-
- def uunpickle[T](p: PU[T], stream: Array[Byte]): T =
- p.appU(stream)._1
-
- class PicklerEnv extends mutable.HashMap[Any, Int] {
- private var cnt: Int = 64
- def nextLoc() = { cnt += 1; cnt }
- }
-
- class UnPicklerEnv extends mutable.HashMap[Int, Any] {
- private var cnt: Int = 64
- def nextLoc() = { cnt += 1; cnt }
- }
-
- class PicklerState(val stream: Array[Byte], val dict: PicklerEnv)
- class UnPicklerState(val stream: Array[Byte], val dict: UnPicklerEnv)
-
- abstract class RefDef
- case class Ref() extends RefDef
- case class Def() extends RefDef
-
- def refDef: PU[RefDef] = new PU[RefDef] {
- def appP(b: RefDef, s: Array[Byte]): Array[Byte] =
- b match {
- case Ref() => Array.concat(s, Array[Byte](0))
- case Def() => Array.concat(s, Array[Byte](1))
- };
- def appU(s: Array[Byte]): (RefDef, Array[Byte]) =
- if (s(0) == (0: Byte)) (Ref(), s.slice(1, s.length))
- else (Def(), s.slice(1, s.length));
- }
-
- val REF = 0
- val DEF = 1
-
- def unat: PU[Int] = new PU[Int] {
- def appP(n: Int, s: Array[Byte]): Array[Byte] =
- Array.concat(s, nat2Bytes(n));
- def appU(s: Array[Byte]): (Int, Array[Byte]) = {
- var num = 0
- def readNat: Int = {
- var b = 0;
- var x = 0;
- do {
- b = s(num)
- num += 1
- x = (x << 7) + (b & 0x7f);
- } while ((b & 0x80) != 0);
- x
- }
- (readNat, s.slice(num, s.length))
- }
- }
-
- def share[a](pa: SPU[a]): SPU[a] = new SPU[a] {
- def appP(v: a, state: PicklerState): PicklerState = {
- /*
- - is there some value equal to v associated with a location l in the pickle environment?
- - yes: write REF-tag to outstream together with l
- - no:
- write DEF-tag to outstream
- record current location l of outstream
- --> serialize value
- add entry to pickle environment, mapping v onto l
- */
- val pe = state.dict
- pe.get(v) match {
- case None =>
- val sPrime = refDef.appP(Def(), state.stream)
- val l = pe.nextLoc()
-
- val sPrimePrime = pa.appP(v, new PicklerState(sPrime, pe))
-
- pe.update(v, l)
-
- return sPrimePrime
- case Some(l) =>
- val sPrime = refDef.appP(Ref(), state.stream)
-
- return new PicklerState(unat.appP(l, sPrime), pe)
- }
- }
- def appU(state: UnPicklerState): (a, UnPicklerState) = {
- /*
- - first, read tag (i.e. DEF or REF)
- - if REF:
- read location l
- look up resulting value in unpickler environment
- - if DEF:
- record location l of input stream
- --> deserialize value v with argument deserializer
- add entry to unpickler environment, mapping l onto v
- */
- val upe = state.dict
- val res = refDef.appU(state.stream)
- res._1 match {
- case Def() =>
- val l = upe.nextLoc
- val res2 = pa.appU(new UnPicklerState(res._2, upe))
- upe.update(l, res2._1)
- return res2
- case Ref() =>
- val res2 = unat.appU(res._2) // read location
- upe.get(res2._1) match { // lookup value in unpickler env
- case None => throw new IllegalArgumentException("invalid unpickler environment")
- case Some(v) => return (v.asInstanceOf[a], new UnPicklerState(res2._2, upe))
- }
- }
- }
- }
-
- def ulift[t](x: t): PU[t] = new PU[t] {
- def appP(a: t, state: Array[Byte]): Array[Byte] =
- if (x != a) throw new IllegalArgumentException("value to be pickled (" + a + ") != " + x)
- else state;
- def appU(state: Array[Byte]) = (x, state)
- }
-
- def lift[t](x: t): SPU[t] = new SPU[t] {
- def appP(a: t, state: PicklerState): PicklerState =
- if (x != a) { /*throw new IllegalArgumentException("value to be pickled (" + a + ") != " + x);*/ state }
- else state;
- def appU(state: UnPicklerState) = (x, state)
- }
-
- def usequ[t,u](f: u => t, pa: PU[t], k: t => PU[u]): PU[u] = new PU[u] {
- def appP(b: u, s: Array[Byte]): Array[Byte] = {
- val a = f(b)
- val sPrime = pa.appP(a, s)
- val pb = k(a)
- val sPrimePrime = pb.appP(b, sPrime)
- sPrimePrime
- }
- def appU(s: Array[Byte]): (u, Array[Byte]) = {
- val resPa = pa.appU(s)
- val a = resPa._1
- val sPrime = resPa._2
- val pb = k(a)
- pb.appU(sPrime)
- }
- }
-
- def sequ[t,u](f: u => t, pa: SPU[t], k: t => SPU[u]): SPU[u] = new SPU[u] {
- def appP(b: u, s: PicklerState): PicklerState = {
- val a = f(b)
- val sPrime = pa.appP(a, s)
- val pb = k(a)
- pb.appP(b, sPrime)
- }
- def appU(s: UnPicklerState): (u, UnPicklerState) = {
- val resPa = pa.appU(s)
- val a = resPa._1
- val sPrime = resPa._2
- val pb = k(a)
- pb.appU(sPrime)
- }
- }
-
- def upair[a,b](pa: PU[a], pb: PU[b]): PU[(a,b)] = {
- def fst(p: (a,b)): a = p._1
- def snd(p: (a,b)): b = p._2
- usequ(fst, pa, (x: a) => usequ(snd, pb, (y: b) => ulift((x, y))))
- }
-
- def pair[a,b](pa: SPU[a], pb: SPU[b]): SPU[(a,b)] = {
- def fst(p: (a,b)): a = p._1
- def snd(p: (a,b)): b = p._2
- sequ(fst, pa, (x: a) => sequ(snd, pb, (y: b) => lift((x, y))))
- }
-
- def triple[a,b,c](pa: SPU[a], pb: SPU[b], pc: SPU[c]): SPU[(a,b,c)] = {
- def fst(p: (a,b,c)): a = p._1
- def snd(p: (a,b,c)): b = p._2
- def trd(p: (a,b,c)): c = p._3
-
- sequ(fst, pa,
- (x: a) => sequ(snd, pb,
- (y: b) => sequ(trd, pc,
- (z: c) => lift((x, y, z)))))
- }
-
- def uwrap[a,b](i: a => b, j: b => a, pa: PU[a]): PU[b] =
- usequ(j, pa, (x: a) => ulift(i(x)))
-
- def wrap[a,b](i: a => b, j: b => a, pa: SPU[a]): SPU[b] =
- sequ(j, pa, (x: a) => lift(i(x)))
-
- def appendByte(a: Array[Byte], b: Int): Array[Byte] =
- Array.concat(a, Array(b.toByte))
-
- def nat2Bytes(x: Int): Array[Byte] = {
- val buf = new mutable.ArrayBuffer[Byte]
- def writeNatPrefix(x: Int) {
- val y = x >>> 7;
- if (y != 0) writeNatPrefix(y);
- buf += ((x & 0x7f) | 0x80).asInstanceOf[Byte];
- }
- val y = x >>> 7;
- if (y != 0) writeNatPrefix(y);
- buf += (x & 0x7f).asInstanceOf[Byte];
- buf.toArray
- }
-
- def nat: SPU[Int] = new SPU[Int] {
- def appP(n: Int, s: PicklerState): PicklerState = {
- new PicklerState(Array.concat(s.stream, nat2Bytes(n)), s.dict);
- }
- def appU(s: UnPicklerState): (Int,UnPicklerState) = {
- var num = 0
- def readNat: Int = {
- var b = 0
- var x = 0
- do {
- b = s.stream(num)
- num += 1
- x = (x << 7) + (b & 0x7f);
- } while ((b & 0x80) != 0);
- x
- }
- (readNat, new UnPicklerState(s.stream.slice(num, s.stream.length), s.dict))
- }
- }
-
- def byte: SPU[Byte] = new SPU[Byte] {
- def appP(b: Byte, s: PicklerState): PicklerState =
- new PicklerState(Array.concat(s.stream, Array(b)), s.dict)
- def appU(s: UnPicklerState): (Byte, UnPicklerState) =
- (s.stream(0), new UnPicklerState(s.stream.slice(1, s.stream.length), s.dict));
- }
-
- def string: SPU[String] = share(wrap(
- (a: Array[Byte]) => (Codec fromUTF8 a).mkString,
- (s: String) => Codec toUTF8 s,
- bytearray
- ))
-
- def bytearray: SPU[Array[Byte]] = {
- wrap((l:List[Byte]) => l.toArray, (_.toList), list(byte))
- }
-
- def bool: SPU[Boolean] = {
- def toEnum(b: Boolean) = if (b) 1 else 0
- def fromEnum(n: Int) = if (n == 0) false else true
- wrap(fromEnum, toEnum, nat)
- }
-
- def ufixedList[A](pa: PU[A])(n: Int): PU[List[A]] = {
- def pairToList(p: (A, List[A])): List[A] =
- p._1 :: p._2;
- def listToPair(l: List[A]): (A, List[A]) =
- (l: @unchecked) match { case x :: xs => (x, xs) }
-
- if (n == 0) ulift(Nil)
- else
- uwrap(pairToList, listToPair, upair(pa, ufixedList(pa)(n-1)))
- }
-
- def fixedList[a](pa: SPU[a])(n: Int): SPU[List[a]] = {
- def pairToList(p: (a,List[a])): List[a] =
- p._1 :: p._2;
- def listToPair(l: List[a]): (a,List[a]) =
- (l: @unchecked) match { case x :: xs => (x, xs) }
-
- if (n == 0) lift(Nil)
- else
- wrap(pairToList, listToPair, pair(pa, fixedList(pa)(n-1)))
- }
-
- def list[a](pa: SPU[a]): SPU[List[a]] =
- sequ((l: List[a])=>l.length, nat, fixedList(pa));
-
- def ulist[a](pa: PU[a]): PU[List[a]] =
- usequ((l:List[a]) => l.length, unat, ufixedList(pa));
-
- def data[a](tag: a => Int, ps: List[()=>SPU[a]]): SPU[a] =
- sequ(tag, nat, (x: Int)=> ps.apply(x)());
-}
diff --git a/src/library/scala/io/Codec.scala b/src/library/scala/io/Codec.scala
index 5d046e48b0..bda4234460 100644
--- a/src/library/scala/io/Codec.scala
+++ b/src/library/scala/io/Codec.scala
@@ -43,42 +43,37 @@ class Codec(val charSet: Charset) {
override def toString = name
// these methods can be chained to configure the variables above
- def onMalformedInput(newAction: Action): this.type = { _onMalformedInput = newAction ; this }
- def onUnmappableCharacter(newAction: Action): this.type = { _onUnmappableCharacter = newAction ; this }
- def decodingReplaceWith(newReplacement: String): this.type = { _decodingReplacement = newReplacement ; this }
+ def onMalformedInput(newAction: Action): this.type = { _onMalformedInput = newAction ; this }
+ def onUnmappableCharacter(newAction: Action): this.type = { _onUnmappableCharacter = newAction ; this }
+ def decodingReplaceWith(newReplacement: String): this.type = { _decodingReplacement = newReplacement ; this }
def encodingReplaceWith(newReplacement: Array[Byte]): this.type = { _encodingReplacement = newReplacement ; this }
- def onCodingException(handler: Handler): this.type = { _onCodingException = handler ; this }
+ def onCodingException(handler: Handler): this.type = { _onCodingException = handler ; this }
def name = charSet.name
- def encoder =
- applyFunctions[CharsetEncoder](charSet.newEncoder(),
- (_ onMalformedInput _onMalformedInput, _onMalformedInput != null),
- (_ onUnmappableCharacter _onUnmappableCharacter, _onUnmappableCharacter != null),
- (_ replaceWith _encodingReplacement, _encodingReplacement != null)
- )
-
- def decoder =
- applyFunctions[CharsetDecoder](charSet.newDecoder(),
- (_ onMalformedInput _onMalformedInput, _onMalformedInput != null),
- (_ onUnmappableCharacter _onUnmappableCharacter, _onUnmappableCharacter != null),
- (_ replaceWith _decodingReplacement, _decodingReplacement != null)
- )
+ def encoder: CharsetEncoder = {
+ val enc = charSet.newEncoder()
+ if (_onMalformedInput ne null) enc onMalformedInput _onMalformedInput
+ if (_onUnmappableCharacter ne null) enc onUnmappableCharacter _onUnmappableCharacter
+ if (_encodingReplacement ne null) enc replaceWith _encodingReplacement
+ enc
+ }
+ def decoder: CharsetDecoder = {
+ val dec = charSet.newDecoder()
+ if (_onMalformedInput ne null) dec onMalformedInput _onMalformedInput
+ if (_onUnmappableCharacter ne null) dec onUnmappableCharacter _onUnmappableCharacter
+ if (_decodingReplacement ne null) dec replaceWith _decodingReplacement
+ dec
+ }
def wrap(body: => Int): Int =
try body catch { case e: CharacterCodingException => _onCodingException(e) }
-
- // call a series of side effecting methods on an object, finally returning the object
- private def applyFunctions[T](x: T, fs: Configure[T]*) =
- fs.foldLeft(x)((x, pair) => pair match {
- case (f, cond) => if (cond) f(x) else x
- })
}
trait LowPriorityCodecImplicits {
self: Codec.type =>
/** The Codec of Last Resort. */
- implicit def fallbackSystemCodec: Codec = defaultCharsetCodec
+ implicit lazy val fallbackSystemCodec: Codec = defaultCharsetCodec
}
object Codec extends LowPriorityCodecImplicits {
@@ -90,9 +85,9 @@ object Codec extends LowPriorityCodecImplicits {
* the fact that you can influence anything at all via -Dfile.encoding
* as an accident, with any anomalies considered "not a bug".
*/
- def defaultCharsetCodec = apply(Charset.defaultCharset)
- def fileEncodingCodec = apply(scala.util.Properties.encodingString)
- def default = defaultCharsetCodec
+ def defaultCharsetCodec = apply(Charset.defaultCharset)
+ def fileEncodingCodec = apply(scala.util.Properties.encodingString)
+ def default = defaultCharsetCodec
def apply(encoding: String): Codec = new Codec(Charset forName encoding)
def apply(charSet: Charset): Codec = new Codec(charSet)
@@ -130,7 +125,7 @@ object Codec extends LowPriorityCodecImplicits {
bytes
}
- implicit def string2codec(s: String) = apply(s)
- implicit def charset2codec(c: Charset) = apply(c)
- implicit def decoder2codec(cd: CharsetDecoder) = apply(cd)
+ implicit def string2codec(s: String): Codec = apply(s)
+ implicit def charset2codec(c: Charset): Codec = apply(c)
+ implicit def decoder2codec(cd: CharsetDecoder): Codec = apply(cd)
}
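
The rewritten `encoder`/`decoder` builders simply apply whichever actions were configured; a hedged sketch of configuring a `Codec` this way and using it implicitly with `Source` (the file path is hypothetical):

import java.nio.charset.CodingErrorAction
import scala.io.{ Codec, Source }

object CodecDemo {
  // Configure a UTF-8 codec that replaces malformed or unmappable input
  // instead of throwing a MalformedInputException.
  implicit val codec: Codec = Codec("UTF-8")
    .onMalformedInput(CodingErrorAction.REPLACE)
    .onUnmappableCharacter(CodingErrorAction.REPLACE)

  def main(args: Array[String]): Unit = {
    // hypothetical path; any file of dubious encoding would do
    val text = Source.fromFile("/tmp/maybe-not-utf8.txt").mkString
    println(text.length)
  }
}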
diff --git a/src/library/scala/io/Position.scala b/src/library/scala/io/Position.scala
index daa4e103be..b96349803d 100644
--- a/src/library/scala/io/Position.scala
+++ b/src/library/scala/io/Position.scala
@@ -68,14 +68,6 @@ abstract class Position {
}
object Position extends Position {
- /** The undefined position */
- @deprecated("This will be removed", "2.9.0")
- final val NOPOS = 0
-
- /** The first position in a source file */
- @deprecated("This will be removed", "2.9.0")
- final val FIRSTPOS = encode(1, 1)
-
def checkInput(line: Int, column: Int) {
if (line < 0)
throw new IllegalArgumentException(line + " < 0")
diff --git a/src/library/scala/io/ReadStdin.scala b/src/library/scala/io/ReadStdin.scala
new file mode 100644
index 0000000000..e82c26ef7a
--- /dev/null
+++ b/src/library/scala/io/ReadStdin.scala
@@ -0,0 +1,228 @@
+package scala
+package io
+
+import java.text.MessageFormat
+
+/** private[scala] because this is not functionality we should be providing
+ * in the standard library, at least not in this idiosyncratic form.
+ * Factored into trait because it is better code structure regardless.
+ */
+private[scala] trait ReadStdin {
+ import scala.Console._
+
+ /** Read a full line from the default input. Returns `null` if the end of the
+ * input stream has been reached.
+ *
+ * @return the string read from the terminal or null if the end of stream was reached.
+ */
+ def readLine(): String = in.readLine()
+
+ /** Print formatted text to the default output and read a full line from the default input.
+ * Returns `null` if the end of the input stream has been reached.
+ *
+ * @param text the format of the text to print out, as in `printf`.
+ * @param args the parameters used to instantiate the format, as in `printf`.
+ * @return the string read from the default input
+ */
+ def readLine(text: String, args: Any*): String = {
+ printf(text, args: _*)
+ readLine()
+ }
+
+ /** Reads a boolean value from an entire line of the default input.
+ * Has a fairly liberal interpretation of the input.
+ *
+ * @return the boolean value read, or false if it couldn't be converted to a boolean
+ * @throws java.io.EOFException if the end of the input stream has been reached.
+ */
+ def readBoolean(): Boolean = {
+ val s = readLine()
+ if (s == null)
+ throw new java.io.EOFException("Console has reached end of input")
+ else
+ s.toLowerCase() match {
+ case "true" => true
+ case "t" => true
+ case "yes" => true
+ case "y" => true
+ case _ => false
+ }
+ }
+
+ /** Reads a byte value from an entire line of the default input.
+ *
+ * @return the Byte that was read
+ * @throws java.io.EOFException if the end of the
+ * input stream has been reached.
+ * @throws java.lang.NumberFormatException if the value couldn't be converted to a Byte
+ */
+ def readByte(): Byte = {
+ val s = readLine()
+ if (s == null)
+ throw new java.io.EOFException("Console has reached end of input")
+ else
+ s.toByte
+ }
+
+ /** Reads a short value from an entire line of the default input.
+ *
+ * @return the short that was read
+ * @throws java.io.EOFException if the end of the
+ * input stream has been reached.
+ * @throws java.lang.NumberFormatException if the value couldn't be converted to a Short
+ */
+ def readShort(): Short = {
+ val s = readLine()
+ if (s == null)
+ throw new java.io.EOFException("Console has reached end of input")
+ else
+ s.toShort
+ }
+
+ /** Reads a char value from an entire line of the default input.
+ *
+ * @return the Char that was read
+ * @throws java.io.EOFException if the end of the
+ * input stream has been reached.
+ * @throws java.lang.StringIndexOutOfBoundsException if the line read from default input was empty
+ */
+ def readChar(): Char = {
+ val s = readLine()
+ if (s == null)
+ throw new java.io.EOFException("Console has reached end of input")
+ else
+ s charAt 0
+ }
+
+ /** Reads an int value from an entire line of the default input.
+ *
+ * @return the Int that was read
+ * @throws java.io.EOFException if the end of the
+ * input stream has been reached.
+ * @throws java.lang.NumberFormatException if the value couldn't be converted to an Int
+ */
+ def readInt(): Int = {
+ val s = readLine()
+ if (s == null)
+ throw new java.io.EOFException("Console has reached end of input")
+ else
+ s.toInt
+ }
+
+ /** Reads a long value from an entire line of the default input.
+ *
+ * @return the Long that was read
+ * @throws java.io.EOFException if the end of the
+ * input stream has been reached.
+ * @throws java.lang.NumberFormatException if the value couldn't be converted to a Long
+ */
+ def readLong(): Long = {
+ val s = readLine()
+ if (s == null)
+ throw new java.io.EOFException("Console has reached end of input")
+ else
+ s.toLong
+ }
+
+ /** Reads a float value from an entire line of the default input.
+ * @return the Float that was read.
+ * @throws java.io.EOFException if the end of the
+ * input stream has been reached.
+ * @throws java.lang.NumberFormatException if the value couldn't be converted to a Float
+ *
+ */
+ def readFloat(): Float = {
+ val s = readLine()
+ if (s == null)
+ throw new java.io.EOFException("Console has reached end of input")
+ else
+ s.toFloat
+ }
+
+ /** Reads a double value from an entire line of the default input.
+ *
+ * @return the Double that was read.
+ * @throws java.io.EOFException if the end of the
+ * input stream has been reached.
+ * @throws java.lang.NumberFormatException if the value couldn't be converted to a Double
+ */
+ def readDouble(): Double = {
+ val s = readLine()
+ if (s == null)
+ throw new java.io.EOFException("Console has reached end of input")
+ else
+ s.toDouble
+ }
+
+ /** Reads in some structured input (from the default input), specified by
+ * a format specifier. See class `java.text.MessageFormat` for details of
+ * the format specification.
+ *
+ * @param format the format of the input.
+ * @return a list of all extracted values.
+ * @throws java.io.EOFException if the end of the input stream has been
+ * reached.
+ */
+ def readf(format: String): List[Any] = {
+ val s = readLine()
+ if (s == null)
+ throw new java.io.EOFException("Console has reached end of input")
+ else
+ textComponents(new MessageFormat(format).parse(s))
+ }
+
+ /** Reads in some structured input (from the default input), specified by
+ * a format specifier, returning only the first value extracted, according
+ * to the format specification.
+ *
+ * @param format format string, as accepted by `readf`.
+ * @return The first value that was extracted from the input
+ */
+ def readf1(format: String): Any = readf(format).head
+
+ /** Reads in some structured input (from the default input), specified
+ * by a format specifier, returning only the first two values extracted,
+ * according to the format specification.
+ *
+ * @param format format string, as accepted by `readf`.
+ * @return A [[scala.Tuple2]] containing the first two values extracted
+ */
+ def readf2(format: String): (Any, Any) = {
+ val res = readf(format)
+ (res.head, res.tail.head)
+ }
+
+ /** Reads in some structured input (from the default input), specified
+ * by a format specifier, returning only the first three values extracted,
+ * according to the format specification.
+ *
+ * @param format format string, as accepted by `readf`.
+ * @return A [[scala.Tuple3]] containing the first three values extracted
+ */
+ def readf3(format: String): (Any, Any, Any) = {
+ val res = readf(format)
+ (res.head, res.tail.head, res.tail.tail.head)
+ }
+
+ private def textComponents(a: Array[AnyRef]): List[Any] = {
+ var i: Int = a.length - 1
+ var res: List[Any] = Nil
+ while (i >= 0) {
+ res = (a(i) match {
+ case x: java.lang.Boolean => x.booleanValue()
+ case x: java.lang.Byte => x.byteValue()
+ case x: java.lang.Short => x.shortValue()
+ case x: java.lang.Character => x.charValue()
+ case x: java.lang.Integer => x.intValue()
+ case x: java.lang.Long => x.longValue()
+ case x: java.lang.Float => x.floatValue()
+ case x: java.lang.Double => x.doubleValue()
+ case x => x
+ }) :: res
+ i -= 1
+ }
+ res
+ }
+}
+
+object ReadStdin extends ReadStdin { }
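
`ReadStdin` itself is `private[scala]`, so application code reaches these readers through the public console API; a small sketch of the read-loop pattern the methods above support (`readLine` shown, with `toInt` standing in for `readInt`):

object ReadLoop extends App {
  // readLine() returns null once the end of the input stream is reached.
  print("Enter an integer (blank line to stop): ")
  var line = Console.readLine()
  var sum = 0
  while (line != null && line.nonEmpty) {
    sum += line.toInt // NumberFormatException on bad input, as documented above
    print("Another (blank line to stop): ")
    line = Console.readLine()
  }
  println(s"sum = $sum")
}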
diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala
index b13729aefe..f976c7eb0a 100644
--- a/src/library/scala/io/Source.scala
+++ b/src/library/scala/io/Source.scala
@@ -194,11 +194,11 @@ abstract class Source extends Iterator[Char] {
lazy val iter: BufferedIterator[Char] = Source.this.iter.buffered
def isNewline(ch: Char) = ch == '\r' || ch == '\n'
def getc() = iter.hasNext && {
- val ch = iter.next
+ val ch = iter.next()
if (ch == '\n') false
else if (ch == '\r') {
if (iter.hasNext && iter.head == '\n')
- iter.next
+ iter.next()
false
}
@@ -209,7 +209,7 @@ abstract class Source extends Iterator[Char] {
}
def hasNext = iter.hasNext
def next = {
- sb.clear
+ sb.clear()
while (getc()) { }
sb.toString
}
@@ -227,7 +227,7 @@ abstract class Source extends Iterator[Char] {
/** Returns next character.
*/
- def next(): Char = positioner.next
+ def next(): Char = positioner.next()
class Positioner(encoder: Position) {
def this() = this(RelaxedPosition)
@@ -245,7 +245,7 @@ abstract class Source extends Iterator[Char] {
var tabinc = 4
def next(): Char = {
- ch = iter.next
+ ch = iter.next()
pos = encoder.encode(cline, ccol)
ch match {
case '\n' =>
@@ -267,7 +267,7 @@ abstract class Source extends Iterator[Char] {
}
object RelaxedPositioner extends Positioner(RelaxedPosition) { }
object NoPositioner extends Positioner(Position) {
- override def next(): Char = iter.next
+ override def next(): Char = iter.next()
}
def ch = positioner.ch
def pos = positioner.pos
diff --git a/src/library/scala/io/UTF8Codec.scala b/src/library/scala/io/UTF8Codec.scala
deleted file mode 100644
index e4c2145153..0000000000
--- a/src/library/scala/io/UTF8Codec.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.io
-
-/**
- * @author Martin Odersky
- * @version 1.0, 04/10/2004
- */
-@deprecated("This class will be removed.", "2.10.0")
-object UTF8Codec {
- final val UNI_REPLACEMENT_CHAR: Int = 0x0000FFFD
- final val UNI_REPLACEMENT_BYTES = Array[Byte](-17, -65, -67)
-
- // Note, from http://unicode.org/faq/utf_bom.html#utf8-5
- //
- // A different issue arises if an unpaired surrogate is encountered when converting
- // ill-formed UTF-16 data. By represented such an unpaired surrogate on its own as a
- // 3-byte sequence, the resulting UTF-8 data stream would become ill-formed.
- // While it faithfully reflects the nature of the input, Unicode conformance
- // requires that encoding form conversion always results in valid data stream.
- // Therefore a converter must treat this as an error.
- //
- // Some useful locations:
- // http://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt
-}
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index 7c14ed3a9e..d8f4337b8f 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -25,12 +25,6 @@ object BigDecimal {
private val maxCached = 512
val defaultMathContext = MathContext.DECIMAL128
- @deprecated("Use Long.MinValue", "2.9.0")
- val MinLong = new BigDecimal(BigDec valueOf Long.MinValue, defaultMathContext)
-
- @deprecated("Use Long.MaxValue", "2.9.0")
- val MaxLong = new BigDecimal(BigDec valueOf Long.MaxValue, defaultMathContext)
-
/** Cache ony for defaultMathContext using BigDecimals in a small range. */
private lazy val cache = new Array[BigDecimal](maxCached - minCached + 1)
@@ -177,7 +171,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
* with unequal hashCodes.
*/
override def hashCode(): Int =
- if (isWhole) unifiedPrimitiveHashcode
+ if (isWhole()) unifiedPrimitiveHashcode()
else doubleValue.##
/** Compares this BigDecimal with the specified value for equality.
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index fdba0ec716..719099b405 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -23,12 +23,6 @@ object BigInt {
private val cache = new Array[BigInt](maxCached - minCached + 1)
private val minusOne = BigInteger.valueOf(-1)
- @deprecated("Use Long.MinValue", "2.9.0")
- val MinLong = BigInt(Long.MinValue)
-
- @deprecated("Use Long.MaxValue", "2.9.0")
- val MaxLong = BigInt(Long.MaxValue)
-
/** Constructs a `BigInt` whose value is equal to that of the
* specified integer value.
*
@@ -118,7 +112,7 @@ object BigInt {
class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericConversions with Serializable {
/** Returns the hash code for this BigInt. */
override def hashCode(): Int =
- if (isValidLong) unifiedPrimitiveHashcode
+ if (isValidLong) unifiedPrimitiveHashcode()
else bigInteger.##
/** Compares this BigInt with the specified value for equality.
@@ -295,9 +289,6 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
*/
def signum: Int = this.bigInteger.signum()
- @deprecated("Use ~bigInt (the unary_~ method) instead", "2.10.0")
- def ~ : BigInt = ~this
-
/** Returns the bitwise complement of this BigInt
*/
def unary_~ : BigInt = new BigInt(this.bigInteger.not())
@@ -364,7 +355,7 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
def charValue = intValue.toChar
/** Converts this BigInt to an <tt>int</tt>.
- * If the BigInt is too big to fit in a int, only the low-order 32 bits
+ * If the BigInt is too big to fit in an int, only the low-order 32 bits
* are returned. Note that this conversion can lose information about the
* overall magnitude of the BigInt value as well as return a result with
* the opposite sign.
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index e9b92541c2..d1a4e7c35c 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -33,7 +33,7 @@ import scala.language.{implicitConversions, higherKinds}
* }}}
*
* An Ordering[T] is implemented by specifying compare(a:T, b:T), which
- * decides how to order to instances a and b. Instances of Ordering[T] can be
+ * decides how to order two instances a and b. Instances of Ordering[T] can be
* used by things like scala.util.Sorting to sort collections like Array[T].
*
* For example:
@@ -173,7 +173,7 @@ object Ordering extends LowPriorityOrderingImplicits {
val ye = y.iterator
while (xe.hasNext && ye.hasNext) {
- val res = ord.compare(xe.next, ye.next)
+ val res = ord.compare(xe.next(), ye.next())
if (res != 0) return res
}
@@ -347,7 +347,7 @@ object Ordering extends LowPriorityOrderingImplicits {
val ye = y.iterator
while (xe.hasNext && ye.hasNext) {
- val res = ord.compare(xe.next, ye.next)
+ val res = ord.compare(xe.next(), ye.next())
if (res != 0) return res
}
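
The doc fix above ("order two instances") and the element-wise comparison loop suggest a quick sketch of defining and using an `Ordering` (names are illustrative):

import scala.util.Sorting

object OrderingDemo extends App {
  case class Person(name: String, age: Int)

  // compare(a, b) decides how to order two instances a and b
  implicit val byAge: Ordering[Person] = new Ordering[Person] {
    def compare(a: Person, b: Person): Int = a.age compare b.age
  }

  val people = Array(Person("eve", 41), Person("bob", 29), Person("ada", 36))
  Sorting.quickSort(people) // uses the implicit Ordering[Person]
  println(people.toList)    // List(Person(bob,29), Person(ada,36), Person(eve,41))

  // Orderings for collections compare element-wise, as in the loop above
  println(Ordering.Iterable[Int].compare(List(1, 2, 3), List(1, 2, 4))) // negative
}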
diff --git a/src/library/scala/math/ScalaNumericConversions.scala b/src/library/scala/math/ScalaNumericConversions.scala
index 6ddf48d03b..e748841c12 100644
--- a/src/library/scala/math/ScalaNumericConversions.scala
+++ b/src/library/scala/math/ScalaNumericConversions.scala
@@ -8,8 +8,6 @@
package scala.math
-import java.{ lang => jl }
-
/** A slightly more specific conversion trait for classes which
* extend ScalaNumber (which excludes value classes.)
*/
@@ -34,37 +32,37 @@ trait ScalaNumericAnyConversions extends Any {
/** Returns the value of this as a [[scala.Char]]. This may involve
* rounding or truncation.
*/
- def toChar = intValue.toChar
+ def toChar = intValue().toChar
/** Returns the value of this as a [[scala.Byte]]. This may involve
* rounding or truncation.
*/
- def toByte = byteValue
+ def toByte = byteValue()
/** Returns the value of this as a [[scala.Short]]. This may involve
* rounding or truncation.
*/
- def toShort = shortValue
+ def toShort = shortValue()
/** Returns the value of this as an [[scala.Int]]. This may involve
* rounding or truncation.
*/
- def toInt = intValue
+ def toInt = intValue()
/** Returns the value of this as a [[scala.Long]]. This may involve
* rounding or truncation.
*/
- def toLong = longValue
+ def toLong = longValue()
/** Returns the value of this as a [[scala.Float]]. This may involve
* rounding or truncation.
*/
- def toFloat = floatValue
+ def toFloat = floatValue()
/** Returns the value of this as a [[scala.Double]]. This may involve
* rounding or truncation.
*/
- def toDouble = doubleValue
+ def toDouble = doubleValue()
/** Returns `true` iff this has a zero fractional part, and is within the
* range of [[scala.Byte]] MinValue and MaxValue; otherwise returns `false`.
diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala
index 84f6f0be9c..224112c11c 100644
--- a/src/library/scala/package.scala
+++ b/src/library/scala/package.scala
@@ -34,12 +34,6 @@ package object scala {
override def toString = "object AnyRef"
}
- @deprecated("instead of `@serializable class C`, use `class C extends Serializable`", "2.9.0")
- type serializable = annotation.serializable
-
- @deprecated("instead of `@cloneable class C`, use `class C extends Cloneable`", "2.10.0")
- type cloneable = annotation.cloneable
-
type TraversableOnce[+A] = scala.collection.TraversableOnce[A]
type Traversable[+A] = scala.collection.Traversable[A]
@@ -95,7 +89,10 @@ package object scala {
val Equiv = scala.math.Equiv
type Fractional[T] = scala.math.Fractional[T]
+ val Fractional = scala.math.Fractional
+
type Integral[T] = scala.math.Integral[T]
+ val Integral = scala.math.Integral
type Numeric[T] = scala.math.Numeric[T]
val Numeric = scala.math.Numeric
@@ -121,14 +118,12 @@ package object scala {
// Annotations which we might move to annotation.*
/*
type SerialVersionUID = annotation.SerialVersionUID
- type cloneable = annotation.cloneable
type deprecated = annotation.deprecated
type deprecatedName = annotation.deprecatedName
type inline = annotation.inline
type native = annotation.native
- type noinline = noannotation.inline
+ type noinline = annotation.noinline
type remote = annotation.remote
- type serializable = annotation.serializable
type specialized = annotation.specialized
type transient = annotation.transient
type throws = annotation.throws
diff --git a/src/library/scala/parallel/Future.scala b/src/library/scala/parallel/Future.scala
deleted file mode 100644
index e255a5772b..0000000000
--- a/src/library/scala/parallel/Future.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.parallel
-
-
-
-/** A future is a function without parameters that will block the caller if
- * the parallel computation associated with the function is not completed.
- *
- * @tparam R the type of the result
- *
- * @since 2.9
- */
-@deprecated("Use `scala.concurrent.Future` instead.", "2.10.0")
-trait Future[@specialized +R] extends (() => R) {
- /** Returns a result once the parallel computation completes. If the
- * computation produced an exception, an exception is forwarded.
- *
- * '''Note:''' creating a circular dependency between futures by calling
- * this method will result in a deadlock.
- *
- * @return the result
- * @throws the exception that was thrown during a parallel computation
- */
- def apply(): R
-
- /** Returns `true` if the parallel computation is completed.
- *
- * @return `true` if the parallel computation is completed, `false` otherwise
- */
- def isDone(): Boolean
-}
-
diff --git a/src/library/scala/parallel/package.scala.disabled b/src/library/scala/parallel/package.scala.disabled
deleted file mode 100644
index 45f5470d03..0000000000
--- a/src/library/scala/parallel/package.scala.disabled
+++ /dev/null
@@ -1,178 +0,0 @@
-package scala
-
-
-
-import scala.concurrent.forkjoin._
-
-
-/** This package object contains various parallel operations.
- *
- * @define invokingPar
- * Invoking a parallel computation creates a future which will
- * hold the result of the computation once it completes. Querying
- * the result of a future before its parallel computation has completed
- * will block the caller. For all practical concerns, the dependency
- * chain obtained by querying results of unfinished futures can have
- * arbitrary lengths. However, care must be taken not to create a
- * circular dependency, as this will result in a deadlock.
- *
- * Additionally, if the parallel computation performs a blocking call
- * (e.g. an I/O operation or waiting for a lock) other than waiting for a future,
- * it should do so by invoking the `block` method. This is another
- * form of waiting that could potentially create a circular dependency,
- * an the user should take care not to do this.
- *
- * Users should be aware that invoking a parallel computation has a
- * certain overhead. Parallel computations should not be invoked for
- * small computations, as this can lead to bad performance. A rule of the
- * thumb is having parallel computations equivalent to a loop
- * with 50000 arithmetic operations (at least). If a parallel computation
- * is invoked within another parallel computation, then it should be
- * computationally equivalent to a loop with 10000 arithmetic operations.
- */
-package object parallel {
-
- private[scala] val forkjoinpool = new ForkJoinPool()
-
- private class Task[T](body: =>T) extends RecursiveTask[T] with Future[T] {
- def compute = body
- def apply() = join()
- }
-
- private final def newTask[T](body: =>T) = new Task[T](body)
-
- private final def executeTask[T](task: RecursiveTask[T]) {
- if (Thread.currentThread().isInstanceOf[ForkJoinWorkerThread]) task.fork
- else forkjoinpool.execute(task)
- }
-
- /* public methods */
-
- /** Performs a call which can potentially block execution.
- *
- * Example:
- * {{{
- * val lock = new ReentrantLock
- *
- * // ... do something ...
- *
- * blocking {
- * if (!lock.hasLock) lock.lock()
- * }
- * }}}
- *
- * '''Note:''' calling methods that wait arbitrary amounts of time
- * (e.g. for I/O operations or locks) may severely decrease performance
- * or even result in deadlocks. This does not include waiting for
- * results of futures.
- */
- def blocking[T](body: =>T): T = {
- if (Thread.currentThread().isInstanceOf[ForkJoinWorkerThread]) {
- val blocker = new ForkJoinPool.ManagedBlocker {
- @volatile var done = false
- @volatile var result: Any = _
- def block() = {
- result = body
- done = true
- true
- }
- def isReleasable() = done
- }
- ForkJoinPool.managedBlock(blocker, true)
- blocker.result.asInstanceOf[T]
- } else body
- }
-
- /** Starts a parallel computation and returns a future.
- *
- * $invokingPar
- *
- * @tparam T the type of the result of the parallel computation
- * @param body the computation to be invoked in parallel
- * @return a future with the result
- */
- def par[T](body: =>T): Future[T] = {
- val task = newTask(body)
- executeTask(task)
- task
- }
-
- /** Starts 2 parallel computations and returns a future.
- *
- * $invokingPar
- *
- * @tparam T1 the type of the result of 1st the parallel computation
- * @tparam T2 the type of the result of 2nd the parallel computation
- * @param b1 the 1st computation to be invoked in parallel
- * @param b2 the 2nd computation to be invoked in parallel
- * @return a tuple of futures corresponding to parallel computations
- */
- def par[T1, T2](b1: =>T1, b2: =>T2): (Future[T1], Future[T2]) = {
- val t1 = newTask(b1)
- executeTask(t1)
- val t2 = newTask(b2)
- executeTask(t2)
- (t1, t2)
- }
-
- /** Starts 3 parallel computations and returns a future.
- *
- * $invokingPar
- *
- * @tparam T1 the type of the result of 1st the parallel computation
- * @tparam T2 the type of the result of 2nd the parallel computation
- * @tparam T3 the type of the result of 3rd the parallel computation
- * @param b1 the 1st computation to be invoked in parallel
- * @param b2 the 2nd computation to be invoked in parallel
- * @param b3 the 3rd computation to be invoked in parallel
- * @return a tuple of futures corresponding to parallel computations
- */
- def par[T1, T2, T3](b1: =>T1, b2: =>T2, b3: =>T3): (Future[T1], Future[T2], Future[T3]) = {
- val t1 = newTask(b1)
- executeTask(t1)
- val t2 = newTask(b2)
- executeTask(t2)
- val t3 = newTask(b3)
- executeTask(t3)
- (t1, t2, t3)
- }
-
- /** Starts 4 parallel computations and returns a future.
- *
- * $invokingPar
- *
- * @tparam T1 the type of the result of 1st the parallel computation
- * @tparam T2 the type of the result of 2nd the parallel computation
- * @tparam T3 the type of the result of 3rd the parallel computation
- * @tparam T4 the type of the result of 4th the parallel computation
- * @param b1 the 1st computation to be invoked in parallel
- * @param b2 the 2nd computation to be invoked in parallel
- * @param b3 the 3rd computation to be invoked in parallel
- * @param b4 the 4th computation to be invoked in parallel
- * @return a tuple of futures corresponding to parallel computations
- */
- def par[T1, T2, T3, T4](b1: =>T1, b2: =>T2, b3: =>T3, b4: =>T4): (Future[T1], Future[T2], Future[T3], Future[T4]) = {
- val t1 = newTask(b1)
- executeTask(t1)
- val t2 = newTask(b2)
- executeTask(t2)
- val t3 = newTask(b3)
- executeTask(t3)
- val t4 = newTask(b4)
- executeTask(t4)
- (t1, t2, t3, t4)
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
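
The removed `par` and `blocking` helpers have close counterparts in `scala.concurrent`. A sketch under the same global `ExecutionContext` assumption; the names `par2` and `par3` and the file path are illustrative, not part of any API:

{{{
import scala.concurrent.{Future, blocking}
import scala.concurrent.ExecutionContext.Implicits.global

// Two independent computations running in parallel.
def par2[T1, T2](b1: => T1, b2: => T2): (Future[T1], Future[T2]) =
  (Future(b1), Future(b2))

// Three-way variant.
def par3[T1, T2, T3](b1: => T1, b2: => T2, b3: => T3): (Future[T1], Future[T2], Future[T3]) =
  (Future(b1), Future(b2), Future(b3))

// Marking a blocking call, as the removed blocking method did.
val lines: Future[List[String]] = Future {
  blocking { scala.io.Source.fromFile("/etc/hosts").getLines().toList }
}
}}}
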
diff --git a/src/library/scala/ref/SoftReference.scala b/src/library/scala/ref/SoftReference.scala
index b414db6e97..e4ce667981 100644
--- a/src/library/scala/ref/SoftReference.scala
+++ b/src/library/scala/ref/SoftReference.scala
@@ -13,7 +13,8 @@ package scala.ref
* @author Sean McDirmid
*/
class SoftReference[+T <: AnyRef](value : T, queue : ReferenceQueue[T]) extends ReferenceWrapper[T] {
- def this(value : T) = this(value, null);
+ def this(value : T) = this(value, null)
+
val underlying: java.lang.ref.SoftReference[_ <: T] =
new SoftReferenceWithWrapper[T](value, queue, this)
}
diff --git a/src/library/scala/reflect/NameTransformer.scala b/src/library/scala/reflect/NameTransformer.scala
index 384ebc6134..8a1cce6b02 100755
--- a/src/library/scala/reflect/NameTransformer.scala
+++ b/src/library/scala/reflect/NameTransformer.scala
@@ -93,8 +93,8 @@ object NameTransformer {
*/
def decode(name0: String): String = {
//System.out.println("decode: " + name);//DEBUG
- val name = if (name0.endsWith("<init>")) name0.substring(0, name0.length() - ("<init>").length()) + "this"
- else name0;
+ val name = if (name0.endsWith("<init>")) name0.stripSuffix("<init>") + "this"
+ else name0
var buf: StringBuilder = null
val len = name.length()
var i = 0
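
The rewrite trades the manual `substring` arithmetic for `stripSuffix`, which is a no-op when the suffix is absent, mirroring the `endsWith` guard. A small illustration; the `Foo<init>` value is made up, the operator encodings are the standard ones:

{{{
import scala.reflect.NameTransformer

NameTransformer.decode("$plus$eq")       // "+="
NameTransformer.decode("$colon$colon")   // "::"

// stripSuffix only removes the suffix when it is present.
"Foo<init>".stripSuffix("<init>")        // "Foo"
"Foo".stripSuffix("<init>")              // "Foo"
}}}
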
diff --git a/src/library/scala/runtime/AbstractFunction0.scala b/src/library/scala/runtime/AbstractFunction0.scala
index 1b351c62ae..1e677e8008 100644
--- a/src/library/scala/runtime/AbstractFunction0.scala
+++ b/src/library/scala/runtime/AbstractFunction0.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction1.scala b/src/library/scala/runtime/AbstractFunction1.scala
index a68a82e6a2..8d68017a6f 100644
--- a/src/library/scala/runtime/AbstractFunction1.scala
+++ b/src/library/scala/runtime/AbstractFunction1.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction10.scala b/src/library/scala/runtime/AbstractFunction10.scala
index 72c0a2e69d..776f52238d 100644
--- a/src/library/scala/runtime/AbstractFunction10.scala
+++ b/src/library/scala/runtime/AbstractFunction10.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction11.scala b/src/library/scala/runtime/AbstractFunction11.scala
index 031f3044a1..76cd8fbb3c 100644
--- a/src/library/scala/runtime/AbstractFunction11.scala
+++ b/src/library/scala/runtime/AbstractFunction11.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction12.scala b/src/library/scala/runtime/AbstractFunction12.scala
index 9823edbc60..10066ed4b3 100644
--- a/src/library/scala/runtime/AbstractFunction12.scala
+++ b/src/library/scala/runtime/AbstractFunction12.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction13.scala b/src/library/scala/runtime/AbstractFunction13.scala
index 528719b216..6c3a45734c 100644
--- a/src/library/scala/runtime/AbstractFunction13.scala
+++ b/src/library/scala/runtime/AbstractFunction13.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction14.scala b/src/library/scala/runtime/AbstractFunction14.scala
index ecae45a107..bf2b6736f4 100644
--- a/src/library/scala/runtime/AbstractFunction14.scala
+++ b/src/library/scala/runtime/AbstractFunction14.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction15.scala b/src/library/scala/runtime/AbstractFunction15.scala
index 5f5e8afcf7..5136f666c8 100644
--- a/src/library/scala/runtime/AbstractFunction15.scala
+++ b/src/library/scala/runtime/AbstractFunction15.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction16.scala b/src/library/scala/runtime/AbstractFunction16.scala
index c0093c4fbb..dbafab8301 100644
--- a/src/library/scala/runtime/AbstractFunction16.scala
+++ b/src/library/scala/runtime/AbstractFunction16.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction17.scala b/src/library/scala/runtime/AbstractFunction17.scala
index caae343a79..9c36dbf5d8 100644
--- a/src/library/scala/runtime/AbstractFunction17.scala
+++ b/src/library/scala/runtime/AbstractFunction17.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction18.scala b/src/library/scala/runtime/AbstractFunction18.scala
index 9a2bdffee1..30eee9586f 100644
--- a/src/library/scala/runtime/AbstractFunction18.scala
+++ b/src/library/scala/runtime/AbstractFunction18.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction19.scala b/src/library/scala/runtime/AbstractFunction19.scala
index 1dbbd61004..14baf5f1eb 100644
--- a/src/library/scala/runtime/AbstractFunction19.scala
+++ b/src/library/scala/runtime/AbstractFunction19.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction2.scala b/src/library/scala/runtime/AbstractFunction2.scala
index 0905ea178c..223ade9983 100644
--- a/src/library/scala/runtime/AbstractFunction2.scala
+++ b/src/library/scala/runtime/AbstractFunction2.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction20.scala b/src/library/scala/runtime/AbstractFunction20.scala
index eb4c085427..f5c29571bf 100644
--- a/src/library/scala/runtime/AbstractFunction20.scala
+++ b/src/library/scala/runtime/AbstractFunction20.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction21.scala b/src/library/scala/runtime/AbstractFunction21.scala
index 98e32b22f1..15feea3a66 100644
--- a/src/library/scala/runtime/AbstractFunction21.scala
+++ b/src/library/scala/runtime/AbstractFunction21.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction22.scala b/src/library/scala/runtime/AbstractFunction22.scala
index 67b13399d1..d77369ff01 100644
--- a/src/library/scala/runtime/AbstractFunction22.scala
+++ b/src/library/scala/runtime/AbstractFunction22.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction3.scala b/src/library/scala/runtime/AbstractFunction3.scala
index 3a45cdcea5..f863509214 100644
--- a/src/library/scala/runtime/AbstractFunction3.scala
+++ b/src/library/scala/runtime/AbstractFunction3.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction4.scala b/src/library/scala/runtime/AbstractFunction4.scala
index fbf55344f6..5927015ef8 100644
--- a/src/library/scala/runtime/AbstractFunction4.scala
+++ b/src/library/scala/runtime/AbstractFunction4.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction5.scala b/src/library/scala/runtime/AbstractFunction5.scala
index 949bae8ab7..411e1e14bf 100644
--- a/src/library/scala/runtime/AbstractFunction5.scala
+++ b/src/library/scala/runtime/AbstractFunction5.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction6.scala b/src/library/scala/runtime/AbstractFunction6.scala
index 337fd9f3fc..411c30d480 100644
--- a/src/library/scala/runtime/AbstractFunction6.scala
+++ b/src/library/scala/runtime/AbstractFunction6.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction7.scala b/src/library/scala/runtime/AbstractFunction7.scala
index 24458678ad..498f98633a 100644
--- a/src/library/scala/runtime/AbstractFunction7.scala
+++ b/src/library/scala/runtime/AbstractFunction7.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction8.scala b/src/library/scala/runtime/AbstractFunction8.scala
index 6d3dac849e..c6d320b887 100644
--- a/src/library/scala/runtime/AbstractFunction8.scala
+++ b/src/library/scala/runtime/AbstractFunction8.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction9.scala b/src/library/scala/runtime/AbstractFunction9.scala
index 43cf3d2a74..34bd9d7107 100644
--- a/src/library/scala/runtime/AbstractFunction9.scala
+++ b/src/library/scala/runtime/AbstractFunction9.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/Null$.scala b/src/library/scala/runtime/Null$.scala
index 797b31583d..25b797a606 100644
--- a/src/library/scala/runtime/Null$.scala
+++ b/src/library/scala/runtime/Null$.scala
@@ -11,6 +11,7 @@ package scala.runtime
/**
 * Dummy class which exists only to satisfy the JVM. It corresponds to
 * `scala.Null`. If such a type appears in method signatures, it is erased
- * to this one.
+ * to this one. A private constructor ensures that Java code can't create
+ * subclasses. The only value of type Null$ should be null.
*/
-sealed abstract class Null$
+sealed abstract class Null$ private ()
diff --git a/src/library/scala/runtime/RichException.scala b/src/library/scala/runtime/RichException.scala
index 94c4137674..cf4eb71ded 100644
--- a/src/library/scala/runtime/RichException.scala
+++ b/src/library/scala/runtime/RichException.scala
@@ -10,6 +10,7 @@ package scala.runtime
import scala.compat.Platform.EOL
+@deprecated("Use Throwable#getStackTrace", "2.11.0")
final class RichException(exc: Throwable) {
def getStackTraceString = exc.getStackTrace().mkString("", EOL, EOL)
}
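
The deprecation message recommends calling `Throwable#getStackTrace` directly; an equivalent one-liner, with a purely illustrative exception:

{{{
import scala.compat.Platform.EOL

val exc = new RuntimeException("boom")   // illustrative
val trace: String = exc.getStackTrace().mkString("", EOL, EOL)
}}}
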
diff --git a/src/library/scala/runtime/ScalaNumberProxy.scala b/src/library/scala/runtime/ScalaNumberProxy.scala
index 76fc38b267..e8460a203b 100644
--- a/src/library/scala/runtime/ScalaNumberProxy.scala
+++ b/src/library/scala/runtime/ScalaNumberProxy.scala
@@ -28,8 +28,8 @@ trait ScalaNumberProxy[T] extends Any with ScalaNumericAnyConversions with Typed
def floatValue() = num.toFloat(self)
def longValue() = num.toLong(self)
def intValue() = num.toInt(self)
- def byteValue() = intValue.toByte
- def shortValue() = intValue.toShort
+ def byteValue() = intValue().toByte
+ def shortValue() = intValue().toShort
def min(that: T): T = num.min(self, that)
def max(that: T): T = num.max(self, that)
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index 1d8fe5e9ad..753dd0205e 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -26,8 +26,7 @@ import java.lang.reflect.{ Modifier, Method => JMethod }
* outside the API and subject to change or removal without notice.
*/
object ScalaRunTime {
- def isArray(x: AnyRef): Boolean = isArray(x, 1)
- def isArray(x: Any, atLevel: Int): Boolean =
+ def isArray(x: Any, atLevel: Int = 1): Boolean =
x != null && isArrayClass(x.getClass, atLevel)
private def isArrayClass(clazz: jClass[_], atLevel: Int): Boolean =
@@ -228,7 +227,7 @@ object ScalaRunTime {
if (iv == fv) return iv
val lv = fv.toLong
- if (lv == fv) return hash(lv)
+ if (lv == fv) hash(lv)
else fv.hashCode
}
def hash(lv: Long): Int = {
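
The two `isArray` overloads collapse into one method with a default `atLevel = 1`, so single-argument call sites are unchanged. A sketch of the call-site equivalence (remember that `ScalaRunTime` is internal; the values below are illustrative):

{{{
import scala.runtime.ScalaRunTime

val flat   = Array(1, 2, 3)
val nested = Array(Array(1), Array(2))

ScalaRunTime.isArray(flat)        // true, same as the old isArray(flat, 1)
ScalaRunTime.isArray(nested, 2)   // true: an array of arrays
ScalaRunTime.isArray("hi")        // false
}}}
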
diff --git a/src/library/scala/runtime/SeqCharSequence.scala b/src/library/scala/runtime/SeqCharSequence.scala
index d2084a6598..ce7d7afc9e 100644
--- a/src/library/scala/runtime/SeqCharSequence.scala
+++ b/src/library/scala/runtime/SeqCharSequence.scala
@@ -11,6 +11,7 @@ package runtime
import java.util.Arrays.copyOfRange
+@deprecated("Use Predef.SeqCharSequence", "2.11.0")
final class SeqCharSequence(val xs: scala.collection.IndexedSeq[Char]) extends CharSequence {
def length: Int = xs.length
def charAt(index: Int): Char = xs(index)
@@ -18,6 +19,8 @@ final class SeqCharSequence(val xs: scala.collection.IndexedSeq[Char]) extends C
override def toString = xs.mkString("")
}
+// Still need this one since the implicit class ArrayCharSequence only converts
+// a single argument.
final class ArrayCharSequence(val xs: Array[Char], start: Int, end: Int) extends CharSequence {
// yikes
// java.lang.VerifyError: (class: scala/runtime/ArrayCharSequence, method: <init> signature: ([C)V)
diff --git a/src/library/scala/runtime/StringAdd.scala b/src/library/scala/runtime/StringAdd.scala
index 9d848f0ba7..1456d9a4e4 100644
--- a/src/library/scala/runtime/StringAdd.scala
+++ b/src/library/scala/runtime/StringAdd.scala
@@ -9,6 +9,7 @@
package scala.runtime
/** A wrapper class that adds string concatenation `+` to any value */
+@deprecated("Use Predef.StringAdd", "2.11.0")
final class StringAdd(val self: Any) extends AnyVal {
def +(other: String) = String.valueOf(self) + other
}
diff --git a/src/library/scala/runtime/StringFormat.scala b/src/library/scala/runtime/StringFormat.scala
index 983ae2fc54..21e5efd1fc 100644
--- a/src/library/scala/runtime/StringFormat.scala
+++ b/src/library/scala/runtime/StringFormat.scala
@@ -10,6 +10,7 @@ package scala.runtime
/** A wrapper class that adds a `formatted` operation to any value
*/
+@deprecated("Use Predef.StringFormat", "2.11.0")
final class StringFormat(val self: Any) extends AnyVal {
/** Returns string formatted according to given `format` string.
* Format strings are as for `String.format`
diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala
index ef29075ac3..bde69a0f54 100644
--- a/src/library/scala/runtime/Tuple2Zipped.scala
+++ b/src/library/scala/runtime/Tuple2Zipped.scala
@@ -37,12 +37,12 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1
for (el1 <- colls._1) {
if (elems2.hasNext)
- b += f(el1, elems2.next)
+ b += f(el1, elems2.next())
else
- return b.result
+ return b.result()
}
- b.result
+ b.result()
}
def flatMap[B, To](f: (El1, El2) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
@@ -51,12 +51,12 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1
for (el1 <- colls._1) {
if (elems2.hasNext)
- b ++= f(el1, elems2.next)
+ b ++= f(el1, elems2.next())
else
- return b.result
+ return b.result()
}
- b.result
+ b.result()
}
def filter[To1, To2](f: (El1, El2) => Boolean)(implicit cbf1: CBF[Repr1, El1, To1], cbf2: CBF[Repr2, El2, To2]): (To1, To2) = {
@@ -66,16 +66,16 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1
for (el1 <- colls._1) {
if (elems2.hasNext) {
- val el2 = elems2.next
+ val el2 = elems2.next()
if (f(el1, el2)) {
b1 += el1
b2 += el2
}
}
- else return (b1.result, b2.result)
+ else return (b1.result(), b2.result())
}
- (b1.result, b2.result)
+ (b1.result(), b2.result())
}
def exists(f: (El1, El2) => Boolean): Boolean = {
@@ -83,7 +83,7 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1
for (el1 <- colls._1) {
if (elems2.hasNext) {
- if (f(el1, elems2.next))
+ if (f(el1, elems2.next()))
return true
}
else return false
@@ -99,7 +99,7 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1
for (el1 <- colls._1) {
if (elems2.hasNext)
- f(el1, elems2.next)
+ f(el1, elems2.next())
else
return
}
@@ -117,9 +117,9 @@ object Tuple2Zipped {
val it1 = x._1.toIterator
val it2 = x._2.toIterator
while (it1.hasNext && it2.hasNext)
- buf += ((it1.next, it2.next))
+ buf += ((it1.next(), it2.next()))
- buf.result
+ buf.result()
}
def zipped[El1, Repr1, El2, Repr2]
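
These methods back the `zipped` view on pairs of collections; a quick illustration of the `map`, `filter` and `exists` forms touched above:

{{{
val xs = List(1, 2, 3)
val ys = List(10, 20, 30)

(xs, ys).zipped.map(_ + _)                 // List(11, 22, 33)
(xs, ys).zipped.filter((a, b) => b > 15)   // (List(2, 3), List(20, 30))
(xs, ys).zipped.exists(_ * 10 == _)        // true
}}}
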
diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala
index 3f2afaf772..34da42462a 100644
--- a/src/library/scala/runtime/Tuple3Zipped.scala
+++ b/src/library/scala/runtime/Tuple3Zipped.scala
@@ -34,11 +34,11 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers
for (el1 <- colls._1) {
if (elems2.hasNext && elems3.hasNext)
- b += f(el1, elems2.next, elems3.next)
+ b += f(el1, elems2.next(), elems3.next())
else
- return b.result
+ return b.result()
}
- b.result
+ b.result()
}
def flatMap[B, To](f: (El1, El2, El3) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
@@ -48,11 +48,11 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers
for (el1 <- colls._1) {
if (elems2.hasNext && elems3.hasNext)
- b ++= f(el1, elems2.next, elems3.next)
+ b ++= f(el1, elems2.next(), elems3.next())
else
- return b.result
+ return b.result()
}
- b.result
+ b.result()
}
def filter[To1, To2, To3](f: (El1, El2, El3) => Boolean)(
@@ -64,12 +64,12 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers
val b3 = cbf3(colls._3.repr)
val elems2 = colls._2.iterator
val elems3 = colls._3.iterator
- def result = (b1.result, b2.result, b3.result)
+ def result = (b1.result(), b2.result(), b3.result())
for (el1 <- colls._1) {
if (elems2.hasNext && elems3.hasNext) {
- val el2 = elems2.next
- val el3 = elems3.next
+ val el2 = elems2.next()
+ val el3 = elems3.next()
if (f(el1, el2, el3)) {
b1 += el1
@@ -89,7 +89,7 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers
for (el1 <- colls._1) {
if (elems2.hasNext && elems3.hasNext) {
- if (f(el1, elems2.next, elems3.next))
+ if (f(el1, elems2.next(), elems3.next()))
return true
}
else return false
@@ -106,7 +106,7 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers
for (el1 <- colls._1) {
if (elems2.hasNext && elems3.hasNext)
- f(el1, elems2.next, elems3.next)
+ f(el1, elems2.next(), elems3.next())
else
return
}
@@ -126,9 +126,9 @@ object Tuple3Zipped {
val it2 = x._2.toIterator
val it3 = x._3.toIterator
while (it1.hasNext && it2.hasNext && it3.hasNext)
- buf += ((it1.next, it2.next, it3.next))
+ buf += ((it1.next(), it2.next(), it3.next()))
- buf.result
+ buf.result()
}
def zipped[El1, Repr1, El2, Repr2, El3, Repr3]
diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala
index da9adb3dc2..294be5cd71 100644
--- a/src/library/scala/sys/SystemProperties.scala
+++ b/src/library/scala/sys/SystemProperties.scala
@@ -64,7 +64,6 @@ object SystemProperties {
propertyHelp(p.key) = helpText
p
}
- private def str(key: String, helpText: String) = addHelp(Prop[String](key), helpText)
private def bool(key: String, helpText: String): BooleanProp = addHelp[BooleanProp](
if (key startsWith "java.") BooleanProp.valueIsTrue(key) else BooleanProp.keyExists(key),
helpText
diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala
index 0003df6c52..e2c4f13830 100644
--- a/src/library/scala/sys/process/BasicIO.scala
+++ b/src/library/scala/sys/process/BasicIO.scala
@@ -46,7 +46,7 @@ object BasicIO {
def next(): Stream[T] = q.take match {
case Left(0) => Stream.empty
case Left(code) => if (nonzeroException) scala.sys.error("Nonzero exit code: " + code) else Stream.empty
- case Right(s) => Stream.cons(s, next)
+ case Right(s) => Stream.cons(s, next())
}
new Streamed((s: T) => q put Right(s), code => q put Left(code), () => next())
}
diff --git a/src/library/scala/sys/process/ProcessBuilder.scala b/src/library/scala/sys/process/ProcessBuilder.scala
index d0b2ecfe73..5d89e45001 100644
--- a/src/library/scala/sys/process/ProcessBuilder.scala
+++ b/src/library/scala/sys/process/ProcessBuilder.scala
@@ -23,7 +23,7 @@ import ProcessBuilder._
* based on these factories made available in the package object
* [[scala.sys.process]]. Here are some examples:
* {{{
- * import.scala.sys.process._
+ * import scala.sys.process._
*
* // Executes "ls" and sends output to stdout
* "ls".!
@@ -46,14 +46,14 @@ import ProcessBuilder._
*
* Two existing `ProcessBuilder` can be combined in the following ways:
*
- * * They can be executed in parallel, with the output of the first being fed
- * as input to the second, like Unix pipes. This is achieved with the `#|`
- * method.
- * * They can be executed in sequence, with the second starting as soon as
- * the first ends. This is done by the `###` method.
- * * The execution of the second one can be conditioned by the return code
- * (exit status) of the first, either only when it's zero, or only when it's
- * not zero. The methods `#&&` and `#||` accomplish these tasks.
+ * - They can be executed in parallel, with the output of the first being fed
+ * as input to the second, like Unix pipes. This is achieved with the `#|`
+ * method.
+ * - They can be executed in sequence, with the second starting as soon as
+ * the first ends. This is done by the `###` method.
+ * - The execution of the second one can be conditioned by the return code
+ * (exit status) of the first, either only when it's zero, or only when it's
+ * not zero. The methods `#&&` and `#||` accomplish these tasks.
*
* ==Redirecting Input/Output==
*
@@ -74,18 +74,18 @@ import ProcessBuilder._
* overloads and variations to enable further control over the I/O. These
* methods are:
*
- * * `run`: the most general method, it returns a
- * [[scala.sys.process.Process]] immediately, and the external command
- * executes concurrently.
- * * `!`: blocks until all external commands exit, and returns the exit code
- * of the last one in the chain of execution.
- * * `!!`: blocks until all external commands exit, and returns a `String`
- * with the output generated.
- * * `lines`: returns immediately like `run`, and the output being generated
- * is provided through a `Stream[String]`. Getting the next element of that
- * `Stream` may block until it becomes available. This method will throw an
- * exception if the return code is different than zero -- if this is not
- * desired, use the `lines_!` method.
+ * - `run`: the most general method, it returns a
+ * [[scala.sys.process.Process]] immediately, and the external command
+ * executes concurrently.
+ * - `!`: blocks until all external commands exit, and returns the exit code
+ * of the last one in the chain of execution.
+ * - `!!`: blocks until all external commands exit, and returns a `String`
+ * with the output generated.
+ * - `lines`: returns immediately like `run`, and the output being generated
+ * is provided through a `Stream[String]`. Getting the next element of that
+ * `Stream` may block until it becomes available. This method will throw an
+ * exception if the return code is different than zero -- if this is not
+ * desired, use the `lines_!` method.
*
* ==Handling Input and Output==
*
@@ -305,10 +305,10 @@ object ProcessBuilder extends ProcessBuilderImpl {
protected def toSource: ProcessBuilder
/** Writes the output stream of this process to the given file. */
- def #> (f: File): ProcessBuilder = toFile(f, false)
+ def #> (f: File): ProcessBuilder = toFile(f, append = false)
/** Appends the output stream of this process to the given file. */
- def #>> (f: File): ProcessBuilder = toFile(f, true)
+ def #>> (f: File): ProcessBuilder = toFile(f, append = true)
/** Writes the output stream of this process to the given OutputStream. The
* argument is call-by-name, so the stream is recreated, written, and closed each
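
The combinators documented above are easiest to compare side by side. A sketch assuming a Unix-like environment where `ls`, `grep`, `touch` and `echo` are on the path; the file and directory names are made up:

{{{
import scala.sys.process._

// Pipe: both commands run in parallel, ls feeding grep, like a Unix pipe.
val pipeExit: Int = ("ls" #| "grep .scala").!

// Sequence: the second starts once the first finishes, whatever its exit code.
val seqExit: Int = ("touch build.log" ### "ls").!

// Conditionals: #&& runs the right side only on exit code 0, #|| only on non-zero.
val condExit: Int = ("ls missing-dir" #&& "echo found" #|| "echo not found").!
}}}
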
diff --git a/src/library/scala/sys/process/ProcessBuilderImpl.scala b/src/library/scala/sys/process/ProcessBuilderImpl.scala
index 49fea6f464..91e267d5e4 100644
--- a/src/library/scala/sys/process/ProcessBuilderImpl.scala
+++ b/src/library/scala/sys/process/ProcessBuilderImpl.scala
@@ -69,7 +69,7 @@ private[process] trait ProcessBuilderImpl {
import io._
// spawn threads that process the input, output, and error streams using the functions defined in `io`
- val inThread = Spawn(writeInput(process.getOutputStream), true)
+ val inThread = Spawn(writeInput(process.getOutputStream), daemon = true)
val outThread = Spawn(processOutput(process.getInputStream), daemonizeThreads)
val errorThread =
if (p.redirectErrorStream) Nil
@@ -93,26 +93,26 @@ private[process] trait ProcessBuilderImpl {
def #&&(other: ProcessBuilder): ProcessBuilder = new AndBuilder(this, other)
def ###(other: ProcessBuilder): ProcessBuilder = new SequenceBuilder(this, other)
- def run(): Process = run(false)
+ def run(): Process = run(connectInput = false)
def run(connectInput: Boolean): Process = run(BasicIO.standard(connectInput))
- def run(log: ProcessLogger): Process = run(log, false)
+ def run(log: ProcessLogger): Process = run(log, connectInput = false)
def run(log: ProcessLogger, connectInput: Boolean): Process = run(BasicIO(connectInput, log))
- def !! = slurp(None, false)
- def !!(log: ProcessLogger) = slurp(Some(log), false)
- def !!< = slurp(None, true)
- def !!<(log: ProcessLogger) = slurp(Some(log), true)
+ def !! = slurp(None, withIn = false)
+ def !!(log: ProcessLogger) = slurp(Some(log), withIn = false)
+ def !!< = slurp(None, withIn = true)
+ def !!<(log: ProcessLogger) = slurp(Some(log), withIn = true)
- def lines: Stream[String] = lines(false, true, None)
- def lines(log: ProcessLogger): Stream[String] = lines(false, true, Some(log))
- def lines_! : Stream[String] = lines(false, false, None)
- def lines_!(log: ProcessLogger): Stream[String] = lines(false, false, Some(log))
+ def lines: Stream[String] = lines(withInput = false, nonZeroException = true, None)
+ def lines(log: ProcessLogger): Stream[String] = lines(withInput = false, nonZeroException = true, Some(log))
+ def lines_! : Stream[String] = lines(withInput = false, nonZeroException = false, None)
+ def lines_!(log: ProcessLogger): Stream[String] = lines(withInput = false, nonZeroException = false, Some(log))
- def ! = run(false).exitValue()
+ def ! = run(connectInput = false).exitValue()
def !(io: ProcessIO) = run(io).exitValue()
- def !(log: ProcessLogger) = runBuffered(log, false)
- def !< = run(true).exitValue()
- def !<(log: ProcessLogger) = runBuffered(log, true)
+ def !(log: ProcessLogger) = runBuffered(log, connectInput = false)
+ def !< = run(connectInput = true).exitValue()
+ def !<(log: ProcessLogger) = runBuffered(log, connectInput = true)
/** Constructs a new builder which runs this command with all input/output threads marked
* as daemon threads. This allows the creation of a long running process while still
diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala
index c21c0daa5e..c64ba246fc 100644
--- a/src/library/scala/sys/process/ProcessImpl.scala
+++ b/src/library/scala/sys/process/ProcessImpl.scala
@@ -17,7 +17,7 @@ private[process] trait ProcessImpl {
/** Runs provided code in a new Thread and returns the Thread instance. */
private[process] object Spawn {
- def apply(f: => Unit): Thread = apply(f, false)
+ def apply(f: => Unit): Thread = apply(f, daemon = false)
def apply(f: => Unit, daemon: Boolean): Thread = {
val thread = new Thread() { override def run() = { f } }
thread.setDaemon(daemon)
@@ -32,7 +32,7 @@ private[process] trait ProcessImpl {
try result set Right(f)
catch { case e: Exception => result set Left(e) }
- Spawn(run)
+ Spawn(run())
() => result.get match {
case Right(value) => value
@@ -68,10 +68,10 @@ private[process] trait ProcessImpl {
protected[this] override def runAndExitValue() = {
val first = a.run(io)
- runInterruptible(first.exitValue)(first.destroy()) flatMap { codeA =>
+ runInterruptible(first.exitValue())(first.destroy()) flatMap { codeA =>
if (evaluateSecondProcess(codeA)) {
val second = b.run(io)
- runInterruptible(second.exitValue)(second.destroy())
+ runInterruptible(second.exitValue())(second.destroy())
}
else Some(codeA)
}
@@ -132,10 +132,10 @@ private[process] trait ProcessImpl {
val first = a.run(firstIO)
try {
runInterruptible {
- val exit1 = first.exitValue
+ val exit1 = first.exitValue()
currentSource put None
currentSink put None
- val exit2 = second.exitValue
+ val exit2 = second.exitValue()
// Since file redirection (e.g. #>) is implemented as a piped process,
// we ignore its exit value so cmd #> file doesn't always return 0.
if (b.hasExitValue) exit2 else exit1
diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala
index ed436febc0..902543665f 100644
--- a/src/library/scala/sys/process/package.scala
+++ b/src/library/scala/sys/process/package.scala
@@ -25,7 +25,7 @@ package scala.sys {
*
* {{{
* import scala.sys.process._
- * "ls" #| "grep .scala" #&& "scalac *.scala" #|| "echo nothing found" lines
+ * "ls" #| "grep .scala" #&& Seq("sh", "-c", "scalac *.scala") #|| "echo nothing found" lines
* }}}
*
* We describe below the general concepts and architecture of the package,
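
The example now wraps the glob in `Seq("sh", "-c", ...)` because the process API hands arguments to the program verbatim and performs no shell expansion; only a real shell expands `*.scala`. A minimal comparison, assuming `sh` is available:

{{{
import scala.sys.process._

// No shell involved: "*.scala" reaches scalac literally.
val literal = Seq("scalac", "*.scala")

// A shell expands the glob before scalac runs.
val globbed = Seq("sh", "-c", "scalac *.scala")

val exit: Int = globbed.!
}}}
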
diff --git a/src/library/scala/testing/Benchmark.scala b/src/library/scala/testing/Benchmark.scala
deleted file mode 100644
index 66d7d448eb..0000000000
--- a/src/library/scala/testing/Benchmark.scala
+++ /dev/null
@@ -1,114 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.testing
-
-import scala.compat.Platform
-
-/** `Benchmark` can be used to quickly turn an existing class into a
- * benchmark. Here is a short example:
- * {{{
- * object sort1 extends Sorter with Benchmark {
- * def run = sort(List.range(1, 1000))
- * }
- * }}}
- * The `run` method has to be defined by the user, who will perform the
- * timed operation there. Run the benchmark as follows:
- * {{{
- * > scala sort1 5
- * }}}
- * This will run the benchmark 5 times, forcing a garbage collection
- * between runs, and printing the execution times to stdout.
- *
- * It is also possible to add a multiplier, so
- * {{{
- * > scala sort1 5 10
- * }}}
- * will run the entire benchmark 10 times, each time for 5 runs.
- *
- * @author Iulian Dragos, Burak Emir
- */
-@deprecated("This class will be removed.", "2.10.0")
-trait Benchmark {
-
- /** this method should be implemented by the concrete benchmark.
- * This method is called by the benchmarking code for a number of times.
- * The GC is called between "multiplier" calls to run, right after tear
- * down.
- *
- * @see setUp
- * @see tearDown
- */
- def run()
-
- var multiplier = 1
-
- /** Run the benchmark the specified number of times and return a list with
- * the execution times in milliseconds in reverse order of the execution.
- */
- def runBenchmark(noTimes: Int): List[Long] =
- for (i <- List.range(1, noTimes + 1)) yield {
- setUp
- val startTime = Platform.currentTime
- var i = 0; while (i < multiplier) {
- run()
- i += 1
- }
- val stopTime = Platform.currentTime
- tearDown
- Platform.collectGarbage
-
- stopTime - startTime
- }
-
- /** Prepare any data needed by the benchmark, but whose execution time
- * should not be measured. This method is run before each call to the
- * benchmark payload, 'run'.
- */
- def setUp() {}
-
- /** Perform cleanup operations after each 'run'. For micro benchmarks,
- * think about using the result of 'run' in a way that prevents the JVM
- * to dead-code eliminate the whole 'run' method. For instance, print or
- * write the results to a file. The execution time of this method is not
- * measured.
- */
- def tearDown() {}
-
- /** a string that is written at the beginning of the output line
- * that contains the timings. By default, this is the class name.
- */
- def prefix: String = getClass().getName()
-
- /**
- * The entry point. It takes two arguments:
- * - argument `n` is the number of consecutive runs
- * - optional argument `mult` specifies that the `n` runs are repeated
- * `mult` times.
- */
- def main(args: Array[String]) {
- if (args.length > 0) {
- val logFile = new java.io.OutputStreamWriter(System.out)
- if (args.length > 1) multiplier = args(1).toInt
- logFile.write(prefix)
- for (t <- runBenchmark(args(0).toInt))
- logFile.write("\t" + t)
-
- logFile.write(Platform.EOL)
- logFile.flush()
- } else {
- println("Usage: scala benchmarks.program <runs> ")
- println(" or: scala benchmarks.program <runs> <multiplier>")
- println("""
- The benchmark is run <runs> times, forcing a garbage collection between runs. The optional
- <multiplier> causes the benchmark to be repeated <multiplier> times, each time for <runs>
- executions.
- """)
- }
- }
-}
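
The core of the deleted trait (a timed inner loop with a garbage collection between runs) is easy to reproduce ad hoc. A minimal stand-in sketch, not a drop-in replacement; the sort at the end is only an example payload:

{{{
import scala.compat.Platform

def runBenchmark(times: Int, multiplier: Int = 1)(body: => Unit): List[Long] =
  List.fill(times) {
    val start = Platform.currentTime
    var i = 0
    while (i < multiplier) { body; i += 1 }
    val stop = Platform.currentTime
    Platform.collectGarbage
    stop - start
  }

// Time five runs of sorting a small list; returns the timings in milliseconds.
val timings: List[Long] = runBenchmark(5)(List.range(1000, 1, -1).sorted)
}}}
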
diff --git a/src/library/scala/testing/Show.scala b/src/library/scala/testing/Show.scala
deleted file mode 100644
index 9376e26db4..0000000000
--- a/src/library/scala/testing/Show.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.testing
-
-/** Classes inheriting trait `Show` can test their member methods using the
- * notation `meth(arg,,1,,, ..., arg,,n,,)`, where `meth` is the name of
- * the method and `arg,,1,,,...,arg,,n,,` are the arguments.
- *
- * The only difference to a normal method call is the leading quote
- * character (`'`). A quoted method call like the one above will produce
- * a legible diagnostic to be printed on [[scala.Console]].
- *
- * It is of the form
- *
- * `meth(arg,,1,,, ..., arg,,n,,)` gives `&lt;result&gt;`
- *
- * where `&lt;result&gt;` is the result of evaluating the call.
- *
- */
-@deprecated("This class will be removed.", "2.10.0")
-trait Show {
-
- /** An implicit definition that adds an apply method to Symbol which forwards to `test`.
- * Prints out diagnostics of method applications.
- */
- implicit class SymApply(f: Symbol) {
- def apply[A](args: A*) {
- println(test(f, args: _*))
- }
- }
-
- @deprecated("use SymApply instead", "2.10.0")
- def symApply(sym: Symbol): SymApply = new SymApply(sym)
-
- /** Apply method with name of given symbol `f` to given arguments and return
- * a result diagnostics.
- */
- def test[A](f: Symbol, args: A*): String = {
- val args1 = args map (_.asInstanceOf[AnyRef])
- def testMethod(meth: java.lang.reflect.Method): String =
- f.name+"("+(args mkString ",")+") gives "+
- {
- try {
- meth.invoke(this, args1: _*)
- } catch {
- case ex: IllegalAccessException => ex
- case ex: IllegalArgumentException => ex
- case ex: java.lang.reflect.InvocationTargetException => ex
- }
- }
- getClass.getMethods.toList filter (_.getName == f.name) match {
- case List() =>
- f.name+" is not defined"
- case List(m) =>
- testMethod(m)
- case ms => // multiple methods, disambiguate by number of arguments
- ms filter (_.getParameterTypes.length == args.length) match {
- case List() =>
- testMethod(ms.head) // go ahead anyway, to get an exception
- case List(m) =>
- testMethod(m)
- case ms =>
- "cannot disambiguate between multiple implementations of "+f.name
- }
- }
- }
-}
diff --git a/src/library/scala/text/Document.scala b/src/library/scala/text/Document.scala
index b74fd152b5..59d5b1bf93 100644
--- a/src/library/scala/text/Document.scala
+++ b/src/library/scala/text/Document.scala
@@ -80,7 +80,7 @@ abstract class Document {
fmt(k, (i + ii, b, d) :: z)
case (i, true, DocBreak) :: z =>
writer write "\n"
- spaces(i);
+ spaces(i)
fmt(i, z)
case (i, false, DocBreak) :: z =>
writer write " "
diff --git a/src/library/scala/throws.scala b/src/library/scala/throws.scala
index 159f1f02f4..5a5dd9a1f5 100644
--- a/src/library/scala/throws.scala
+++ b/src/library/scala/throws.scala
@@ -24,5 +24,5 @@ package scala
* @since 2.1
*/
class throws[T <: Throwable](cause: String = "") extends scala.annotation.StaticAnnotation {
- def this(clazz: Class[T]) = this()
+ def this(clazz: Class[T]) = this("")
}
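
With the secondary constructor delegating explicitly, both spellings of the annotation keep working; for example (the method and exception choice are illustrative):

{{{
import java.io.IOException

class Reader {
  @throws[IOException]("if the underlying stream fails")
  def read(): Int = 0

  @throws(classOf[IOException])
  def readAll(): String = ""
}
}}}
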
diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala
index dba11ed73c..5cd35ab6d9 100644
--- a/src/library/scala/util/Either.scala
+++ b/src/library/scala/util/Either.scala
@@ -21,7 +21,7 @@ import scala.language.implicitConversions
* [[scala.util.Right]] takes the place of [[scala.Some]]. Convention dictates
* that Left is used for failure and Right is used for success.
*
- * For example, you could use ``Either[String, Int]`` to detect whether a
+ * For example, you could use `Either[String, Int]` to detect whether a
* received input is a String or an Int.
*
* {{{
@@ -205,7 +205,7 @@ final case class Right[+A, +B](b: B) extends Either[A, B] {
object Either {
/**
- * Allows use of a ``merge`` method to extract values from Either instances
+ * Allows use of a `merge` method to extract values from Either instances
* regardless of whether they are Left or Right.
*
* {{{
@@ -221,8 +221,6 @@ object Either {
case Right(a) => a
}
}
- @deprecated("use MergeableEither instead", "2.10.0")
- def either2mergeable[A](x: Either[A, A]): MergeableEither[A] = new MergeableEither(x)
/**
* Projects an `Either` into a `Left`.
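
The `merge` extension collapses an `Either[A, A]` to its contained value regardless of the side; for instance:

{{{
val l: Either[String, String] = Left("left side")
val r: Either[String, String] = Right("right side")

l.merge   // "left side"
r.merge   // "right side"
}}}
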
diff --git a/src/library/scala/util/Marshal.scala b/src/library/scala/util/Marshal.scala
deleted file mode 100644
index b78ed2140e..0000000000
--- a/src/library/scala/util/Marshal.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2008-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.util
-
-/**
- * Marshalling of Scala objects using Scala tags.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@deprecated("This class will be removed", "2.10.0")
-object Marshal {
- import java.io._
- import scala.reflect.ClassTag
-
- def dump[A](o: A)(implicit t: ClassTag[A]): Array[Byte] = {
- val ba = new ByteArrayOutputStream(512)
- val out = new ObjectOutputStream(ba)
- out.writeObject(t)
- out.writeObject(o)
- out.close()
- ba.toByteArray()
- }
-
- @throws(classOf[IOException])
- @throws(classOf[ClassCastException])
- @throws(classOf[ClassNotFoundException])
- def load[A](buffer: Array[Byte])(implicit expected: ClassTag[A]): A = {
- val in = new ObjectInputStream(new ByteArrayInputStream(buffer))
- val found = in.readObject.asInstanceOf[ClassTag[_]]
- try {
- found.runtimeClass.asSubclass(expected.runtimeClass)
- in.readObject.asInstanceOf[A]
- } catch {
- case _: ClassCastException =>
- in.close()
- throw new ClassCastException("type mismatch;"+
- "\n found : "+found+
- "\n required: "+expected)
- }
- }
-}
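
Plain Java serialization covers the common case the removed object handled, minus the `ClassTag`-based type check; a compact sketch:

{{{
import java.io._

def dump(o: AnyRef): Array[Byte] = {
  val bytes = new ByteArrayOutputStream(512)
  val out = new ObjectOutputStream(bytes)
  try out.writeObject(o) finally out.close()
  bytes.toByteArray
}

def load[A](buffer: Array[Byte]): A = {
  val in = new ObjectInputStream(new ByteArrayInputStream(buffer))
  try in.readObject().asInstanceOf[A] finally in.close()
}

val roundTripped = load[List[Int]](dump(List(1, 2, 3)))   // List(1, 2, 3)
}}}
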
diff --git a/src/library/scala/util/MurmurHash.scala b/src/library/scala/util/MurmurHash.scala
index a5bc8faf8d..b82259c217 100644
--- a/src/library/scala/util/MurmurHash.scala
+++ b/src/library/scala/util/MurmurHash.scala
@@ -164,7 +164,7 @@ object MurmurHash {
var k = hiddenMagicB
var j = 0
while (j+1 < s.length) {
- val i = (s.charAt(j)<<16) + s.charAt(j+1);
+ val i = (s.charAt(j)<<16) + s.charAt(j+1)
h = extendHash(h,i,c,k)
c = nextMagicA(c)
k = nextMagicB(k)
diff --git a/src/library/scala/util/Random.scala b/src/library/scala/util/Random.scala
index 24c4cd7a32..b3a8617f15 100644
--- a/src/library/scala/util/Random.scala
+++ b/src/library/scala/util/Random.scala
@@ -17,7 +17,7 @@ import scala.language.{implicitConversions, higherKinds}
* @author Stephane Micheloud
*
*/
-class Random(val self: java.util.Random) {
+class Random(val self: java.util.Random) extends AnyRef with Serializable {
/** Creates a new random number generator using a single long seed. */
def this(seed: Long) = this(new java.util.Random(seed))
@@ -117,7 +117,7 @@ class Random(val self: java.util.Random) {
swap(n - 1, k)
}
- (bf(xs) ++= buf).result
+ (bf(xs) ++= buf).result()
}
/** Returns a Stream of pseudorandomly chosen alphanumeric characters,
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
index 7749543caa..fbfeb7d4d9 100644
--- a/src/library/scala/util/Try.scala
+++ b/src/library/scala/util/Try.scala
@@ -164,7 +164,7 @@ object Try {
}
-final case class Failure[+T](val exception: Throwable) extends Try[T] {
+final case class Failure[+T](exception: Throwable) extends Try[T] {
def isFailure: Boolean = true
def isSuccess: Boolean = false
def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] =
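
Dropping the redundant `val` keeps `Failure` an ordinary case class and leaves usage unchanged; for example:

{{{
import scala.util.{Try, Success, Failure}

Try("42".toInt) match {
  case Success(n) => println(s"parsed $n")
  case Failure(e) => println(s"failed with ${e.getMessage}")
}

val recovered: Try[Int] = Try("not a number".toInt) recoverWith {
  case _: NumberFormatException => Success(0)
}
}}}
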
diff --git a/src/library/scala/util/control/NonFatal.scala b/src/library/scala/util/control/NonFatal.scala
index 0d8cdfbace..74478f2a49 100644
--- a/src/library/scala/util/control/NonFatal.scala
+++ b/src/library/scala/util/control/NonFatal.scala
@@ -11,7 +11,7 @@ package scala.util.control
/**
* Extractor of non-fatal Throwables. Will not match fatal errors like `VirtualMachineError`
* (for example, `OutOfMemoryError`, a subclass of `VirtualMachineError`), `ThreadDeath`,
- * `LinkageError`, `InterruptedException`, `ControlThrowable`, or `NotImplementedError`.
+ * `LinkageError`, `InterruptedException`, `ControlThrowable`.
* However, `StackOverflowError` is matched, i.e. considered non-fatal.
*
* Note that [[scala.util.control.ControlThrowable]], an internal Throwable, is not matched by
@@ -35,7 +35,7 @@ object NonFatal {
def apply(t: Throwable): Boolean = t match {
case _: StackOverflowError => true // StackOverflowError ok even though it is a VirtualMachineError
// VirtualMachineError includes OutOfMemoryError and other fatal errors
- case _: VirtualMachineError | _: ThreadDeath | _: InterruptedException | _: LinkageError | _: ControlThrowable | _: NotImplementedError => false
+ case _: VirtualMachineError | _: ThreadDeath | _: InterruptedException | _: LinkageError | _: ControlThrowable => false
case _ => true
}
/**
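
With `NotImplementedError` no longer listed as fatal, a `NonFatal` handler now also catches the exception thrown by `???`. A small sketch; the `attempt` helper is illustrative only:

{{{
import scala.util.control.NonFatal

def attempt[A](body: => A): Option[A] =
  try Some(body)
  catch { case NonFatal(e) => Console.err.println(s"suppressed: $e"); None }

attempt(sys.error("boom"))   // None: a plain RuntimeException is non-fatal
attempt(???)                 // None under the new rule: NotImplementedError is caught
attempt("fine")              // Some("fine")
}}}
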
diff --git a/src/library/scala/util/grammar/HedgeRHS.scala b/src/library/scala/util/grammar/HedgeRHS.scala
deleted file mode 100644
index d1c11a2f99..0000000000
--- a/src/library/scala/util/grammar/HedgeRHS.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.util.grammar
-
-@deprecated("This class will be removed", "2.10.0")
-abstract class HedgeRHS
-
-/** Right hand side of a hedge production, deriving a single tree. */
-@deprecated("This class will be removed", "2.10.0")
-case class ConsRHS(tnt: Int, hnt: Int) extends HedgeRHS
-
-/** Right hand side of a hedge production, deriving any hedge. */
-@deprecated("This class will be removed", "2.10.0")
-case object AnyHedgeRHS extends HedgeRHS
-
-/** Right hand side of a hedge production, deriving the empty hedge. */
-@deprecated("This class will be removed", "2.10.0")
-case object EmptyHedgeRHS extends HedgeRHS
diff --git a/src/library/scala/util/grammar/TreeRHS.scala b/src/library/scala/util/grammar/TreeRHS.scala
deleted file mode 100644
index ee72ea982d..0000000000
--- a/src/library/scala/util/grammar/TreeRHS.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.util.grammar
-
-/** Right hand side of a tree production. */
-@deprecated("This class will be removed", "2.10.0")
-abstract class TreeRHS
-
-/** Right hand side of a tree production, labelled with a letter from an alphabet. */
-@deprecated("This class will be removed", "2.10.0")
-case class LabelledRHS[A](label: A, hnt: Int) extends TreeRHS
-
-@deprecated("This class will be removed", "2.10.0")
-case object AnyTreeRHS extends TreeRHS
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index 716d746552..981d9af02f 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -131,7 +131,7 @@ import java.util.regex.{ Pattern, Matcher }
* @author Martin Odersky
* @version 1.1, 29/01/2008
*
- * @param regex A string representing a regular expression
+ * @param pattern The compiled pattern
* @param groupNames A mapping from names to indices in capture groups
*
* @define replacementString
@@ -144,49 +144,67 @@ import java.util.regex.{ Pattern, Matcher }
* to automatically escape these characters.
*/
@SerialVersionUID(-2094783597747625537L)
-class Regex(regex: String, groupNames: String*) extends Serializable {
+class Regex private[matching](val pattern: Pattern, groupNames: String*) extends Serializable {
outer =>
import Regex._
- /** The compiled pattern */
- val pattern = Pattern.compile(regex)
+ /**
+ * @param regex A string representing a regular expression
+ * @param groupNames A mapping from names to indices in capture groups
+ */
+ def this(regex: String, groupNames: String*) = this(Pattern.compile(regex), groupNames: _*)
- /** Tries to match target (whole match) and returns the matching subgroups.
- * if the pattern has no subgroups, then it returns an empty list on a
- * successful match.
- *
- * Note, however, that if some subgroup has not been matched, a `null` will
- * be returned for that subgroup.
+ /** Tries to match a [[java.lang.CharSequence]].
+ * If the match succeeds, the result is a list of the matching
+ * groups (or a `null` element if a group did not match any input).
+ * If the pattern specifies no groups, then the result will be an empty list
+ * on a successful match.
*
+ * This method attempts to match the entire input by default; to find the next
+ * matching subsequence, use an unanchored Regex.
+
* For example:
*
* {{{
* val p1 = "ab*c".r
- * val p2 = "a(b*)c".r
- *
* val p1Matches = "abbbc" match {
* case p1() => true
* case _ => false
* }
- *
+ * val p2 = "a(b*)c".r
* val numberOfB = "abbbc" match {
* case p2(b) => Some(b.length)
* case _ => None
* }
+ * val p3 = "b*".r.unanchored
+ * val p3Matches = "abbbc" match {
+ * case p3() => true
+ * case _ => false
+ * }
* }}}
*
- * @param target The string to match
+ * @param s The string to match
* @return The matches
*/
- def unapplySeq(target: Any): Option[List[String]] = target match {
- case s: CharSequence =>
- val m = pattern matcher s
- if (runMatcher(m)) Some((1 to m.groupCount).toList map m.group)
- else None
- case m: Match => unapplySeq(m.matched)
- case _ => None
+ def unapplySeq(s: CharSequence): Option[Seq[String]] = {
+ val m = pattern matcher s
+ if (runMatcher(m)) Some(1 to m.groupCount map m.group)
+ else None
}
+
+ /** Tries to match on a [[scala.util.matching.Regex.Match]].
+ * A previously failed match results in None.
+ * If a successful match was made against the current pattern, then that result is used.
+ * Otherwise, this Regex is applied to the previously matched input,
+ * and the result of that match is used.
+ */
+ def unapplySeq(m: Match): Option[Seq[String]] =
+ if (m.matched == null) None
+ else if (m.matcher.pattern == this.pattern) Some(1 to m.groupCount map m.group)
+ else unapplySeq(m.matched)
+
+ // @see UnanchoredRegex
protected def runMatcher(m: Matcher) = m.matches()
/** Return all matches of this regexp in given character sequence as a [[scala.util.matching.Regex.MatchIterator]],
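
Splitting the extractor into `unapplySeq(CharSequence)` and `unapplySeq(Match)` keeps whole-input matching as the default, while `unanchored` opts into searching inside the input; for example:

{{{
val date = """(\d{4})-(\d{2})-(\d{2})""".r

"2013-06-01" match {                        // anchored: must match the whole input
  case date(y, m, d) => s"$y/$m/$d"
  case _             => "no match"
}

val embedded = date.unanchored
"released on 2013-06-01, roughly" match {   // unanchored: finds the date inside the text
  case embedded(y, m, d) => s"$y/$m/$d"
  case _                 => "no match"
}
}}}
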
@@ -200,7 +218,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @return A [[scala.util.matching.Regex.MatchIterator]] of all matches.
* @example {{{for (words <- """\w+""".r findAllIn "A simple example.") yield words}}}
*/
- def findAllIn(source: java.lang.CharSequence) = new Regex.MatchIterator(source, this, groupNames)
+ def findAllIn(source: CharSequence) = new Regex.MatchIterator(source, this, groupNames)
/** Return all matches of this regexp in given character sequence as a
@@ -210,12 +228,12 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @return A [[scala.collection.Iterator]] of [[scala.util.matching.Regex.Match]] for all matches.
* @example {{{for (words <- """\w+""".r findAllMatchIn "A simple example.") yield words.start}}}
*/
- def findAllMatchIn(source: java.lang.CharSequence): Iterator[Match] = {
+ def findAllMatchIn(source: CharSequence): Iterator[Match] = {
val matchIterator = findAllIn(source)
new Iterator[Match] {
def hasNext = matchIterator.hasNext
def next: Match = {
- matchIterator.next;
+ matchIterator.next()
new Match(matchIterator.source, matchIterator.matcher, matchIterator.groupNames).force
}
}
@@ -228,7 +246,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @return An [[scala.Option]] of the first matching string in the text.
* @example {{{"""\w+""".r findFirstIn "A simple example." foreach println // prints "A"}}}
*/
- def findFirstIn(source: java.lang.CharSequence): Option[String] = {
+ def findFirstIn(source: CharSequence): Option[String] = {
val m = pattern.matcher(source)
if (m.find) Some(m.group) else None
}
@@ -245,7 +263,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @return A [[scala.Option]] of [[scala.util.matching.Regex.Match]] of the first matching string in the text.
* @example {{{("""[a-z]""".r findFirstMatchIn "A simple example.") map (_.start) // returns Some(2), the index of the first match in the text}}}
*/
- def findFirstMatchIn(source: java.lang.CharSequence): Option[Match] = {
+ def findFirstMatchIn(source: CharSequence): Option[Match] = {
val m = pattern.matcher(source)
if (m.find) Some(new Match(source, m, groupNames)) else None
}
@@ -262,7 +280,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @return A [[scala.Option]] of the matched prefix.
* @example {{{"""[a-z]""".r findPrefixOf "A simple example." // returns None, since the text does not begin with a lowercase letter}}}
*/
- def findPrefixOf(source: java.lang.CharSequence): Option[String] = {
+ def findPrefixOf(source: CharSequence): Option[String] = {
val m = pattern.matcher(source)
if (m.lookingAt) Some(m.group) else None
}
@@ -279,7 +297,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @return A [[scala.Option]] of the [[scala.util.matching.Regex.Match]] of the matched string.
* @example {{{"""\w+""".r findPrefixMatchOf "A simple example." map (_.after) // returns Some(" simple example.")}}}
*/
- def findPrefixMatchOf(source: java.lang.CharSequence): Option[Match] = {
+ def findPrefixMatchOf(source: CharSequence): Option[Match] = {
val m = pattern.matcher(source)
if (m.lookingAt) Some(new Match(source, m, groupNames)) else None
}
@@ -293,7 +311,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @return The resulting string
* @example {{{"""\d+""".r replaceAllIn ("July 15", "<NUMBER>") // returns "July <NUMBER>"}}}
*/
- def replaceAllIn(target: java.lang.CharSequence, replacement: String): String = {
+ def replaceAllIn(target: CharSequence, replacement: String): String = {
val m = pattern.matcher(target)
m.replaceAll(replacement)
}
@@ -316,7 +334,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @param replacer The function which maps a match to another string.
* @return The target string after replacements.
*/
- def replaceAllIn(target: java.lang.CharSequence, replacer: Match => String): String = {
+ def replaceAllIn(target: CharSequence, replacer: Match => String): String = {
val it = new Regex.MatchIterator(target, this, groupNames).replacementData
it foreach (md => it replace replacer(md))
it.replaced
@@ -343,7 +361,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @param replacer The function which optionally maps a match to another string.
* @return The target string after replacements.
*/
- def replaceSomeIn(target: java.lang.CharSequence, replacer: Match => Option[String]): String = {
+ def replaceSomeIn(target: CharSequence, replacer: Match => Option[String]): String = {
val it = new Regex.MatchIterator(target, this, groupNames).replacementData
for (matchdata <- it ; replacement <- replacer(matchdata))
it replace replacement
@@ -359,7 +377,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @param replacement The string that will replace the match
* @return The resulting string
*/
- def replaceFirstIn(target: java.lang.CharSequence, replacement: String): String = {
+ def replaceFirstIn(target: CharSequence, replacement: String): String = {
val m = pattern.matcher(target)
m.replaceFirst(replacement)
}
@@ -370,7 +388,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @return The array of strings computed by splitting the
* input around matches of this regexp
*/
- def split(toSplit: java.lang.CharSequence): Array[String] =
+ def split(toSplit: CharSequence): Array[String] =
pattern.split(toSplit)
/** Create a new Regex with the same pattern, but no requirement that
@@ -390,9 +408,11 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
*
* @return The new unanchored regex
*/
- def unanchored: UnanchoredRegex = new Regex(regex, groupNames: _*) with UnanchoredRegex { override def anchored = outer }
+ def unanchored: UnanchoredRegex = new Regex(pattern, groupNames: _*) with UnanchoredRegex { override def anchored = outer }
def anchored: Regex = this
+ def regex: String = pattern.pattern
+
/** The string defining the regular expression */
override def toString = regex
}
@@ -421,7 +441,7 @@ object Regex {
trait MatchData {
/** The source from where the match originated */
- val source: java.lang.CharSequence
+ val source: CharSequence
/** The names of the groups, or an empty sequence if none are defined */
val groupNames: Seq[String]
@@ -459,25 +479,25 @@ object Regex {
/** The char sequence before first character of match,
* or `null` if nothing was matched */
- def before: java.lang.CharSequence =
+ def before: CharSequence =
if (start >= 0) source.subSequence(0, start)
else null
/** The char sequence before first character of match in group `i`,
* or `null` if nothing was matched for that group */
- def before(i: Int): java.lang.CharSequence =
+ def before(i: Int): CharSequence =
if (start(i) >= 0) source.subSequence(0, start(i))
else null
/** Returns char sequence after last character of match,
* or `null` if nothing was matched */
- def after: java.lang.CharSequence =
+ def after: CharSequence =
if (end >= 0) source.subSequence(end, source.length)
else null
/** The char sequence after last character of match in group `i`,
* or `null` if nothing was matched for that group */
- def after(i: Int): java.lang.CharSequence =
+ def after(i: Int): CharSequence =
if (end(i) >= 0) source.subSequence(end(i), source.length)
else null
@@ -501,8 +521,8 @@ object Regex {
/** Provides information about a successful match.
*/
- class Match(val source: java.lang.CharSequence,
- matcher: Matcher,
+ class Match(val source: CharSequence,
+ private[matching] val matcher: Matcher,
val groupNames: Seq[String]) extends MatchData {
/** The index of the first matched character */
@@ -563,7 +583,7 @@ object Regex {
/** A class to step through a sequence of regex matches
*/
- class MatchIterator(val source: java.lang.CharSequence, val regex: Regex, val groupNames: Seq[String])
+ class MatchIterator(val source: CharSequence, val regex: Regex, val groupNames: Seq[String])
extends AbstractIterator[String] with Iterator[String] with MatchData { self =>
protected[Regex] val matcher = regex.pattern.matcher(source)
@@ -602,14 +622,14 @@ object Regex {
/** Convert to an iterator that yields MatchData elements instead of Strings */
def matchData: Iterator[Match] = new AbstractIterator[Match] {
def hasNext = self.hasNext
- def next = { self.next; new Match(source, matcher, groupNames).force }
+ def next = { self.next(); new Match(source, matcher, groupNames).force }
}
/** Convert to an iterator that yields MatchData elements instead of Strings and has replacement support */
private[matching] def replacementData = new AbstractIterator[Match] with Replacement {
def matcher = self.matcher
def hasNext = self.hasNext
- def next = { self.next; new Match(source, matcher, groupNames).force }
+ def next = { self.next(); new Match(source, matcher, groupNames).force }
}
}
@@ -620,7 +640,7 @@ object Regex {
private[matching] trait Replacement {
protected def matcher: Matcher
- private var sb = new java.lang.StringBuffer
+ private val sb = new java.lang.StringBuffer
def replaced = {
val newsb = new java.lang.StringBuffer(sb)
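
The hunks above rework Regex around a pre-compiled java.util.regex.Pattern, split unapplySeq into CharSequence and Match overloads, and expose regex and unanchored. A minimal sketch of how the reworked extractor reads, not part of the patch; the date pattern and input strings are invented for illustration:

    import scala.util.matching.Regex

    val date: Regex = """(\d{4})-(\d{2})-(\d{2})""".r

    // CharSequence overload: anchored by default, the whole input must match
    val iso = "2013-05-01" match {
      case date(y, m, d) => s"$y/$m/$d"
      case _             => "no match"
    }

    // Match overload: a Match produced by the same pattern is reused directly
    val year = date.findFirstMatchIn("born 2013-05-01") map {
      case date(y, _, _) => y
    }

    // unanchored: only a matching subsequence is required, per the new doc above
    val dateSomewhere = date.unanchored
    val found = "born 2013-05-01" match {
      case dateSomewhere(_, _, _) => true
      case _                      => false
    }

    // the new regex accessor recovers the source string from the compiled Pattern
    val src: String = date.regex
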
diff --git a/src/library/scala/util/parsing/combinator/PackratParsers.scala b/src/library/scala/util/parsing/combinator/PackratParsers.scala
index 16705d45f9..cd0907e40f 100644
--- a/src/library/scala/util/parsing/combinator/PackratParsers.scala
+++ b/src/library/scala/util/parsing/combinator/PackratParsers.scala
@@ -8,7 +8,6 @@
package scala.util.parsing.combinator
-import scala.util.parsing.combinator._
import scala.util.parsing.input.{ Reader, Position }
import scala.collection.mutable
import scala.language.implicitConversions
diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/library/scala/util/parsing/combinator/Parsers.scala
index ead444653e..542a781b60 100644
--- a/src/library/scala/util/parsing/combinator/Parsers.scala
+++ b/src/library/scala/util/parsing/combinator/Parsers.scala
@@ -758,7 +758,7 @@ trait Parsers {
if (elems.length == num) Success(elems.toList, in0)
else p0(in0) match {
case Success(x, rest) => elems += x ; applyp(rest)
- case ns: NoSuccess => return ns
+ case ns: NoSuccess => ns
}
applyp(in)
diff --git a/src/library/scala/util/parsing/combinator/lexical/Scanners.scala b/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
index 5c23ad70cd..f6a8daabd9 100644
--- a/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
@@ -6,13 +6,10 @@
** |/ **
\* */
-
-
package scala.util.parsing
package combinator
package lexical
-import token._
import input._
/** This component provides core functionality for lexical parsers.
diff --git a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala b/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
index f3491c096f..2fbc1ec136 100644
--- a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
@@ -50,7 +50,7 @@ class StdLexical extends Lexical with StdTokens {
def identChar = letter | elem('_')
// see `whitespace` in `Scanners`
- def whitespace: Parser[Any] = rep(
+ def whitespace: Parser[Any] = rep[Any](
whitespaceChar
| '/' ~ '*' ~ comment
| '/' ~ '/' ~ rep( chrExcept(EofCh, '\n') )
diff --git a/src/library/scala/util/parsing/combinator/testing/Tester.scala b/src/library/scala/util/parsing/combinator/testing/Tester.scala
index 95730ee292..3cdab2a885 100644
--- a/src/library/scala/util/parsing/combinator/testing/Tester.scala
+++ b/src/library/scala/util/parsing/combinator/testing/Tester.scala
@@ -7,7 +7,6 @@
\* */
package scala.util.parsing.combinator.testing
-import scala.util.parsing.combinator._
import scala.util.parsing.combinator.lexical.Lexical
import scala.util.parsing.combinator.syntactical.TokenParsers
diff --git a/src/library/scala/util/parsing/input/OffsetPosition.scala b/src/library/scala/util/parsing/input/OffsetPosition.scala
index 01d9ea5cb8..6b00af4ce2 100644
--- a/src/library/scala/util/parsing/input/OffsetPosition.scala
+++ b/src/library/scala/util/parsing/input/OffsetPosition.scala
@@ -22,7 +22,7 @@ case class OffsetPosition(source: java.lang.CharSequence, offset: Int) extends P
/** An index that contains all line starts, including first line, and eof. */
private lazy val index: Array[Int] = {
- var lineStarts = new ArrayBuffer[Int]
+ val lineStarts = new ArrayBuffer[Int]
lineStarts += 0
for (i <- 0 until source.length)
if (source.charAt(i) == '\n') lineStarts += (i + 1)
diff --git a/src/library/scala/util/parsing/input/Position.scala b/src/library/scala/util/parsing/input/Position.scala
index 31715bd8da..5e0cbbff5e 100644
--- a/src/library/scala/util/parsing/input/Position.scala
+++ b/src/library/scala/util/parsing/input/Position.scala
@@ -8,13 +8,13 @@
package scala.util.parsing.input
-/** `Position` is the base trait for objects describing a position in a ``document''.
+/** `Position` is the base trait for objects describing a position in a `document`.
*
* It provides functionality for:
* - generating a visual representation of this position (`longString`);
* - comparing two positions (`<`).
*
- * To use this class for a concrete kind of ``document'', implement the `lineContents` method.
+ * To use this class for a concrete kind of `document`, implement the `lineContents` method.
*
* @author Martin Odersky
* @author Adriaan Moors
diff --git a/src/library/scala/util/parsing/json/JSON.scala b/src/library/scala/util/parsing/json/JSON.scala
index 2f450ed864..8f951d519a 100644
--- a/src/library/scala/util/parsing/json/JSON.scala
+++ b/src/library/scala/util/parsing/json/JSON.scala
@@ -7,9 +7,6 @@
\* */
package scala.util.parsing.json
-import scala.util.parsing.combinator._
-import scala.util.parsing.combinator.syntactical._
-import scala.util.parsing.combinator.lexical._
/**
* This object provides a simple interface to the JSON parser class.
diff --git a/src/library/scala/util/parsing/json/Lexer.scala b/src/library/scala/util/parsing/json/Lexer.scala
index 991b5d5c6c..762c1352a7 100644
--- a/src/library/scala/util/parsing/json/Lexer.scala
+++ b/src/library/scala/util/parsing/json/Lexer.scala
@@ -11,7 +11,6 @@
package scala.util.parsing.json
import scala.util.parsing.combinator._
-import scala.util.parsing.combinator.syntactical._
import scala.util.parsing.combinator.lexical._
import scala.util.parsing.input.CharArrayReader.EofCh
diff --git a/src/library/scala/util/parsing/json/Parser.scala b/src/library/scala/util/parsing/json/Parser.scala
index cb87866f07..bf1162000b 100644
--- a/src/library/scala/util/parsing/json/Parser.scala
+++ b/src/library/scala/util/parsing/json/Parser.scala
@@ -12,7 +12,6 @@ package scala.util.parsing.json
import scala.util.parsing.combinator._
import scala.util.parsing.combinator.syntactical._
-import scala.util.parsing.combinator.lexical._
/**
* A marker class for the JSON result types.
diff --git a/src/library/scala/xml/Attribute.scala b/src/library/scala/xml/Attribute.scala
index 0224913cf6..234281163d 100644
--- a/src/library/scala/xml/Attribute.scala
+++ b/src/library/scala/xml/Attribute.scala
@@ -94,7 +94,7 @@ abstract trait Attribute extends MetaData {
sb append key append '='
val sb2 = new StringBuilder()
- Utility.sequenceToXML(value, TopScope, sb2, true)
+ Utility.sequenceToXML(value, TopScope, sb2, stripComments = true)
Utility.appendQuoted(sb2.toString, sb)
}
}
diff --git a/src/library/scala/xml/Elem.scala b/src/library/scala/xml/Elem.scala
index b9e665e292..fc32e45a5e 100755
--- a/src/library/scala/xml/Elem.scala
+++ b/src/library/scala/xml/Elem.scala
@@ -17,7 +17,7 @@ package scala.xml
* @author Burak Emir <bqe@google.com>
*/
object Elem {
- /** Build an Elem, setting its minimizeEmpty property to <code>true</code> if it has no children. Note that this
+ /** Build an Elem, setting its minimizeEmpty property to `true` if it has no children. Note that this
* default may not be exactly what you want, as some XML dialects don't permit some elements to be minimized.
*
* @deprecated This factory method is retained for backward compatibility; please use the other one, with which you
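
The deprecated overload above predates the explicit minimizeEmpty flag; a small usage sketch of the distinction, assuming the five-argument Elem.apply (plus varargs children) available in this codebase:

    import scala.xml.{ Elem, Null, TopScope }

    // no children and minimizeEmpty = true: serialized as a self-closing tag
    Elem(null, "br", Null, TopScope, minimizeEmpty = true)    // <br/>

    // minimizeEmpty = false: dialects that forbid minimization get the end tag
    Elem(null, "br", Null, TopScope, minimizeEmpty = false)   // <br></br>
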
diff --git a/src/library/scala/xml/Equality.scala b/src/library/scala/xml/Equality.scala
index 02db22a78a..20f2405967 100644
--- a/src/library/scala/xml/Equality.scala
+++ b/src/library/scala/xml/Equality.scala
@@ -86,8 +86,8 @@ trait Equality extends scala.Equals {
* to maintain a semblance of order.
*/
override def hashCode() = basisForHashCode.##
- override def equals(other: Any) = doComparison(other, false)
- final def xml_==(other: Any) = doComparison(other, true)
+ override def equals(other: Any) = doComparison(other, blithe = false)
+ final def xml_==(other: Any) = doComparison(other, blithe = true)
final def xml_!=(other: Any) = !xml_==(other)
/** The "blithe" parameter expresses the caller's unconcerned attitude
diff --git a/src/library/scala/xml/Group.scala b/src/library/scala/xml/Group.scala
index 92da2f993f..2ee3941aa1 100644
--- a/src/library/scala/xml/Group.scala
+++ b/src/library/scala/xml/Group.scala
@@ -13,7 +13,7 @@ package scala.xml
* @author Burak Emir
* @version 1.0
*/
-final case class Group(val nodes: Seq[Node]) extends Node {
+final case class Group(nodes: Seq[Node]) extends Node {
override def theSeq = nodes
override def canEqual(other: Any) = other match {
diff --git a/src/library/scala/xml/NamespaceBinding.scala b/src/library/scala/xml/NamespaceBinding.scala
index c7cd9e6b6c..32c378f3ef 100644
--- a/src/library/scala/xml/NamespaceBinding.scala
+++ b/src/library/scala/xml/NamespaceBinding.scala
@@ -38,6 +38,20 @@ case class NamespaceBinding(prefix: String, uri: String, parent: NamespaceBindin
override def toString(): String = sbToString(buildString(_, TopScope))
+ private def shadowRedefined(stop: NamespaceBinding): NamespaceBinding = {
+ def prefixList(x: NamespaceBinding): List[String] =
+ if ((x == null) || (x eq stop)) Nil
+ else x.prefix :: prefixList(x.parent)
+ def fromPrefixList(l: List[String]): NamespaceBinding = l match {
+ case Nil => stop
+ case x :: xs => new NamespaceBinding(x, this.getURI(x), fromPrefixList(xs))
+ }
+ val ps0 = prefixList(this).reverse
+ val ps = ps0.distinct
+ if (ps.size == ps0.size) this
+ else fromPrefixList(ps)
+ }
+
override def canEqual(other: Any) = other match {
case _: NamespaceBinding => true
case _ => false
@@ -53,12 +67,16 @@ case class NamespaceBinding(prefix: String, uri: String, parent: NamespaceBindin
def buildString(stop: NamespaceBinding): String = sbToString(buildString(_, stop))
def buildString(sb: StringBuilder, stop: NamespaceBinding) {
- if (this eq stop) return // contains?
+ shadowRedefined(stop).doBuildString(sb, stop)
+ }
+
+ private def doBuildString(sb: StringBuilder, stop: NamespaceBinding) {
+ if ((this == null) || (this eq stop)) return // contains?
val s = " xmlns%s=\"%s\"".format(
(if (prefix != null) ":" + prefix else ""),
(if (uri != null) uri else "")
)
- parent.buildString(sb append s, stop) // copy(ignore)
+ parent.doBuildString(sb append s, stop) // copy(ignore)
}
}
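
shadowRedefined changes what buildString emits when the same prefix is bound more than once along the parent chain; a rough sketch of the effect, with invented URIs:

    import scala.xml.{ NamespaceBinding, TopScope }

    val outer = NamespaceBinding("p", "urn:example:old", TopScope)
    val inner = NamespaceBinding("p", "urn:example:new", outer)

    // previously both bindings were printed; the shadowed outer definition of "p"
    // is now dropped, so only the innermost URI appears
    inner.buildString(TopScope)   // roughly:  xmlns:p="urn:example:new"
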
diff --git a/src/library/scala/xml/Node.scala b/src/library/scala/xml/Node.scala
index 6b6c962692..7b1a97e8f2 100755
--- a/src/library/scala/xml/Node.scala
+++ b/src/library/scala/xml/Node.scala
@@ -55,7 +55,7 @@ abstract class Node extends NodeSeq {
def scope: NamespaceBinding = TopScope
/**
- * convenience, same as <code>getNamespace(this.prefix)</code>
+ * convenience, same as `getNamespace(this.prefix)`
*/
def namespace = getNamespace(this.prefix)
@@ -64,8 +64,8 @@ abstract class Node extends NodeSeq {
* checks if scope is `'''null'''`.
*
* @param pre the prefix whose namespace name we would like to obtain
- * @return the namespace if <code>scope != null</code> and prefix was
- * found, else <code>null</code>
+ * @return the namespace if `scope != null` and prefix was
+ * found, else `null`
*/
def getNamespace(pre: String): String = if (scope eq null) null else scope.getURI(pre)
@@ -74,8 +74,8 @@ abstract class Node extends NodeSeq {
* Same as `attributes.getValue(key)`
*
* @param key of queried attribute.
- * @return value of <code>UnprefixedAttribute</code> with given key
- * in attributes, if it exists, otherwise <code>null</code>.
+ * @return value of `UnprefixedAttribute` with given key
+ * in attributes, if it exists, otherwise `null`.
*/
final def attribute(key: String): Option[Seq[Node]] = attributes.get(key)
@@ -163,7 +163,7 @@ abstract class Node extends NodeSeq {
/**
* Same as `toString('''false''')`.
*/
- override def toString(): String = buildString(false)
+ override def toString(): String = buildString(stripComments = false)
/**
* Appends qualified name of this node to `StringBuilder`.
diff --git a/src/library/scala/xml/NodeSeq.scala b/src/library/scala/xml/NodeSeq.scala
index decf60dad7..d2efc947b1 100644
--- a/src/library/scala/xml/NodeSeq.scala
+++ b/src/library/scala/xml/NodeSeq.scala
@@ -145,6 +145,11 @@ abstract class NodeSeq extends AbstractSeq[Node] with immutable.Seq[Node] with S
}
}
+ /** Convenience method which returns string text of the named attribute. Use:
+ * - `that \@ "foo"` to get the string text of attribute `"foo"`;
+ */
+ def \@(attributeName: String): String = (this \ ("@" + attributeName)).text
+
override def toString(): String = theSeq.mkString
def text: String = (this map (_.text)).mkString
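
A quick usage sketch of the \@ shorthand added above (the element is invented for illustration):

    val link = <a href="http://scala-lang.org">Scala</a>

    link \@ "href"           // "http://scala-lang.org"
    (link \ "@href").text    // the existing long form that \@ abbreviates
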
diff --git a/src/library/scala/xml/PrettyPrinter.scala b/src/library/scala/xml/PrettyPrinter.scala
index 39ff8c35ec..720fe79b1d 100755
--- a/src/library/scala/xml/PrettyPrinter.scala
+++ b/src/library/scala/xml/PrettyPrinter.scala
@@ -47,7 +47,6 @@ class PrettyPrinter(width: Int, step: Int) {
val tmp = width - cur
if (s.length <= tmp)
return List(Box(ind, s))
- val sb = new StringBuilder()
var i = s indexOf ' '
if (i > tmp || i == -1) throw new BrokenException() // cannot break
@@ -142,13 +141,13 @@ class PrettyPrinter(width: Int, step: Int) {
case Text(s) if s.trim() == "" =>
;
case _:Atom[_] | _:Comment | _:EntityRef | _:ProcInstr =>
- makeBox( ind, node.toString.trim() )
+ makeBox( ind, node.toString().trim() )
case g @ Group(xs) =>
traverse(xs.iterator, pscope, ind)
case _ =>
val test = {
val sb = new StringBuilder()
- Utility.serialize(node, pscope, sb, false)
+ Utility.serialize(node, pscope, sb, stripComments = false)
if (doPreserve(node)) sb.toString
else TextBuffer.fromString(sb.toString).toText(0).data
}
diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala
index 030a89773e..fff27c6e30 100755
--- a/src/library/scala/xml/Utility.scala
+++ b/src/library/scala/xml/Utility.scala
@@ -245,10 +245,10 @@ object Utility extends AnyRef with parsing.TokenTests {
if (children.isEmpty) return
else if (children forall isAtomAndNotText) { // add space
val it = children.iterator
- val f = it.next
+ val f = it.next()
serialize(f, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
while (it.hasNext) {
- val x = it.next
+ val x = it.next()
sb.append(' ')
serialize(x, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
}
@@ -311,14 +311,14 @@ object Utility extends AnyRef with parsing.TokenTests {
while (i < value.length) {
value.charAt(i) match {
case '<' =>
- return "< not allowed in attribute value";
+ return "< not allowed in attribute value"
case '&' =>
val n = getName(value, i+1)
if (n eq null)
- return "malformed entity reference in attribute value ["+value+"]";
+ return "malformed entity reference in attribute value ["+value+"]"
i = i + n.length + 1
if (i >= value.length || value.charAt(i) != ';')
- return "malformed entity reference in attribute value ["+value+"]";
+ return "malformed entity reference in attribute value ["+value+"]"
case _ =>
}
i = i + 1
@@ -333,22 +333,22 @@ object Utility extends AnyRef with parsing.TokenTests {
val it = value.iterator
while (it.hasNext) {
- var c = it.next
+ var c = it.next()
// entity! flush buffer into text node
if (c == '&') {
- c = it.next
+ c = it.next()
if (c == '#') {
- c = it.next
- val theChar = parseCharRef ({ ()=> c },{ () => c = it.next },{s => throw new RuntimeException(s)}, {s => throw new RuntimeException(s)})
+ c = it.next()
+ val theChar = parseCharRef ({ ()=> c },{ () => c = it.next() },{s => throw new RuntimeException(s)}, {s => throw new RuntimeException(s)})
sb.append(theChar)
}
else {
if (rfb eq null) rfb = new StringBuilder()
rfb append c
- c = it.next
+ c = it.next()
while (c != ';') {
rfb.append(c)
- c = it.next
+ c = it.next()
}
val ref = rfb.toString()
rfb.clear()
diff --git a/src/library/scala/xml/XML.scala b/src/library/scala/xml/XML.scala
index d101684459..ec5e5e9e1c 100755
--- a/src/library/scala/xml/XML.scala
+++ b/src/library/scala/xml/XML.scala
@@ -45,8 +45,6 @@ object MinimizeMode extends Enumeration {
val Never = Value
}
-import Source._
-
/** The object `XML` provides constants, and functions to load
* and save XML elements. Use this when data binding is not desired, i.e.
* when XML is handled using `Symbol` nodes.
diff --git a/src/library/scala/xml/dtd/ContentModel.scala b/src/library/scala/xml/dtd/ContentModel.scala
index abc71f55bd..debdf37975 100644
--- a/src/library/scala/xml/dtd/ContentModel.scala
+++ b/src/library/scala/xml/dtd/ContentModel.scala
@@ -11,8 +11,7 @@
package scala.xml
package dtd
-import scala.util.regexp.WordExp
-import scala.util.automata._
+import scala.xml.dtd.impl._
import scala.xml.Utility.sbToString
import PartialFunction._
diff --git a/src/library/scala/xml/dtd/ContentModelParser.scala b/src/library/scala/xml/dtd/ContentModelParser.scala
index ace02193da..ca84bcad70 100644
--- a/src/library/scala/xml/dtd/ContentModelParser.scala
+++ b/src/library/scala/xml/dtd/ContentModelParser.scala
@@ -21,19 +21,19 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
if (token != tok) {
if ((tok == STAR) && (token == END)) // common mistake
scala.sys.error("in DTDs, \n"+
- "mixed content models must be like (#PCDATA|Name|Name|...)*");
+ "mixed content models must be like (#PCDATA|Name|Name|...)*")
else
scala.sys.error("expected "+token2string(tok)+
- ", got unexpected token:"+token2string(token));
+ ", got unexpected token:"+token2string(token))
}
- nextToken
+ nextToken()
}
// s [ '+' | '*' | '?' ]
def maybeSuffix(s: RegExp) = token match {
- case STAR => nextToken; Star(s)
- case PLUS => nextToken; Sequ(s, Star(s))
- case OPT => nextToken; Alt(Eps, s)
+ case STAR => nextToken(); Star(s)
+ case PLUS => nextToken(); Sequ(s, Star(s))
+ case OPT => nextToken(); Alt(Eps, s)
case _ => s
}
@@ -44,45 +44,45 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
case NAME => value match {
case "ANY" => ANY
case "EMPTY" => EMPTY
- case _ => scala.sys.error("expected ANY, EMPTY or '(' instead of " + value );
+ case _ => scala.sys.error("expected ANY, EMPTY or '(' instead of " + value )
}
case LPAREN =>
- nextToken;
- sOpt;
+ nextToken()
+ sOpt()
if (token != TOKEN_PCDATA)
- ELEMENTS(regexp);
+ ELEMENTS(regexp)
else {
- nextToken;
+ nextToken()
token match {
case RPAREN =>
PCDATA
case CHOICE =>
- val res = MIXED(choiceRest(Eps));
- sOpt;
- accept( RPAREN );
- accept( STAR );
+ val res = MIXED(choiceRest(Eps))
+ sOpt()
+ accept( RPAREN )
+ accept( STAR )
res
case _ =>
- scala.sys.error("unexpected token:" + token2string(token) );
+ scala.sys.error("unexpected token:" + token2string(token) )
}
}
case _ =>
- scala.sys.error("unexpected token:" + token2string(token) );
- }
+ scala.sys.error("unexpected token:" + token2string(token) )
+ }
// sopt ::= S?
- def sOpt() = if( token == S ) nextToken;
+ def sOpt() = if( token == S ) nextToken()
// (' S? mixed ::= '#PCDATA' S? ')'
// | '#PCDATA' (S? '|' S? atom)* S? ')*'
// '(' S? regexp ::= cp S? [seqRest|choiceRest] ')' [ '+' | '*' | '?' ]
def regexp: RegExp = {
- val p = particle;
- sOpt;
+ val p = particle
+ sOpt()
maybeSuffix(token match {
- case RPAREN => nextToken; p
+ case RPAREN => nextToken(); p
case CHOICE => val q = choiceRest( p );accept( RPAREN ); q
case COMMA => val q = seqRest( p ); accept( RPAREN ); q
})
@@ -90,24 +90,24 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
// seqRest ::= (',' S? cp S?)+
def seqRest(p: RegExp) = {
- var k = List(p);
+ var k = List(p)
while( token == COMMA ) {
- nextToken;
- sOpt;
- k = particle::k;
- sOpt;
+ nextToken()
+ sOpt()
+ k = particle::k
+ sOpt()
}
Sequ( k.reverse:_* )
}
// choiceRest ::= ('|' S? cp S?)+
def choiceRest( p:RegExp ) = {
- var k = List( p );
+ var k = List( p )
while( token == CHOICE ) {
- nextToken;
- sOpt;
- k = particle::k;
- sOpt;
+ nextToken()
+ sOpt()
+ k = particle::k
+ sOpt()
}
Alt( k.reverse:_* )
}
@@ -115,14 +115,14 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
// particle ::= '(' S? regexp
// | name [ '+' | '*' | '?' ]
def particle = token match {
- case LPAREN => nextToken; sOpt; regexp;
- case NAME => val a = Letter(ElemName(value)); nextToken; maybeSuffix(a)
- case _ => scala.sys.error("expected '(' or Name, got:"+token2string(token));
+ case LPAREN => nextToken(); sOpt(); regexp
+ case NAME => val a = Letter(ElemName(value)); nextToken(); maybeSuffix(a)
+ case _ => scala.sys.error("expected '(' or Name, got:"+token2string(token))
}
// atom ::= name
def atom = token match {
- case NAME => val a = Letter(ElemName(value)); nextToken; a
- case _ => scala.sys.error("expected Name, got:"+token2string(token));
+ case NAME => val a = Letter(ElemName(value)); nextToken(); a
+ case _ => scala.sys.error("expected Name, got:"+token2string(token))
}
}
diff --git a/src/library/scala/xml/dtd/Decl.scala b/src/library/scala/xml/dtd/Decl.scala
index dc4cb93ddf..fd2eaa30ba 100644
--- a/src/library/scala/xml/dtd/Decl.scala
+++ b/src/library/scala/xml/dtd/Decl.scala
@@ -123,7 +123,7 @@ case class ExtDef(extID:ExternalID) extends EntityDef {
/** a parsed entity reference */
case class PEReference(ent:String) extends MarkupDecl {
if( !Utility.isName( ent ))
- throw new IllegalArgumentException("ent must be an XML Name");
+ throw new IllegalArgumentException("ent must be an XML Name")
override def buildString(sb: StringBuilder): StringBuilder =
sb append '%' append ent append ';'
diff --git a/src/library/scala/xml/dtd/DocType.scala b/src/library/scala/xml/dtd/DocType.scala
index 79f8f9fe8b..af7e77e76f 100644
--- a/src/library/scala/xml/dtd/DocType.scala
+++ b/src/library/scala/xml/dtd/DocType.scala
@@ -15,11 +15,10 @@ package dtd
* @author Burak Emir
*
* @param name name of this DOCTYPE
- * @param extID None, or Some(external ID of this doctype)
+ * @param extID NoExternalID or the external ID of this doctype
* @param intSubset sequence of internal subset declarations
*/
-case class DocType(name: String, extID: ExternalID, intSubset: Seq[dtd.Decl])
-{
+case class DocType(name: String, extID: ExternalID, intSubset: Seq[dtd.Decl]) {
if (!Utility.isName(name))
throw new IllegalArgumentException(name+" must be an XML Name")
@@ -29,6 +28,11 @@ case class DocType(name: String, extID: ExternalID, intSubset: Seq[dtd.Decl])
if (intSubset.isEmpty) ""
else intSubset.mkString("[", "", "]")
- """<!DOCTYPE %s %s%s>""".format(name, extID.toString, intString)
+ """<!DOCTYPE %s %s%s>""".format(name, extID.toString(), intString)
}
}
+
+object DocType {
+ /** Creates a doctype with no external id and no internal subset declarations. */
+ def apply(name: String): DocType = apply(name, NoExternalID, Nil)
+}
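
The new single-argument factory pairs with the NoExternalID marker introduced further down in this patch; a small sketch of the intended use:

    import scala.xml.dtd.{ DocType, NoExternalID }

    val short = DocType("html")
    val long  = DocType("html", NoExternalID, Nil)   // what the new overload delegates to
    short == long                                    // true: DocType is a case class
    short.toString                                   // roughly <!DOCTYPE html >
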
diff --git a/src/library/scala/xml/dtd/ElementValidator.scala b/src/library/scala/xml/dtd/ElementValidator.scala
index bfc85f48a9..ad74acb77e 100644
--- a/src/library/scala/xml/dtd/ElementValidator.scala
+++ b/src/library/scala/xml/dtd/ElementValidator.scala
@@ -12,10 +12,12 @@ package scala.xml
package dtd
import PartialFunction._
+import scala.collection.mutable
+
import ContentModel.ElemName
import MakeValidationException._ // @todo other exceptions
-import scala.util.automata._
-import scala.collection.mutable
+
+import impl._
/** validate children and/or attributes of an element
* exceptions are created but not thrown.
@@ -61,7 +63,7 @@ class ElementValidator() extends Function1[Node,Boolean] {
*/
def check(md: MetaData): Boolean = {
val len: Int = exc.length
- var ok = new mutable.BitSet(adecls.length)
+ val ok = new mutable.BitSet(adecls.length)
for (attr <- md) {
def attrStr = attr.value.toString
@@ -97,21 +99,21 @@ class ElementValidator() extends Function1[Node,Boolean] {
*/
def check(nodes: Seq[Node]): Boolean = contentModel match {
case ANY => true
- case EMPTY => getIterable(nodes, false).isEmpty
- case PCDATA => getIterable(nodes, true).isEmpty
+ case EMPTY => getIterable(nodes, skipPCDATA = false).isEmpty
+ case PCDATA => getIterable(nodes, skipPCDATA = true).isEmpty
case MIXED(ContentModel.Alt(branches @ _*)) => // @todo
val j = exc.length
def find(Key: String): Boolean =
branches exists { case ContentModel.Letter(ElemName(Key)) => true ; case _ => false }
- getIterable(nodes, true) map (_.name) filterNot find foreach {
+ getIterable(nodes, skipPCDATA = true) map (_.name) filterNot find foreach {
exc ::= MakeValidationException fromUndefinedElement _
}
(exc.length == j) // - true if no new exception
case _: ELEMENTS =>
dfa isFinal {
- getIterable(nodes, false).foldLeft(0) { (q, e) =>
+ getIterable(nodes, skipPCDATA = false).foldLeft(0) { (q, e) =>
(dfa delta q).getOrElse(e, throw ValidationException("element %s not allowed here" format e))
}
}
diff --git a/src/library/scala/xml/dtd/ExternalID.scala b/src/library/scala/xml/dtd/ExternalID.scala
index 7a7463569e..80ada0caaa 100644
--- a/src/library/scala/xml/dtd/ExternalID.scala
+++ b/src/library/scala/xml/dtd/ExternalID.scala
@@ -14,8 +14,7 @@ package dtd
*
* @author Burak Emir
*/
-abstract class ExternalID extends parsing.TokenTests
-{
+abstract class ExternalID extends parsing.TokenTests {
def quoted(s: String) = {
val c = if (s contains '"') '\'' else '"'
c + s + c
@@ -73,3 +72,14 @@ case class PublicID(publicId: String, systemId: String) extends ExternalID {
/** always empty */
def child = Nil
}
+
+/** A marker used when a `DocType` contains no external id.
+ *
+ * @author Michael Bayne
+ */
+object NoExternalID extends ExternalID {
+ val publicId = null
+ val systemId = null
+
+ override def toString = ""
+}
diff --git a/src/library/scala/xml/dtd/Scanner.scala b/src/library/scala/xml/dtd/Scanner.scala
index 9b64cc61e2..53404e34a7 100644
--- a/src/library/scala/xml/dtd/Scanner.scala
+++ b/src/library/scala/xml/dtd/Scanner.scala
@@ -28,8 +28,8 @@ class Scanner extends Tokens with parsing.TokenTests {
value = ""
it = (s).iterator
token = 1+END
- next
- nextToken
+ next()
+ nextToken()
}
/** scans the next token */
@@ -39,29 +39,29 @@ class Scanner extends Tokens with parsing.TokenTests {
// todo: see XML specification... probably isLetter,isDigit is fine
final def isIdentChar = ( ('a' <= c && c <= 'z')
- || ('A' <= c && c <= 'Z'));
+ || ('A' <= c && c <= 'Z'))
- final def next() = if (it.hasNext) c = it.next else c = ENDCH
+ final def next() = if (it.hasNext) c = it.next() else c = ENDCH
final def acc(d: Char) {
- if (c == d) next else scala.sys.error("expected '"+d+"' found '"+c+"' !");
+ if (c == d) next() else scala.sys.error("expected '"+d+"' found '"+c+"' !")
}
final def accS(ds: Seq[Char]) { ds foreach acc }
final def readToken: Int =
if (isSpace(c)) {
- while (isSpace(c)) c = it.next
+ while (isSpace(c)) c = it.next()
S
} else c match {
- case '(' => next; LPAREN
- case ')' => next; RPAREN
- case ',' => next; COMMA
- case '*' => next; STAR
- case '+' => next; PLUS
- case '?' => next; OPT
- case '|' => next; CHOICE
- case '#' => next; accS( "PCDATA" ); TOKEN_PCDATA
+ case '(' => next(); LPAREN
+ case ')' => next(); RPAREN
+ case ',' => next(); COMMA
+ case '*' => next(); STAR
+ case '+' => next(); PLUS
+ case '?' => next(); OPT
+ case '|' => next(); CHOICE
+ case '#' => next(); accS( "PCDATA" ); TOKEN_PCDATA
case ENDCH => END
case _ =>
if (isNameStart(c)) name; // NAME
@@ -70,7 +70,7 @@ class Scanner extends Tokens with parsing.TokenTests {
final def name = {
val sb = new StringBuilder()
- do { sb.append(c); next } while (isNameChar(c));
+ do { sb.append(c); next() } while (isNameChar(c))
value = sb.toString()
NAME
}
diff --git a/src/library/scala/xml/dtd/ValidationException.scala b/src/library/scala/xml/dtd/ValidationException.scala
index 243db69ab7..15640e2da7 100644
--- a/src/library/scala/xml/dtd/ValidationException.scala
+++ b/src/library/scala/xml/dtd/ValidationException.scala
@@ -33,7 +33,7 @@ object MakeValidationException {
def fromMissingAttribute(allKeys: Set[String]) = {
val sb = new StringBuilder("missing value for REQUIRED attribute")
- if (allKeys.size > 1) sb.append('s');
+ if (allKeys.size > 1) sb.append('s')
allKeys foreach (k => sb append "'%s'".format(k))
new ValidationException(sb.toString())
}
diff --git a/src/library/scala/util/regexp/Base.scala b/src/library/scala/xml/dtd/impl/Base.scala
index 7dbe60a34e..dd277779f6 100644
--- a/src/library/scala/util/regexp/Base.scala
+++ b/src/library/scala/xml/dtd/impl/Base.scala
@@ -8,7 +8,7 @@
-package scala.util.regexp
+package scala.xml.dtd.impl
/** Basic regular expressions.
*
@@ -17,7 +17,7 @@ package scala.util.regexp
*/
@deprecated("This class will be removed", "2.10.0")
-abstract class Base {
+private[dtd] abstract class Base {
type _regexpT <: RegExp
abstract class RegExp {
diff --git a/src/library/scala/util/automata/BaseBerrySethi.scala b/src/library/scala/xml/dtd/impl/BaseBerrySethi.scala
index 3f6f4507a9..99d5ab62e1 100644
--- a/src/library/scala/util/automata/BaseBerrySethi.scala
+++ b/src/library/scala/xml/dtd/impl/BaseBerrySethi.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-package scala.util.automata
+package scala.xml.dtd.impl
-import scala.util.regexp.{ Base }
import scala.collection.{ mutable, immutable }
// todo: replace global variable pos with acc
@@ -18,7 +17,7 @@ import scala.collection.{ mutable, immutable }
* position automata construction (also called ''Berry-Sethi'' or ''Glushkov'').
*/
@deprecated("This class will be removed", "2.10.0")
-abstract class BaseBerrySethi {
+private[dtd] abstract class BaseBerrySethi {
val lang: Base
import lang.{ Alt, Eps, Meta, RegExp, Sequ, Star }
diff --git a/src/library/scala/util/automata/DetWordAutom.scala b/src/library/scala/xml/dtd/impl/DetWordAutom.scala
index 5d709106f8..5c1dcb7ff8 100644
--- a/src/library/scala/util/automata/DetWordAutom.scala
+++ b/src/library/scala/xml/dtd/impl/DetWordAutom.scala
@@ -6,7 +6,7 @@
** |/ **
\* */
-package scala.util.automata
+package scala.xml.dtd.impl
import scala.collection.{ mutable, immutable }
@@ -21,7 +21,7 @@ import scala.collection.{ mutable, immutable }
* @version 1.0
*/
@deprecated("This class will be removed", "2.10.0")
-abstract class DetWordAutom[T <: AnyRef] {
+private[dtd] abstract class DetWordAutom[T <: AnyRef] {
val nstates: Int
val finals: Array[Int]
val delta: Array[mutable.Map[T, Int]]
diff --git a/src/library/scala/util/automata/Inclusion.scala b/src/library/scala/xml/dtd/impl/Inclusion.scala
index 91441bd3a8..0ae78519ca 100644
--- a/src/library/scala/util/automata/Inclusion.scala
+++ b/src/library/scala/xml/dtd/impl/Inclusion.scala
@@ -8,7 +8,7 @@
-package scala.util.automata
+package scala.xml.dtd.impl
/** A fast test of language inclusion between minimal automata.
@@ -18,7 +18,7 @@ package scala.util.automata
* @version 1.0
*/
@deprecated("This class will be removed", "2.10.0")
-trait Inclusion[A <: AnyRef] {
+private[dtd] trait Inclusion[A <: AnyRef] {
val labels: Seq[A]
diff --git a/src/library/scala/util/automata/NondetWordAutom.scala b/src/library/scala/xml/dtd/impl/NondetWordAutom.scala
index 24c6612d0f..8e0b5a3a4c 100644
--- a/src/library/scala/util/automata/NondetWordAutom.scala
+++ b/src/library/scala/xml/dtd/impl/NondetWordAutom.scala
@@ -6,7 +6,7 @@
** |/ **
\* */
-package scala.util.automata
+package scala.xml.dtd.impl
import scala.collection.{ immutable, mutable }
@@ -18,7 +18,7 @@ import scala.collection.{ immutable, mutable }
* the partial function `finals` is defined.
*/
@deprecated("This class will be removed", "2.10.0")
-abstract class NondetWordAutom[T <: AnyRef] {
+private[dtd] abstract class NondetWordAutom[T <: AnyRef] {
val nstates: Int
val labels: Seq[T]
val finals: Array[Int] // 0 means not final
@@ -37,10 +37,10 @@ abstract class NondetWordAutom[T <: AnyRef] {
/** @return true if there are no accepting states */
final def isEmpty = (0 until nstates) forall (x => !isFinal(x))
- /** @return a immutable.BitSet with the next states for given state and label */
+ /** @return an immutable.BitSet with the next states for given state and label */
def next(q: Int, a: T): immutable.BitSet = delta(q).getOrElse(a, default(q))
- /** @return a immutable.BitSet with the next states for given state and label */
+ /** @return an immutable.BitSet with the next states for given state and label */
def next(Q: immutable.BitSet, a: T): immutable.BitSet = next(Q, next(_, a))
def nextDefault(Q: immutable.BitSet): immutable.BitSet = next(Q, default)
diff --git a/src/library/scala/util/regexp/PointedHedgeExp.scala b/src/library/scala/xml/dtd/impl/PointedHedgeExp.scala
index 5c0379b6f8..0b5297510d 100644
--- a/src/library/scala/util/regexp/PointedHedgeExp.scala
+++ b/src/library/scala/xml/dtd/impl/PointedHedgeExp.scala
@@ -8,7 +8,7 @@
-package scala.util.regexp
+package scala.xml.dtd.impl
/** Pointed regular hedge expressions, a useful subclass of regular hedge expressions.
*
@@ -16,7 +16,7 @@ package scala.util.regexp
* @version 1.0
*/
@deprecated("This class will be removed", "2.10.0")
-abstract class PointedHedgeExp extends Base {
+private[dtd] abstract class PointedHedgeExp extends Base {
type _regexpT <: RegExp
type _labelT
diff --git a/src/library/scala/util/automata/SubsetConstruction.scala b/src/library/scala/xml/dtd/impl/SubsetConstruction.scala
index 0ee768587c..d1ea4b6e9e 100644
--- a/src/library/scala/util/automata/SubsetConstruction.scala
+++ b/src/library/scala/xml/dtd/impl/SubsetConstruction.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-package scala.util.automata
+package scala.xml.dtd.impl
import scala.collection.{ mutable, immutable }
@deprecated("This class will be removed", "2.10.0")
-class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
+private[dtd] class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
import nfa.labels
def selectTag(Q: immutable.BitSet, finals: Array[Int]) =
@@ -50,7 +50,7 @@ class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
addFinal(q0) // initial state may also be a final state
while (!rest.isEmpty) {
- val P = rest.pop
+ val P = rest.pop()
// assign a number to this bitset
indexMap = indexMap.updated(P, ix)
invIndexMap = invIndexMap.updated(ix, P)
diff --git a/src/library/scala/util/regexp/SyntaxError.scala b/src/library/scala/xml/dtd/impl/SyntaxError.scala
index 1788fdfb84..b0e0b8b6cd 100644
--- a/src/library/scala/util/regexp/SyntaxError.scala
+++ b/src/library/scala/xml/dtd/impl/SyntaxError.scala
@@ -8,7 +8,7 @@
-package scala.util.regexp
+package scala.xml.dtd.impl
/** This runtime exception is thrown if an attempt to instantiate a
* syntactically incorrect expression is detected.
@@ -17,4 +17,4 @@ package scala.util.regexp
* @version 1.0
*/
@deprecated("This class will be removed", "2.10.0")
-class SyntaxError(e: String) extends RuntimeException(e)
+private[dtd] class SyntaxError(e: String) extends RuntimeException(e)
diff --git a/src/library/scala/util/automata/WordBerrySethi.scala b/src/library/scala/xml/dtd/impl/WordBerrySethi.scala
index 12448f595d..90d7fe760a 100644
--- a/src/library/scala/util/automata/WordBerrySethi.scala
+++ b/src/library/scala/xml/dtd/impl/WordBerrySethi.scala
@@ -6,10 +6,9 @@
** |/ **
\* */
-package scala.util.automata
+package scala.xml.dtd.impl
import scala.collection.{ immutable, mutable }
-import scala.util.regexp.WordExp
/** This class turns a regular expression into a [[scala.util.automata.NondetWordAutom]] using the
* celebrated position automata construction (also called ''Berry-Sethi'' or ''Glushkov'').
@@ -18,10 +17,10 @@ import scala.util.regexp.WordExp
* @version 1.0
*/
@deprecated("This class will be removed", "2.10.0")
-abstract class WordBerrySethi extends BaseBerrySethi {
+private[dtd] abstract class WordBerrySethi extends BaseBerrySethi {
override val lang: WordExp
- import lang.{ Alt, Eps, Letter, Meta, RegExp, Sequ, Star, _labelT }
+ import lang.{ Alt, Eps, Letter, RegExp, Sequ, Star, _labelT }
protected var labels: mutable.HashSet[_labelT] = _
// don't let this fool you, only labelAt is a real, surjective mapping
@@ -140,7 +139,6 @@ abstract class WordBerrySethi extends BaseBerrySethi {
val delta1 = immutable.Map(deltaq.zipWithIndex map (_.swap): _*)
val finalsArr = (0 until pos map (k => finals.getOrElse(k, 0))).toArray // 0 == not final
- val initialsArr = initials.toArray
val deltaArr: Array[mutable.Map[_labelT, immutable.BitSet]] =
(0 until pos map { x =>
@@ -152,7 +150,6 @@ abstract class WordBerrySethi extends BaseBerrySethi {
new NondetWordAutom[_labelT] {
val nstates = pos
val labels = WordBerrySethi.this.labels.toList
- val initials = initialsArr
val finals = finalsArr
val delta = deltaArr
val default = defaultArr
diff --git a/src/library/scala/util/regexp/WordExp.scala b/src/library/scala/xml/dtd/impl/WordExp.scala
index 3c0c2ec156..38f8aea697 100644
--- a/src/library/scala/util/regexp/WordExp.scala
+++ b/src/library/scala/xml/dtd/impl/WordExp.scala
@@ -8,7 +8,7 @@
-package scala.util.regexp
+package scala.xml.dtd.impl
/**
* The class `WordExp` provides regular word expressions.
@@ -39,7 +39,7 @@ package scala.util.regexp
* @version 1.0
*/
@deprecated("This class will be removed", "2.10.0")
-abstract class WordExp extends Base {
+private[dtd] abstract class WordExp extends Base {
abstract class Label
diff --git a/src/library/scala/xml/factory/Binder.scala b/src/library/scala/xml/factory/Binder.scala
index bad4a4ea09..b463fda5ba 100755
--- a/src/library/scala/xml/factory/Binder.scala
+++ b/src/library/scala/xml/factory/Binder.scala
@@ -48,7 +48,7 @@ abstract class Binder(val preserveWS: Boolean) extends ValidatingMarkupHandler {
val old = result
result = new NodeBuffer()
for (m <- x.child) traverse(m)
- result = old &+ elem(0, x.prefix, x.label, x.attributes, x.scope, x.minimizeEmpty, NodeSeq.fromSeq(result)).toList;
+ result = old &+ elem(0, x.prefix, x.label, x.attributes, x.scope, x.minimizeEmpty, NodeSeq.fromSeq(result)).toList
elemEnd(0, x.prefix, x.label)
}
diff --git a/src/library/scala/xml/factory/LoggedNodeFactory.scala b/src/library/scala/xml/factory/LoggedNodeFactory.scala
index cac61acc39..49a6d622a7 100644
--- a/src/library/scala/xml/factory/LoggedNodeFactory.scala
+++ b/src/library/scala/xml/factory/LoggedNodeFactory.scala
@@ -46,7 +46,7 @@ trait LoggedNodeFactory[A <: Node] extends NodeFactory[A] with scala.util.loggin
override def makeNode(pre: String, label: String, attrSeq: MetaData,
scope: NamespaceBinding, children: Seq[Node]): A = {
if (logNode)
- log("[makeNode for "+label+"]");
+ log("[makeNode for "+label+"]")
val hash = Utility.hashCode(pre, label, attrSeq.##, scope.##, children)
@@ -59,26 +59,26 @@ trait LoggedNodeFactory[A <: Node] extends NodeFactory[A] with scala.util.loggin
}
*/
if (!cache.get( hash ).isEmpty && (logCompressLevel >= CACHE))
- log("[cache hit !]");
+ log("[cache hit !]")
super.makeNode(pre, label, attrSeq, scope, children)
}
override def makeText(s: String) = {
if (logText)
- log("[makeText:\""+s+"\"]");
+ log("[makeText:\""+s+"\"]")
super.makeText(s)
}
override def makeComment(s: String): Seq[Comment] = {
if (logComment)
- log("[makeComment:\""+s+"\"]");
+ log("[makeComment:\""+s+"\"]")
super.makeComment(s)
}
override def makeProcInstr(t: String, s: String): Seq[ProcInstr] = {
if (logProcInstr)
- log("[makeProcInstr:\""+t+" "+ s+"\"]");
+ log("[makeProcInstr:\""+t+" "+ s+"\"]")
super.makeProcInstr(t, s)
}
diff --git a/src/library/scala/xml/factory/XMLLoader.scala b/src/library/scala/xml/factory/XMLLoader.scala
index 72e4c51b11..bd18f2a699 100644
--- a/src/library/scala/xml/factory/XMLLoader.scala
+++ b/src/library/scala/xml/factory/XMLLoader.scala
@@ -12,7 +12,7 @@ package factory
import javax.xml.parsers.SAXParserFactory
import parsing.{ FactoryAdapter, NoBindingFactoryAdapter }
-import java.io.{ InputStream, Reader, StringReader, File, FileDescriptor, FileInputStream }
+import java.io.{ InputStream, Reader, File, FileDescriptor }
import java.net.URL
/** Presents collection of XML loading methods which use the parser
@@ -38,7 +38,7 @@ trait XMLLoader[T <: Node]
newAdapter.scopeStack push TopScope
parser.parse(source, newAdapter)
- newAdapter.scopeStack.pop
+ newAdapter.scopeStack.pop()
newAdapter.rootElem.asInstanceOf[T]
}
diff --git a/src/library/scala/xml/include/sax/EncodingHeuristics.scala b/src/library/scala/xml/include/sax/EncodingHeuristics.scala
index 1340689cae..8d8ce5b290 100644
--- a/src/library/scala/xml/include/sax/EncodingHeuristics.scala
+++ b/src/library/scala/xml/include/sax/EncodingHeuristics.scala
@@ -6,10 +6,8 @@
** |/ **
\* */
-
package scala.xml
package include.sax
-import scala.xml.include._
import java.io.InputStream
import scala.util.matching.Regex
diff --git a/src/library/scala/xml/include/sax/Main.scala b/src/library/scala/xml/include/sax/Main.scala
deleted file mode 100644
index 92d4d6ea73..0000000000
--- a/src/library/scala/xml/include/sax/Main.scala
+++ /dev/null
@@ -1,82 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.xml
-package include.sax
-
-import scala.util.control.Exception.{ catching, ignoring }
-import org.xml.sax.XMLReader
-import org.xml.sax.helpers.XMLReaderFactory
-
-@deprecated("Code example will be moved to documentation.", "2.10.0")
-object Main {
- private val namespacePrefixes = "http://xml.org/sax/features/namespace-prefixes"
- private val lexicalHandler = "http://xml.org/sax/properties/lexical-handler"
-
- /**
- * The driver method for xinc
- * Output is written to System.out via Conolse
- * </p>
- *
- * @param args contains the URLs and/or filenames
- * of the documents to be processed.
- */
- def main(args: Array[String]) {
- def saxe[T](body: => T) = catching[T](classOf[SAXException]) opt body
- def fail(msg: String) = System.err.println(msg)
-
- val parser: XMLReader =
- saxe[XMLReader](XMLReaderFactory.createXMLReader()) getOrElse (
- saxe[XMLReader](XMLReaderFactory.createXMLReader(XercesClassName)) getOrElse (
- return fail("Could not find an XML parser")
- )
- )
-
- // Need better namespace handling
- try parser.setFeature(namespacePrefixes, true)
- catch { case e: SAXException => return System.err.println(e) }
-
- if (args.isEmpty)
- return
-
- def dashR = args.size >= 2 && args(0) == "-r"
- val args2 = if (dashR) args drop 2 else args
- val resolver: Option[EntityResolver] =
- if (dashR) None
- else catching(classOf[Exception]) opt {
- val r = Class.forName(args(1)).newInstance().asInstanceOf[EntityResolver]
- parser setEntityResolver r
- r
- } orElse (return fail("Could not load requested EntityResolver"))
-
- for (arg <- args2) {
- try {
- val includer = new XIncludeFilter()
- includer setParent parser
- val s = new XIncluder(System.out, "UTF-8")
- includer setContentHandler s
-
- resolver map (includer setEntityResolver _)
- // SAXException here means will not support comments
- ignoring(classOf[SAXException]) {
- includer.setProperty(lexicalHandler, s)
- s setFilter includer
- }
- includer parse arg
- }
- catch {
- case e: SAXParseException =>
- fail(e.toString)
- fail("Problem in %s at line %d".format(e.getSystemId, e.getLineNumber))
- case e: SAXException =>
- fail(e.toString)
- }
- }
- }
-}
diff --git a/src/library/scala/xml/include/sax/XIncludeFilter.scala b/src/library/scala/xml/include/sax/XIncludeFilter.scala
index 729769366e..9079b5f9c7 100644
--- a/src/library/scala/xml/include/sax/XIncludeFilter.scala
+++ b/src/library/scala/xml/include/sax/XIncludeFilter.scala
@@ -147,10 +147,10 @@ class XIncludeFilter extends XMLFilterImpl {
if (parse equals "text") {
val encoding = atts getValue "encoding"
- includeTextDocument(href, encoding);
+ includeTextDocument(href, encoding)
}
else if (parse equals "xml") {
- includeXMLDocument(href);
+ includeXMLDocument(href)
}
// Need to check this also in DOM and JDOM????
else {
@@ -184,7 +184,7 @@ class XIncludeFilter extends XMLFilterImpl {
}
}
- private var depth = 0;
+ private var depth = 0
override def startDocument() {
level = 0
@@ -240,7 +240,7 @@ class XIncludeFilter extends XMLFilterImpl {
}
locationString = (" in document included from " + publicID
+ " at " + systemID
- + " at line " + line + ", column " + column);
+ + " at line " + line + ", column " + column)
locationString
}
@@ -258,7 +258,7 @@ class XIncludeFilter extends XMLFilterImpl {
*/
private def includeTextDocument(url: String, encoding1: String) {
var encoding = encoding1
- if (encoding == null || encoding.trim().equals("")) encoding = "UTF-8";
+ if (encoding == null || encoding.trim().equals("")) encoding = "UTF-8"
var source: URL = null
try {
val base = bases.peek().asInstanceOf[URL]
@@ -275,7 +275,7 @@ class XIncludeFilter extends XMLFilterImpl {
try {
val uc = source.openConnection()
val in = new BufferedInputStream(uc.getInputStream())
- var encodingFromHeader = uc.getContentEncoding()
+ val encodingFromHeader = uc.getContentEncoding()
var contentType = uc.getContentType()
if (encodingFromHeader != null)
encoding = encodingFromHeader
@@ -284,13 +284,13 @@ class XIncludeFilter extends XMLFilterImpl {
// MIME types are case-insensitive
// Java may be picking this up from file URL
if (contentType != null) {
- contentType = contentType.toLowerCase();
+ contentType = contentType.toLowerCase()
if (contentType.equals("text/xml")
|| contentType.equals("application/xml")
|| (contentType.startsWith("text/") && contentType.endsWith("+xml") )
|| (contentType.startsWith("application/") && contentType.endsWith("+xml"))) {
- encoding = EncodingHeuristics.readEncodingFromStream(in);
- }
+ encoding = EncodingHeuristics.readEncodingFromStream(in)
+ }
}
}
val reader = new InputStreamReader(in, encoding)
diff --git a/src/library/scala/xml/include/sax/XIncluder.scala b/src/library/scala/xml/include/sax/XIncluder.scala
index 5064d6b3d8..8fcd66d4c0 100644
--- a/src/library/scala/xml/include/sax/XIncluder.scala
+++ b/src/library/scala/xml/include/sax/XIncluder.scala
@@ -6,11 +6,9 @@
** |/ **
\* */
-
package scala.xml
package include.sax
-import scala.xml.include._
import scala.collection.mutable
import org.xml.sax.{ ContentHandler, XMLReader, Locator, Attributes }
import org.xml.sax.ext.LexicalHandler
@@ -30,7 +28,7 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit
def startDocument() {
try {
out.write("<?xml version='1.0' encoding='"
- + encoding + "'?>\r\n");
+ + encoding + "'?>\r\n")
}
catch {
case e:IOException =>
@@ -54,16 +52,16 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit
def startElement(namespaceURI: String, localName: String, qualifiedName: String, atts: Attributes) = {
try {
- out.write("<" + qualifiedName);
+ out.write("<" + qualifiedName)
var i = 0; while (i < atts.getLength()) {
- out.write(" ");
- out.write(atts.getQName(i));
- out.write("='");
- val value = atts.getValue(i);
+ out.write(" ")
+ out.write(atts.getQName(i))
+ out.write("='")
+ val value = atts.getValue(i)
// @todo Need to use character references if the encoding
// can't support the character
out.write(scala.xml.Utility.escape(value))
- out.write("'");
+ out.write("'")
i += 1
}
out.write(">")
@@ -89,20 +87,20 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit
def characters(ch: Array[Char], start: Int, length: Int) {
try {
var i = 0; while (i < length) {
- val c = ch(start+i);
- if (c == '&') out.write("&amp;");
- else if (c == '<') out.write("&lt;");
+ val c = ch(start+i)
+ if (c == '&') out.write("&amp;")
+ else if (c == '<') out.write("&lt;")
// This next fix is normally not necessary.
// However, it is required if text contains ]]>
// (The end CDATA section delimiter)
- else if (c == '>') out.write("&gt;");
- else out.write(c);
+ else if (c == '>') out.write("&gt;")
+ else out.write(c)
i += 1
}
}
catch {
case e: IOException =>
- throw new SAXException("Write failed", e);
+ throw new SAXException("Write failed", e)
}
}
@@ -140,8 +138,8 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit
// if this is the source document, output a DOCTYPE declaration
if (entities.isEmpty) {
var id = ""
- if (publicID != null) id = " PUBLIC \"" + publicID + "\" \"" + systemID + '"';
- else if (systemID != null) id = " SYSTEM \"" + systemID + '"';
+ if (publicID != null) id = " PUBLIC \"" + publicID + "\" \"" + systemID + '"'
+ else if (systemID != null) id = " SYSTEM \"" + systemID + '"'
try {
out.write("<!DOCTYPE " + name + id + ">\r\n")
}
diff --git a/src/library/scala/xml/parsing/FactoryAdapter.scala b/src/library/scala/xml/parsing/FactoryAdapter.scala
index 5f776f5299..8659d3f0c4 100644
--- a/src/library/scala/xml/parsing/FactoryAdapter.scala
+++ b/src/library/scala/xml/parsing/FactoryAdapter.scala
@@ -26,7 +26,7 @@ trait ConsoleErrorHandler extends DefaultHandler {
val s = "[%s]:%d:%d: %s".format(
errtype, ex.getLineNumber, ex.getColumnNumber, ex.getMessage)
Console.println(s)
- Console.flush
+ Console.flush()
}
}
@@ -91,7 +91,7 @@ abstract class FactoryAdapter extends DefaultHandler with factory.XMLLoader[Node
else {
var it = ch.slice(offset, offset + length).iterator
while (it.hasNext) {
- val c = it.next
+ val c = it.next()
val isSpace = c.isWhitespace
buffer append (if (isSpace) ' ' else c)
if (isSpace)
@@ -164,17 +164,17 @@ abstract class FactoryAdapter extends DefaultHandler with factory.XMLLoader[Node
*/
override def endElement(uri: String , _localName: String, qname: String): Unit = {
captureText()
- val metaData = attribStack.pop
+ val metaData = attribStack.pop()
// reverse order to get it right
val v = (Iterator continually hStack.pop takeWhile (_ != null)).toList.reverse
val (pre, localName) = splitName(qname)
- val scp = scopeStack.pop
+ val scp = scopeStack.pop()
// create element
rootElem = createNode(pre, localName, metaData, scp, v)
hStack push rootElem
- curTag = tagStack.pop
+ curTag = tagStack.pop()
capture = curTag != null && nodeContainsText(curTag) // root level
}
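The characters hunk above collapses runs of whitespace while copying text into the buffer (unless the current element preserves whitespace). A standalone sketch of that normalisation step over a plain String, for illustration only:

    // Collapse each run of whitespace to a single space, as the adapter does
    // when the current element is not whitespace-preserving.
    def collapseWhitespace(text: String): String = {
      val buffer = new StringBuilder
      var lastWasSpace = false
      for (c <- text) {
        if (!c.isWhitespace) { buffer += c; lastWasSpace = false }
        else if (!lastWasSpace) { buffer += ' '; lastWasSpace = true }
      }
      buffer.toString
    }

    collapseWhitespace("a  b\n\t c")   // yields "a b c"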
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala
index f9ff54d054..d289414c26 100755
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ b/src/library/scala/xml/parsing/MarkupParser.scala
@@ -102,10 +102,10 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
def ch: Char = {
if (nextChNeeded) {
if (curInput.hasNext) {
- lastChRead = curInput.next
+ lastChRead = curInput.next()
pos = curInput.pos
} else {
- val ilen = inpStack.length;
+ val ilen = inpStack.length
//Console.println(" ilen = "+ilen+ " extIndex = "+extIndex);
if ((ilen != extIndex) && (ilen > 0)) {
/** for external source, inpStack == Nil ! need notify of eof! */
@@ -138,10 +138,10 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
* }}} */
def xmlProcInstr(): MetaData = {
xToken("xml")
- xSpace
+ xSpace()
val (md,scp) = xAttributes(TopScope)
if (scp != TopScope)
- reportSyntaxError("no xmlns definitions here, please.");
+ reportSyntaxError("no xmlns definitions here, please.")
xToken('?')
xToken('>')
md
@@ -154,11 +154,11 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
var info_enc: Option[String] = None
var info_stdl: Option[Boolean] = None
- var m = xmlProcInstr()
+ val m = xmlProcInstr()
var n = 0
if (isProlog)
- xSpaceOpt
+ xSpaceOpt()
m("version") match {
case null =>
@@ -199,11 +199,11 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
* // this is a bit more lenient than necessary...
* }}} */
def prolog(): (Option[String], Option[String], Option[Boolean]) =
- prologOrTextDecl(true)
+ prologOrTextDecl(isProlog = true)
/** prolog, but without standalone */
def textDecl(): (Option[String], Option[String]) =
- prologOrTextDecl(false) match { case (x1, x2, _) => (x1, x2) }
+ prologOrTextDecl(isProlog = false) match { case (x1, x2, _) => (x1, x2) }
/** {{{
* [22] prolog ::= XMLDecl? Misc* (doctypedecl Misc*)?
@@ -223,10 +223,10 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
return null
}
- nextch // is prolog ?
+ nextch() // is prolog ?
var children: NodeSeq = null
if ('?' == ch) {
- nextch
+ nextch()
info_prolog = prolog()
doc.version = info_prolog._1
doc.encoding = info_prolog._2
@@ -247,7 +247,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
case _:ProcInstr =>
case _:Comment =>
case _:EntityRef => // todo: fix entities, shouldn't be "special"
- reportSyntaxError("no entity references allowed here");
+ reportSyntaxError("no entity references allowed here")
case s:SpecialNode =>
if (s.toString.trim().length > 0) //non-empty text nodes not allowed
elemCount += 2
@@ -272,7 +272,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
* after construction, this method formalizes that suboptimal reality.
*/
def initialize: this.type = {
- nextch
+ nextch()
this
}
@@ -303,10 +303,8 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
var scope: NamespaceBinding = pscope
var aMap: MetaData = Null
while (isNameStart(ch)) {
- val pos = this.pos
-
val qname = xName
- val _ = xEQ
+ xEQ() // side effect
val value = xAttributeValue()
Utility.prefix(qname) match {
@@ -326,11 +324,11 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
if ((ch != '/') && (ch != '>') && ('?' != ch))
- xSpace
+ xSpace()
}
if(!aMap.wellformed(scope))
- reportSyntaxError( "double attribute");
+ reportSyntaxError( "double attribute")
(aMap,scope)
}
@@ -343,12 +341,12 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
*/
def xEntityValue(): String = {
val endch = ch
- nextch
+ nextch()
while (ch != endch && !eof) {
putChar(ch)
- nextch
+ nextch()
}
- nextch
+ nextch()
val str = cbuf.toString()
cbuf.length = 0
str
@@ -377,13 +375,13 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
val sb: StringBuilder = new StringBuilder()
xToken("--")
while (true) {
- if (ch == '-' && { sb.append(ch); nextch; ch == '-' }) {
+ if (ch == '-' && { sb.append(ch); nextch(); ch == '-' }) {
sb.length = sb.length - 1
- nextch
+ nextch()
xToken('>')
return handle.comment(pos, sb.toString())
} else sb.append(ch)
- nextch
+ nextch()
}
throw FatalError("this cannot happen")
}
@@ -391,10 +389,10 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
/* todo: move this into the NodeBuilder class */
def appendText(pos: Int, ts: NodeBuffer, txt: String): Unit = {
if (preserveWS)
- ts &+ handle.text(pos, txt);
+ ts &+ handle.text(pos, txt)
else
for (t <- TextBuffer.fromString(txt).toText) {
- ts &+ handle.text(pos, t.text);
+ ts &+ handle.text(pos, t.text)
}
}
@@ -404,7 +402,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
def content1(pscope: NamespaceBinding, ts: NodeBuffer) {
ch match {
case '!' =>
- nextch
+ nextch()
if ('[' == ch) // CDATA
ts &+ xCharData
else if ('D' == ch) // doctypedecl, parse DTD // @todo REMOVE HACK
@@ -412,7 +410,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
else // comment
ts &+ xComment
case '?' => // PI
- nextch
+ nextch()
ts &+ xProcInstr
case _ =>
ts &+ element1(pscope) // child
@@ -423,7 +421,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
* content1 ::= '<' content1 | '&' charref ...
* }}} */
def content(pscope: NamespaceBinding): NodeSeq = {
- var ts = new NodeBuffer
+ val ts = new NodeBuffer
var exit = eof
// todo: optimize seq repr.
def done = new NodeSeq { val theSeq = ts.toList }
@@ -437,18 +435,18 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
ch match {
case '<' => // another tag
- nextch; ch match {
+ nextch(); ch match {
case '/' => exit = true // end tag
case _ => content1(pscope, ts)
}
// postcond: xEmbeddedBlock == false!
case '&' => // EntityRef or CharRef
- nextch; ch match {
+ nextch(); ch match {
case '#' => // CharacterRef
- nextch
- val theChar = handle.text(tmppos, xCharRef(() => ch, () => nextch))
- xToken(';');
+ nextch()
+ val theChar = handle.text(tmppos, xCharRef(() => ch, () => nextch()))
+ xToken(';')
ts &+ theChar
case _ => // EntityRef
val n = xName
@@ -472,16 +470,16 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
* }}} */
def externalID(): ExternalID = ch match {
case 'S' =>
- nextch
+ nextch()
xToken("YSTEM")
- xSpace
+ xSpace()
val sysID = systemLiteral()
new SystemID(sysID)
case 'P' =>
- nextch; xToken("UBLIC")
- xSpace
+ nextch(); xToken("UBLIC")
+ xSpace()
val pubID = pubidLiteral()
- xSpace
+ xSpace()
val sysID = systemLiteral()
new PublicID(pubID, sysID)
}
@@ -497,13 +495,13 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
if (this.dtd ne null)
reportSyntaxError("unexpected character (DOCTYPE already defined")
xToken("DOCTYPE")
- xSpace
+ xSpace()
val n = xName
- xSpace
+ xSpace()
//external ID
if ('S' == ch || 'P' == ch) {
extID = externalID()
- xSpaceOpt
+ xSpaceOpt()
}
/* parse external subset of DTD
@@ -520,12 +518,12 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
if ('[' == ch) { // internal subset
- nextch
+ nextch()
/* TODO */
intSubset()
// TODO: do the DTD parsing?? ?!?!?!?!!
xToken(']')
- xSpaceOpt
+ xSpaceOpt()
}
xToken('>')
this.dtd = new DTD {
@@ -582,8 +580,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
var exit = false
while (! exit) {
putChar(ch)
- val opos = pos
- nextch
+ nextch()
exit = eof || ( ch == '<' ) || ( ch == '&' )
}
@@ -600,13 +597,13 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
def systemLiteral(): String = {
val endch = ch
if (ch != '\'' && ch != '"')
- reportSyntaxError("quote ' or \" expected");
- nextch
+ reportSyntaxError("quote ' or \" expected")
+ nextch()
while (ch != endch && !eof) {
putChar(ch)
- nextch
+ nextch()
}
- nextch
+ nextch()
val str = cbuf.toString()
cbuf.length = 0
str
@@ -618,16 +615,16 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
def pubidLiteral(): String = {
val endch = ch
if (ch!='\'' && ch != '"')
- reportSyntaxError("quote ' or \" expected");
- nextch
+ reportSyntaxError("quote ' or \" expected")
+ nextch()
while (ch != endch && !eof) {
putChar(ch)
//println("hello '"+ch+"'"+isPubIDChar(ch))
if (!isPubIDChar(ch))
reportSyntaxError("char '"+ch+"' is not allowed in public id")
- nextch
+ nextch()
}
- nextch
+ nextch()
val str = cbuf.toString
cbuf.length = 0
str
@@ -640,9 +637,9 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
def extSubset(): Unit = {
var textdecl: (Option[String],Option[String]) = null
if (ch == '<') {
- nextch
+ nextch()
if (ch == '?') {
- nextch
+ nextch()
textdecl = textDecl()
} else
markupDecl1()
@@ -653,13 +650,13 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
def markupDecl1() = {
def doInclude() = {
- xToken('['); while(']' != ch) markupDecl(); nextch // ']'
+ xToken('['); while(']' != ch) markupDecl(); nextch() // ']'
}
def doIgnore() = {
- xToken('['); while(']' != ch) nextch; nextch // ']'
+ xToken('['); while(']' != ch) nextch(); nextch() // ']'
}
if ('?' == ch) {
- nextch
+ nextch()
xProcInstr // simply ignore processing instructions!
} else {
xToken('!')
@@ -668,35 +665,35 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
xComment // ignore comments
case 'E' =>
- nextch
+ nextch()
if ('L' == ch) {
- nextch
+ nextch()
elementDecl()
} else
entityDecl()
case 'A' =>
- nextch
+ nextch()
attrDecl()
case 'N' =>
- nextch
+ nextch()
notationDecl()
case '[' if inpStack.length >= extIndex =>
- nextch
- xSpaceOpt
+ nextch()
+ xSpaceOpt()
ch match {
case '%' =>
- nextch
+ nextch()
val ent = xName
xToken(';')
- xSpaceOpt
+ xSpaceOpt()
push(ent)
- xSpaceOpt
+ xSpaceOpt()
val stmt = xName
- xSpaceOpt
+ xSpaceOpt()
stmt match {
// parameter entity
@@ -704,15 +701,15 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
case "IGNORE" => doIgnore()
}
case 'I' =>
- nextch
+ nextch()
ch match {
case 'G' =>
- nextch
+ nextch()
xToken("NORE")
- xSpaceOpt
+ xSpaceOpt()
doIgnore()
case 'N' =>
- nextch
+ nextch()
xToken("NCLUDE")
doInclude()
}
@@ -723,14 +720,14 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
case _ =>
curInput.reportError(pos, "unexpected character '"+ch+"', expected some markupdecl")
while (ch!='>')
- nextch
+ nextch()
}
}
}
def markupDecl(): Unit = ch match {
case '%' => // parameter entity reference
- nextch
+ nextch()
val ent = xName
xToken(';')
if (!isValidating)
@@ -740,20 +737,20 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
//peReference
case '<' =>
- nextch
+ nextch()
markupDecl1()
case _ if isSpace(ch) =>
- xSpace
+ xSpace()
case _ =>
reportSyntaxError("markupdecl: unexpected character '"+ch+"' #" + ch.toInt)
- nextch
+ nextch()
}
/** "rec-xml/#ExtSubset" pe references may not occur within markup declarations
*/
def intSubset() {
//Console.println("(DEBUG) intSubset()")
- xSpace
+ xSpace()
while (']' != ch)
markupDecl()
}
@@ -762,16 +759,16 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
*/
def elementDecl() {
xToken("EMENT")
- xSpace
+ xSpace()
val n = xName
- xSpace
+ xSpace()
while ('>' != ch) {
//Console.println("["+ch+"]")
putChar(ch)
- nextch
+ nextch()
}
//Console.println("END["+ch+"]")
- nextch
+ nextch()
val cmstr = cbuf.toString()
cbuf.length = 0
handle.elemDecl(n, cmstr)
@@ -782,44 +779,44 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
* }}} */
def attrDecl() = {
xToken("TTLIST")
- xSpace
+ xSpace()
val n = xName
- xSpace
+ xSpace()
var attList: List[AttrDecl] = Nil
// later: find the elemDecl for n
while ('>' != ch) {
val aname = xName
- xSpace
+ xSpace()
// could be enumeration (foo,bar) parse this later :-/
while ('"' != ch && '\'' != ch && '#' != ch && '<' != ch) {
if (!isSpace(ch))
cbuf.append(ch)
- nextch
+ nextch()
}
val atpe = cbuf.toString
cbuf.length = 0
val defdecl: DefaultDecl = ch match {
case '\'' | '"' =>
- DEFAULT(false, xAttributeValue())
+ DEFAULT(fixed = false, xAttributeValue())
case '#' =>
- nextch
+ nextch()
xName match {
- case "FIXED" => xSpace ; DEFAULT(true, xAttributeValue())
+ case "FIXED" => xSpace() ; DEFAULT(fixed = true, xAttributeValue())
case "IMPLIED" => IMPLIED
case "REQUIRED" => REQUIRED
}
case _ =>
null
}
- xSpaceOpt
+ xSpaceOpt()
attList ::= AttrDecl(aname, atpe, defdecl)
cbuf.length = 0
}
- nextch
+ nextch()
handle.attListDecl(n, attList.reverse)
}
@@ -828,41 +825,40 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
* }}} */
def entityDecl() = {
var isParameterEntity = false
- var entdef: EntityDef = null
xToken("NTITY")
- xSpace
+ xSpace()
if ('%' == ch) {
- nextch
+ nextch()
isParameterEntity = true
- xSpace
+ xSpace()
}
val n = xName
- xSpace
+ xSpace()
ch match {
case 'S' | 'P' => //sy
val extID = externalID()
if (isParameterEntity) {
- xSpaceOpt
+ xSpaceOpt()
xToken('>')
handle.parameterEntityDecl(n, ExtDef(extID))
} else { // notation?
- xSpace
+ xSpace()
if ('>' != ch) {
xToken("NDATA")
- xSpace
+ xSpace()
val notat = xName
- xSpaceOpt
+ xSpaceOpt()
xToken('>')
handle.unparsedEntityDecl(n, extID, notat)
} else {
- nextch
+ nextch()
handle.parsedEntityDecl(n, ExtDef(extID))
}
}
case '"' | '\'' =>
val av = xEntityValue()
- xSpaceOpt
+ xSpaceOpt()
xToken('>')
if (isParameterEntity)
handle.parameterEntityDecl(n, IntDef(av))
@@ -877,29 +873,29 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
* }}} */
def notationDecl() {
xToken("OTATION")
- xSpace
+ xSpace()
val notat = xName
- xSpace
+ xSpace()
val extID = if (ch == 'S') {
externalID()
}
else if (ch == 'P') {
/** PublicID (without system, only used in NOTATION) */
- nextch
+ nextch()
xToken("UBLIC")
- xSpace
+ xSpace()
val pubID = pubidLiteral()
- xSpaceOpt
+ xSpaceOpt()
val sysID = if (ch != '>')
systemLiteral()
else
- null;
+ null
new PublicID(pubID, sysID)
} else {
- reportSyntaxError("PUBLIC or SYSTEM expected");
+ reportSyntaxError("PUBLIC or SYSTEM expected")
scala.sys.error("died parsing notationdecl")
}
- xSpaceOpt
+ xSpaceOpt()
xToken('>')
handle.notationDecl(notat, extID)
}
@@ -916,7 +912,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
ch
curInput = replacementText(entityName)
- nextch
+ nextch()
}
def pushExternal(systemId: String) {
@@ -927,7 +923,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
ch
curInput = externalSource(systemId)
- nextch
+ nextch()
}
def pop() {
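Most of the MarkupParser changes above enforce one convention: parameterless methods that merely read state stay paren-less, while methods invoked for their side effect (nextch, xSpace, xSpaceOpt, pushExternal, ...) are written and called with empty parens. An illustrative sketch of the convention, with made-up names:

    class Cursor(input: String) {
      private var i = 0
      def ch: Char          = if (i < input.length) input.charAt(i) else '\u0000'  // pure accessor: no parens
      def eof: Boolean      = i >= input.length                                    // pure accessor: no parens
      def nextch(): Unit    = i += 1                                               // side effect: empty parens
      def skipSpace(): Unit = while (!eof && ch.isWhitespace) nextch()             // side effect: empty parens
    }

    val cur = new Cursor("  <a/>")
    cur.skipSpace()   // call sites keep the parens, mirroring xSpace() and nextch() in the diff
    assert(cur.ch == '<')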
diff --git a/src/library/scala/xml/parsing/MarkupParserCommon.scala b/src/library/scala/xml/parsing/MarkupParserCommon.scala
index da640484e0..7bfbcc7fff 100644
--- a/src/library/scala/xml/parsing/MarkupParserCommon.scala
+++ b/src/library/scala/xml/parsing/MarkupParserCommon.scala
@@ -10,7 +10,6 @@ package scala.xml
package parsing
import scala.io.Source
-import scala.xml.dtd._
import scala.annotation.switch
import Utility.Escapes.{ pairs => unescape }
@@ -39,7 +38,7 @@ private[scala] trait MarkupParserCommon extends TokenTests {
*/
protected def xTag(pscope: NamespaceType): (String, AttributesType) = {
val name = xName
- xSpaceOpt
+ xSpaceOpt()
(name, mkAttributes(name, pscope))
}
@@ -50,7 +49,7 @@ private[scala] trait MarkupParserCommon extends TokenTests {
*/
def xProcInstr: ElementType = {
val n = xName
- xSpaceOpt
+ xSpaceOpt()
xTakeUntil(mkProcInstr(_, n, _), () => tmppos, "?>")
}
@@ -78,7 +77,7 @@ private[scala] trait MarkupParserCommon extends TokenTests {
private def takeUntilChar(it: Iterator[Char], end: Char): String = {
val buf = new StringBuilder
- while (it.hasNext) it.next match {
+ while (it.hasNext) it.next() match {
case `end` => return buf.toString
case ch => buf append ch
}
@@ -92,7 +91,7 @@ private[scala] trait MarkupParserCommon extends TokenTests {
if (xName != startName)
errorNoEnd(startName)
- xSpaceOpt
+ xSpaceOpt()
xToken('>')
}
@@ -139,9 +138,9 @@ private[scala] trait MarkupParserCommon extends TokenTests {
val buf = new StringBuilder
val it = attval.iterator.buffered
- while (it.hasNext) buf append (it.next match {
+ while (it.hasNext) buf append (it.next() match {
case ' ' | '\t' | '\n' | '\r' => " "
- case '&' if it.head == '#' => it.next ; xCharRef(it)
+ case '&' if it.head == '#' => it.next() ; xCharRef(it)
case '&' => attr_unescape(takeUntilChar(it, ';'))
case c => c
})
@@ -158,11 +157,11 @@ private[scala] trait MarkupParserCommon extends TokenTests {
Utility.parseCharRef(ch, nextch, reportSyntaxError _, truncatedError _)
def xCharRef(it: Iterator[Char]): String = {
- var c = it.next
- Utility.parseCharRef(() => c, () => { c = it.next }, reportSyntaxError _, truncatedError _)
+ var c = it.next()
+ Utility.parseCharRef(() => c, () => { c = it.next() }, reportSyntaxError _, truncatedError _)
}
- def xCharRef: String = xCharRef(() => ch, () => nextch)
+ def xCharRef: String = xCharRef(() => ch, () => nextch())
/** Create a lookahead reader which does not influence the input */
def lookahead(): BufferedIterator[Char]
@@ -195,20 +194,20 @@ private[scala] trait MarkupParserCommon extends TokenTests {
}
def xToken(that: Char) {
- if (ch == that) nextch
+ if (ch == that) nextch()
else xHandleError(that, "'%s' expected instead of '%s'".format(that, ch))
}
def xToken(that: Seq[Char]) { that foreach xToken }
/** scan [S] '=' [S]*/
- def xEQ() = { xSpaceOpt; xToken('='); xSpaceOpt }
+ def xEQ() = { xSpaceOpt(); xToken('='); xSpaceOpt() }
/** skip optional space S? */
- def xSpaceOpt() = while (isSpace(ch) && !eof) nextch
+ def xSpaceOpt() = while (isSpace(ch) && !eof) nextch()
/** scan [3] S ::= (#x20 | #x9 | #xD | #xA)+ */
def xSpace() =
- if (isSpace(ch)) { nextch; xSpaceOpt }
+ if (isSpace(ch)) { nextch(); xSpaceOpt() }
else xHandleError(ch, "whitespace expected")
/** Apply a function and return the passed value */
@@ -241,7 +240,7 @@ private[scala] trait MarkupParserCommon extends TokenTests {
truncatedError("") // throws TruncatedXMLControl in compiler
sb append ch
- nextch
+ nextch()
}
unreachable
}
@@ -254,7 +253,7 @@ private[scala] trait MarkupParserCommon extends TokenTests {
private def peek(lookingFor: String): Boolean =
(lookahead() take lookingFor.length sameElements lookingFor.iterator) && {
// drop the chars from the real reader (all lookahead + orig)
- (0 to lookingFor.length) foreach (_ => nextch)
+ (0 to lookingFor.length) foreach (_ => nextch())
true
}
}
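The last hunks above expose the scanner primitives the parser composes: xToken consumes an expected character or reports an error, xSpaceOpt skips optional whitespace, and xEQ chains them to scan S? '=' S?. A toy, script-style re-implementation over a String, to make that control flow concrete (the error handling is simplified):

    var pos   = 0
    val input = "  =  value"
    def ch: Char          = if (pos < input.length) input.charAt(pos) else '\u0000'
    def eof: Boolean      = pos >= input.length
    def nextch(): Unit    = pos += 1
    def xSpaceOpt(): Unit = while (!eof && ch.isWhitespace) nextch()
    def xToken(that: Char): Unit =
      if (ch == that) nextch() else sys.error(s"'$that' expected instead of '$ch'")
    def xEQ(): Unit       = { xSpaceOpt(); xToken('='); xSpaceOpt() }

    xEQ()               // scans optional space, '=', optional space
    assert(ch == 'v')   // cursor now sits on the attribute value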
diff --git a/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala b/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
index 0edea043a5..018ae4d2cd 100644
--- a/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
+++ b/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
@@ -50,8 +50,8 @@ abstract class ValidatingMarkupHandler extends MarkupHandler with Logged {
log("advanceDFA(trans): " + trans)
trans.get(ContentModel.ElemName(label)) match {
case Some(qNew) => qCurrent = qNew
- case _ => reportValidationError(pos, "DTD says, wrong element, expected one of "+trans.keys);
- }
+ case _ => reportValidationError(pos, "DTD says, wrong element, expected one of "+trans.keys)
+ }
}
// advance in current automaton
log("[qCurrent = "+qCurrent+" visiting "+label+"]")
@@ -106,7 +106,7 @@ abstract class ValidatingMarkupHandler extends MarkupHandler with Logged {
}
final override def notationDecl(notat: String, extID: ExternalID) {
- decls = NotationDecl(notat, extID) :: decls;
+ decls = NotationDecl(notat, extID) :: decls
}
final override def peReference(name: String) {
diff --git a/src/library/scala/xml/parsing/XhtmlParser.scala b/src/library/scala/xml/parsing/XhtmlParser.scala
index d08cb1fa9c..33b94c9bd7 100644
--- a/src/library/scala/xml/parsing/XhtmlParser.scala
+++ b/src/library/scala/xml/parsing/XhtmlParser.scala
@@ -26,5 +26,5 @@ class XhtmlParser(val input: Source) extends ConstructingHandler with MarkupPars
* @author Burak Emir
*/
object XhtmlParser {
- def apply(source: Source): NodeSeq = new XhtmlParser(source).initialize.document
+ def apply(source: Source): NodeSeq = new XhtmlParser(source).initialize.document()
}
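For context, the one-liner changed above is the public entry point of the XHTML parser. A hedged usage sketch relying only on the XhtmlParser(Source): NodeSeq factory shown in the diff:

    import scala.io.Source
    import scala.xml.parsing.XhtmlParser

    val xhtml = "<html><body><p>hello &amp; goodbye</p></body></html>"
    val nodes = XhtmlParser(Source.fromString(xhtml))   // runs initialize.document() under the hood
    println((nodes \\ "p").text)                        // hello & goodbye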
diff --git a/src/library/scala/xml/persistent/CachedFileStorage.scala b/src/library/scala/xml/persistent/CachedFileStorage.scala
index 916a1a0cf7..fc510b5f18 100644
--- a/src/library/scala/xml/persistent/CachedFileStorage.scala
+++ b/src/library/scala/xml/persistent/CachedFileStorage.scala
@@ -76,8 +76,8 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo
log("[load]\nloading "+theFile)
val src = Source.fromFile(theFile)
log("parsing "+theFile)
- val res = ConstructingParser.fromSource(src,false).document.docElem(0)
- switch
+ val res = ConstructingParser.fromSource(src,preserveWS = false).document.docElem(0)
+ switch()
log("[load done]")
res.child.iterator
}
@@ -94,7 +94,7 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo
// @todo: optimize
val storageNode = <nodes>{ nodes.toList }</nodes>
val w = Channels.newWriter(c, "utf-8")
- XML.write(w, storageNode, "utf-8", true, null)
+ XML.write(w, storageNode, "utf-8", xmlDecl = true, doctype = null)
log("writing to "+theFile)
@@ -102,7 +102,7 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo
c.close
fos.close
dirty = false
- switch
+ switch()
log("[save done]")
}
@@ -112,7 +112,7 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo
log("[run]\nstarting storage thread, checking every "+interval+" ms")
while (true) {
Thread.sleep( this.interval )
- save
+ save()
}
}
@@ -120,6 +120,6 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo
* update. */
def flush() = {
this.dirty = true
- save
+ save()
}
}
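The call sites above gain named arguments for the boolean and nullable parameters (preserveWS, xmlDecl, doctype), which reads much better than bare true/false/null. A small sketch of XML.write used the same way, assuming only the (Writer, Node, encoding, xmlDecl, doctype) parameter shape visible in the diff:

    import java.io.StringWriter
    import scala.xml.XML

    val node = <nodes><n id="1"/></nodes>
    val w    = new StringWriter
    XML.write(w, node, "UTF-8", xmlDecl = true, doctype = null)   // named args make the flags self-documenting
    println(w.toString)   // an XML declaration followed by the serialized element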
diff --git a/src/library/scala/xml/persistent/SetStorage.scala b/src/library/scala/xml/persistent/SetStorage.scala
index 20a5bb6767..d16c71c9f7 100644
--- a/src/library/scala/xml/persistent/SetStorage.scala
+++ b/src/library/scala/xml/persistent/SetStorage.scala
@@ -20,16 +20,14 @@ import java.io.File
*/
class SetStorage(file: File) extends CachedFileStorage(file) {
- private var theSet: mutable.HashSet[Node] = new mutable.HashSet[Node]
+ private val theSet = mutable.HashSet[Node]()
// initialize
{
val it = super.initialNodes
dirty = it.hasNext
- for(x <- it) {
- theSet += x;
- }
+ theSet ++= it
}
/* forwarding methods to hashset*/
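The SetStorage change swaps an element-by-element loop for ++=, the bulk-insertion method mutable collections provide. A tiny equivalent, with placeholder nodes standing in for super.initialNodes:

    import scala.collection.mutable
    import scala.xml.Node

    val theSet   = mutable.HashSet[Node]()
    val incoming = Iterator[Node](<a/>, <b/>)   // placeholder for super.initialNodes
    theSet ++= incoming                         // same effect as: for (x <- incoming) theSet += x
    assert(theSet.size == 2)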
diff --git a/src/library/scala/xml/pull/XMLEventReader.scala b/src/library/scala/xml/pull/XMLEventReader.scala
index 428c305055..3f9584fd04 100755
--- a/src/library/scala/xml/pull/XMLEventReader.scala
+++ b/src/library/scala/xml/pull/XMLEventReader.scala
@@ -139,10 +139,10 @@ trait ProducerConsumerIterator[T >: Null] extends Iterator[T] {
def hasNext = !eos && (buffer != null || fillBuffer)
def next() = {
- if (eos) throw new NoSuchElementException("ProducerConsumerIterator")
- if (buffer == null) fillBuffer
+ if (eos) throw new NoSuchElementException("ProducerConsumerIterator")
+ if (buffer == null) fillBuffer()
- drainBuffer
+ drainBuffer()
}
def available() = isElement(buffer) || isElement(queue.peek)
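ProducerConsumerIterator above is the blocking queue behind XMLEventReader's pull API. A hedged consumption sketch, assuming the usual scala.xml.pull event classes:

    import scala.io.Source
    import scala.xml.pull.{ XMLEventReader, EvElemStart, EvElemEnd, EvText }

    val reader = new XMLEventReader(Source.fromString("<root><item>42</item></root>"))
    while (reader.hasNext) reader.next() match {          // next() drains the producer-consumer buffer
      case EvElemStart(_, label, _, _) => println(s"start <$label>")
      case EvText(text)                => println(s"text  $text")
      case EvElemEnd(_, label)         => println(s"end   </$label>")
      case _                           => ()              // comments, processing instructions, entity refs
    }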
diff --git a/src/library/scala/xml/transform/BasicTransformer.scala b/src/library/scala/xml/transform/BasicTransformer.scala
index 1402ccd6aa..e427071177 100644
--- a/src/library/scala/xml/transform/BasicTransformer.scala
+++ b/src/library/scala/xml/transform/BasicTransformer.scala
@@ -53,7 +53,7 @@ abstract class BasicTransformer extends Function1[Node,Node]
def apply(n: Node): Node = {
val seq = transform(n)
if (seq.length > 1)
- throw new UnsupportedOperationException("transform must return single node for root");
+ throw new UnsupportedOperationException("transform must return single node for root")
else seq.head
}
}
diff --git a/src/library/scala/xml/transform/RewriteRule.scala b/src/library/scala/xml/transform/RewriteRule.scala
index 1dca495a10..13210a6fd2 100644
--- a/src/library/scala/xml/transform/RewriteRule.scala
+++ b/src/library/scala/xml/transform/RewriteRule.scala
@@ -11,8 +11,8 @@
package scala.xml
package transform
-/** a RewriteRule, when applied to a term, yields either
- * the resulting of rewriting or the term itself it the rule
+/** A RewriteRule, when applied to a term, yields either
+ * the result of rewriting the term or the term itself if the rule
* is not applied.
*
* @author Burak Emir
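The corrected scaladoc above is easiest to see with a concrete rule paired with RuleTransformer from the same package; apply (from the BasicTransformer hunk just before this one) then returns the rewritten root. The element names below are made up:

    import scala.xml._
    import scala.xml.transform.{ RewriteRule, RuleTransformer }

    // Rule: drop every <secret> element, leave everything else untouched.
    val stripSecrets = new RewriteRule {
      override def transform(n: Node): Seq[Node] = n match {
        case e: Elem if e.label == "secret" => NodeSeq.Empty
        case other                          => other
      }
    }

    val doc     = <config><host>example.org</host><secret>hunter2</secret></config>
    val cleaned = new RuleTransformer(stripSecrets)(doc)   // BasicTransformer.apply: single rewritten root
    println(cleaned)                                       // <config><host>example.org</host></config>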
diff --git a/src/manual/scala/man1/scalac.scala b/src/manual/scala/man1/scalac.scala
index 13b1fd58e0..1c0c7c4a96 100644
--- a/src/manual/scala/man1/scalac.scala
+++ b/src/manual/scala/man1/scalac.scala
@@ -146,13 +146,6 @@ object scalac extends Command {
CmdOption("sourcepath", Argument("path")),
"Specify location(s) of source files."),
Definition(
- CmdOptionBound("target:", Argument("target")),
- SeqPara(
- "Specify which backend to use (" & Mono("jvm-1.5," &
- "msil") & ").",
- "The default value is " & Mono("\"jvm-1.5\"") & " (was " &
- Mono("\"jvm-1.4\"") & " up to Scala version 2.6.1).")),
- Definition(
CmdOption("toolcp", Argument("path")),
"Add to the runner classpath."),
Definition(
@@ -182,19 +175,6 @@ object scalac extends Command {
Section("Advanced Options",
DefinitionList(
Definition(
- CmdOption("Xassem-extdirs", Argument("dirs")),
- "(Requires " & Mono("-target:msil") &
- ") List of directories containing assemblies." &
- " default:" & Mono("lib") & "."),
- Definition(
- CmdOption("Xassem-name", Argument("file")),
- "(Requires " & Mono("-target:msil") &
- ") Name of the output assembly."),
- Definition(
- CmdOption("Xassem-path", Argument("path")),
- "(Requires " & Mono("-target:msil") &
- ") List of assemblies referenced by the program."),
- Definition(
CmdOption("Xcheck-null"),
"Warn upon selection of nullable reference"),
Definition(
@@ -290,10 +270,6 @@ object scalac extends Command {
CmdOption("Xsource-reader", Argument("classname")),
"Specify a custom method for reading source files."),
Definition(
- CmdOption("Xsourcedir", Argument("path")),
- "(Requires " & Mono("-target:msil") &
- ") Mirror source folder structure in output directory.."),
- Definition(
CmdOption("Xverify"),
"Verify generic signatures in generated bytecode."),
Definition(
diff --git a/src/manual/scala/tools/docutil/resources/index.html b/src/manual/scala/tools/docutil/resources/index.html
index aaef94d400..18e2343930 100644
--- a/src/manual/scala/tools/docutil/resources/index.html
+++ b/src/manual/scala/tools/docutil/resources/index.html
@@ -8,7 +8,7 @@
<meta http-equiv="Content-Style-Type" content="text/css"/>
<meta http-equiv="Content-Language" content="en"/>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1"/>
- <meta name="Copyright" content="(C) 2002-2012 LAMP/EPFL"/>
+ <meta name="Copyright" content="(C) 2002-2013 LAMP/EPFL"/>
<meta name="Language" content="en"/>
<meta name="Description" content="The Scala Programming Language"/>
<meta name="Author" content="Stephane Micheloud"/>
@@ -180,7 +180,7 @@
<hr/>
<div style="font-size:x-small;">
- Copyright (c) 2002-2012 <a href="http://www.epfl.ch/">EPFL</a>,
+ Copyright (c) 2002-2013 <a href="http://www.epfl.ch/">EPFL</a>,
Lausanne, unless specified otherwise.<br/>
All rights reserved.
</div>
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Assembly.java b/src/msil/ch/epfl/lamp/compiler/msil/Assembly.java
deleted file mode 100644
index 59bbeee3a4..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/Assembly.java
+++ /dev/null
@@ -1,253 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.util.Table;
-import ch.epfl.lamp.compiler.msil.util.Table.AssemblyDef;
-import ch.epfl.lamp.compiler.msil.util.Table.ModuleDef;
-
-import java.util.HashMap;
-import java.util.Iterator;
-import java.io.File;
-import java.io.FileNotFoundException;
-
-/**
- * Defines an Assembly, which is a reusable, versionable, and self-describing
- * building block of a common language runtime application.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class Assembly extends CustomAttributeProvider {
-
- //##########################################################################
- // static members
-
- // all the assemblies
- public static final HashMap assemblies = new HashMap();
-
- /** Loads an assembly from the specified path. */
- public static Assembly LoadFrom(String assemblyFileName) {
- File afile = new File(assemblyFileName);
- return LoadFrom(afile.getParentFile(), afile.getName());
- }
-
- /** Loads an assembly with the given name from the given directory. */
- public static Assembly LoadFrom(File dir, String name) {
- File file = null;
- PEFile pefile = null;
-// try {
-// if (dir == null)
-// dir = new File(".");
-// dir = dir.getCanonicalFile();
-// } catch (java.io.IOException e) {}
-
- if (name.toUpperCase().endsWith(".EXE") || name.toUpperCase().endsWith(".DLL")) {
- file = new File(dir, name);
- pefile = getPEFile(file);
- name = name.substring(0, name.length() - 4);
- }
-
- File adir = pefile == null ? new File(dir, name) : null;
-
- if (pefile == null) {
- file = new File(dir, name + ".dll");
- pefile = getPEFile(file);
- }
- if (pefile == null) {
- file = new File(dir, name + ".DLL");
- pefile = getPEFile(file);
- }
- if (pefile == null && adir.exists()) {
- file = new File(adir, name + ".dll");
- pefile = getPEFile(file);
- }
- if (pefile == null && adir.exists()) {
- file = new File(adir, name + ".DLL");
- pefile = getPEFile(file);
- }
-
- if (pefile == null) {
- file = new File(dir, name + ".exe");
- pefile = getPEFile(file);
- }
- if (pefile == null) {
- file = new File(dir, name + ".EXE");
- pefile = getPEFile(file);
- }
- if (pefile == null && adir.exists()) {
- file = new File(adir, name + ".exe");
- pefile = getPEFile(file);
- }
- if (pefile == null && adir.exists()) {
- file = new File(adir, name + ".EXE");
- pefile = getPEFile(file);
- }
-
- if (pefile == null)
- throw new RuntimeException("Cannot find assembly " + new File(dir, name));
- return getPEAssembly(pefile);
- }
-
- private static Assembly getPEAssembly(PEFile pefile) {
- AssemblyDef assem = pefile.AssemblyDef;
- if (assem == null)
- throw new RuntimeException("File " + pefile
- + " does not contain a manifest");
- assem.readRow(1);
- String name = pefile.getString(assem.Name);
- Assembly a = (Assembly) assemblies.get(name);
- if (a != null) {
- return a;
- }
-
- AssemblyName an = new AssemblyName();
- an.Name = pefile.getString(assem.Name);
- an.Version = new Version(assem.MajorVersion, assem.MinorVersion,
- assem.BuildNumber, assem.RevisionNumber);
- an.SetPublicKey(pefile.getBlob(assem.PublicKey));
- return new PEAssembly(pefile, an);
- }
-
- protected static PEFile getPEFile(File f) {
- PEFile pefile = null;
- try { pefile = new PEFile(f.getAbsolutePath()); }
- catch (FileNotFoundException e) {}
- catch (RuntimeException e) {
- java.lang.System.out.println("swallowed RuntimeException at getPEFile");
- }
- return pefile;
- }
-
- //##########################################################################
- // public fields
-
- /** The entry point of this assembly. */
- public MethodInfo EntryPoint;
-
- /** the display name of the assembly. */
- public final String FullName;
-
- //##########################################################################
- // constructor
-
- protected Assembly(AssemblyName an, boolean external) {
- assemblyName = an;
- FullName = an.toString();
- if(external) {
- assemblies.put(an.Name, this);
- }
- //System.out.println("assemblies after adding the current one: " + assemblies);
- }
-
- protected Assembly(AssemblyName an) {
- this(an, false);
- }
-
- protected static Assembly getAssembly(String name) {
- return (Assembly) assemblies.get(name);
- }
-
- //##########################################################################
- // instrumental methods
-
- /** @return the file from which this assembly was loaded. */
- public File getFile() {
- throw new RuntimeException("Not supported");
- }
-
- /** Gets the specified module in this assembly. Works on filenames. */
- public Module GetModule(String name) {
- initModules();
- return (Module)modulesMap.get(name);
- }
-
- /** Get all the modules of the assembly. */
- public Module[] GetModules() {
- initModules();
- return (Module[])modulesMap.values().
- toArray(new Module[modulesMap.size()]);
- }
-
- /** Get the corresponding type. */
- public Type GetType(String name) {
- initModules();
- Iterator modules = modulesMap.values().iterator();
- Type t = null;
- while (t == null && modules.hasNext()) {
- t = ((Module)modules.next()).GetType(name);
- }
- return t;
- }
-
- /** @return an array of all types defined in the assembly. */
- public synchronized Type[] GetTypes() {
- if (types != null)
- return (Type[])types.clone();
- initModules();
-
- Iterator modules = modulesMap.values().iterator();
- Type[] newTypes = ((Module)modules.next()).GetTypes();
- while (modules.hasNext()) {
- Module module = (Module)modules.next();
- Type[] mtypes = module.GetTypes();
- Type[] oldTypes = newTypes;
- newTypes = new Type[oldTypes.length + mtypes.length];
- System.arraycopy(oldTypes, 0, newTypes, 0, oldTypes.length);
- System.arraycopy(mtypes, 0, newTypes, oldTypes.length, mtypes.length);
- }
- types = newTypes;
- return (Type[]) types.clone();
- }
-
- public AssemblyName GetName() {
- return assemblyName;
- }
-
- public String toString() {
- return FullName;
- }
-
- //##########################################################################
- // protected members
-
- // the assembly name
- protected final AssemblyName assemblyName;
-
- // all the types exported by the assembly
- protected Type[] types = null;
-
- // the module defined in this assembly (only one right now)
- private final HashMap/*<String, Module>*/ modulesMap = new HashMap();
-
- protected void addType(Type type) {
- Type.addType(type);
- }
-
- protected void addModule(String name, Module module) {
- modulesMap.put(name, module);
- }
-
- private boolean initModules = true;
- protected final void initModules() {
- if (initModules) {
- loadModules();
- initModules = false;
- }
- }
-
- /** used for lazy construction of the Assembly. */
- protected abstract void loadModules();
-
- void dumpTypes() {
- Type[] types = GetTypes();
- for (int i = 0; i < types.length; i++)
- System.out.println(types[i]);
- }
-
- //##########################################################################
-
-} // class Assembly
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/AssemblyName.java b/src/msil/ch/epfl/lamp/compiler/msil/AssemblyName.java
deleted file mode 100644
index acdcb32e33..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/AssemblyName.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import javax.crypto.Mac;
-
-import java.security.MessageDigest;
-
-import ch.epfl.lamp.compiler.msil.util.Table;
-
-/**
- * Fully describes an assembly's unique identity.
- * Right now it's only the name
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class AssemblyName {
-
- //##########################################################################
- // public interface
-
- /** The simple, unencrypted name of the assembly. */
- public String Name;
-
- /**
- * Gets or sets the major, minor, revision, and build numbers
- * of the assembly.
- */
- public Version Version;
-
- /**
- * Gets a strong name consisting of a public key, a given name,
- * and version parts.
- */
- public byte[] GetPublicKeyToken() {
- return publicKeyToken == null ? null : (byte[]) publicKeyToken.clone();
- }
-
- /**
- * Sets a strong name consisting of a public key, a given name,
- * and version parts.
- */
- public void SetPublicKeyToken(byte[] key) {
- this.publicKeyToken = key.length == 0 ? null : (byte[]) key.clone();
- }
-
- /**
- * Returns the public key identifying the originator of the assembly.
- */
- public byte[] GetPublicKey() {
- return publicKey == null ? null : (byte[]) publicKey.clone();
- }
-
- /**
- * Sets the public key identifying the originator of the assembly.
- */
- public void SetPublicKey(byte[] key) {
- if (key.length > 0) {
- this.publicKey = (byte[]) key.clone();
- byte[] hash = sha.digest(key);
- byte[] keyToken = new byte[8];
- for (int i = 0; i < keyToken.length; i++)
- keyToken[i] = hash[hash.length - 1 - i];
- this.publicKeyToken = keyToken;
- //System.out.println("Pubic key and key token of assembly " + this + ":");
- //System.out.println("\tPublic key = " + Table.bytes2hex(key));
- //System.out.println("\tKey token = " + Table.bytes2hex(keyToken));
- }
- }
-
- public String toString() {
- return Name + ", Version=" + Version;
- }
-
- //##########################################################################
-
- private byte[] publicKeyToken;
-
- private byte[] publicKey;
-
- private static final MessageDigest sha;
- static {
- MessageDigest md = null;
- try {
- md = MessageDigest.getInstance("SHA");
- } catch (java.security.NoSuchAlgorithmException e) {}
- sha = md;
- }
-
- //##########################################################################
-
-} // class AssemblyName
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Attribute.java b/src/msil/ch/epfl/lamp/compiler/msil/Attribute.java
deleted file mode 100644
index 0f2c4e6764..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/Attribute.java
+++ /dev/null
@@ -1,654 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.util.Signature;
-
-import java.util.Map;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.Iterator;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.io.UnsupportedEncodingException;
-
-/**
- * Describes custom attribute instances.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class Attribute {
-
- //##########################################################################
-
- private final ConstructorInfo constr;
-
- private final byte[] value;
-
- Attribute(ConstructorInfo constr, byte[] value) {
- assert constr != null;
- this.constr = constr;
- assert value != null : constr.toString();
- this.value = value;
- }
-
- //##########################################################################
- // public interface
-
- /** @return the type (class) of the attribute. */
- public Type GetType() { return constr.DeclaringType; }
-
- /** @return the constructor of this attribute. */
- public ConstructorInfo getConstructor() {
- return constr;
- }
-
- /** @return the Blob with serialized constructor & named arguments. */
- public byte[] getValue() {
- byte[] value = new byte[this.value.length];
- System.arraycopy(this.value, 0, value, 0, value.length);
- return value;
- }
-
- /**@return an array with the arguments to the attribute's constructor. */
- public Object[] getConstructorArguments() {
- parseBlob();
- Object[] cas = new Object[constrArgs.length];
- System.arraycopy(constrArgs, 0, cas, 0, cas.length);
- return cas;
- }
-
- /** @return the named argument with the given name. */
- public NamedArgument getNamedArgument(String name) {
- return (NamedArgument)namedArgs.get(name);
- }
-
- /** @return an array of all named arguments for this attribute. */
- public NamedArgument[] getNamedArguments() {
- NamedArgument[] nargs =
- (NamedArgument[])namedArgs.values().toArray(NamedArgument.EMPTY);
- return nargs;
- }
-
- /** @return a string representation of this attribute. */
- public String toString() {
- parseBlob();
- ParameterInfo[] params = constr.GetParameters();
- assert params.length == constrArgs.length : this.constr;
- StringBuffer str = new StringBuffer();
- str.append('[');
- str.append(constr.DeclaringType.FullName);
- str.append('(');
- for (int i = 0; i < constrArgs.length; i++) {
- if (i > 0)
- str.append(", ");
- Type t = params[i].ParameterType;
- if (t.IsEnum()) {
- str.append('(');
- str.append(t.FullName);
- str.append(')');
- }
- formatValue(str, constrArgs[i]);
- }
- NamedArgument[] nargs = getNamedArguments();
- for (int i = 0; i < nargs.length; i++) {
- str.append(", ").append(nargs[i]);
- }
- str.append(")]");
- return str.toString();
- }
-
- //#########################################################################
-
- private static final Map type2id = new HashMap();
- private static final Map id2type = new HashMap();
- static {
- map("Boolean", Signature.ELEMENT_TYPE_BOOLEAN);
- map("Char", Signature.ELEMENT_TYPE_CHAR);
- map("SByte", Signature.ELEMENT_TYPE_I1);
- map("Byte", Signature.ELEMENT_TYPE_U1);
- map("Int16", Signature.ELEMENT_TYPE_I2);
- map("UInt16", Signature.ELEMENT_TYPE_U2);
- map("Int32", Signature.ELEMENT_TYPE_I4);
- map("UInt32", Signature.ELEMENT_TYPE_U4);
- map("Int64", Signature.ELEMENT_TYPE_I8);
- map("UInt64", Signature.ELEMENT_TYPE_U8);
- map("Single", Signature.ELEMENT_TYPE_R4);
- map("Double", Signature.ELEMENT_TYPE_R8);
- map("String", Signature.ELEMENT_TYPE_STRING);
- map("Type", Signature.X_ELEMENT_TYPE_TYPE);
- map("Object", Signature.ELEMENT_TYPE_OBJECT);
- }
- private static void map(String type, int id) {
- Type t = Type.GetType("System." + type);
- assert type != null : type + " -> " + id;
- Integer i = new Integer(id);
- type2id.put(t, i);
- id2type.put(i, t);
- }
- private static int getTypeId(Type type) {
- Integer id = (Integer)type2id.get(type);
- assert id != null : type;
- return id.intValue();
- }
-
- private Object[] constrArgs;
- private Map namedArgs;
- private ByteBuffer buf;
-
- private void parseBlob() {
- try { parseBlob0(); }
- catch (RuntimeException e) {
- throw new RuntimeException(PEFile.bytes2hex(value), e);
- }
- }
-
- private void parseBlob0() {
- if (buf != null)
- return;
- buf = ByteBuffer.wrap(value); // Sec. 23.3 in Partition II of CLR Spec.
- buf.order(ByteOrder.LITTLE_ENDIAN);
-
- short sig = buf.getShort(); // Prolog
- assert sig == 1 : PEFile.bytes2hex(value);
- ParameterInfo[] params = constr.GetParameters();
- constrArgs = new Object[params.length];
- for (int i = 0; i < params.length; i++) {
- constrArgs[i] = parseFixedArg(params[i].ParameterType); // FixedArg
- }
-
- int ncount = buf.getShort(); // NumNamed
- namedArgs = new LinkedHashMap();
- for (int i = 0; i < ncount; i++) {
- int designator = buf.get(); // designator one of 0x53 (FIELD) or 0x54 (PROPERTY)
- assert designator == Signature.X_ELEMENT_KIND_FIELD
- || designator == Signature.X_ELEMENT_KIND_PROPERTY
- : "0x" + PEFile.byte2hex(designator);
- Type type = parseFieldOrPropTypeInNamedArg(); // FieldOrPropType
- String name = parseString(); // FieldOrPropName
- Object value = parseFixedArg(type); // FixedArg
- NamedArgument narg =
- new NamedArgument(designator, name, type, value);
- namedArgs.put(name, narg);
- }
- }
-
- private Object parseFixedArg(Type type) {
- if (type.IsArray())
- return parseArray(type.GetElementType());
- else
- return parseElem(type);
- }
-
- /* indicates whether the "simple" case (the other is "enum") of the first row
- in the Elem production should be taken. */
- private boolean isSimpleElem(Type type) {
- if(!type2id.containsKey(type)) return false;
- int id = getTypeId(type);
- switch(id){
- case Signature.ELEMENT_TYPE_STRING:
- case Signature.X_ELEMENT_TYPE_TYPE:
- case Signature.ELEMENT_TYPE_OBJECT:
- return false;
- default:
- return true;
- }
- }
-
- /* indicates whether the second row in the Elem production
- should be taken (and more specifically, "string" case within that row). */
- private boolean isStringElem(Type type) {
- if(!type2id.containsKey(type)) return false;
- int id = getTypeId(type);
- return id == Signature.ELEMENT_TYPE_STRING;
- }
-
- /* indicates whether the second row in the Elem production
- should be taken (and more specifically, "type" case within that row). */
- private boolean isTypeElem(Type type) {
- if(!type2id.containsKey(type)) return false;
- int id = getTypeId(type);
- return id == Signature.X_ELEMENT_TYPE_TYPE;
- }
-
- /* indicates whether the third row in the Elem production
- should be taken (and more specifically, "boxed" case within that row). */
- private boolean isSystemObject(Type type) {
- if(!type2id.containsKey(type)) return false;
- int id = getTypeId(type);
- return id == Signature.ELEMENT_TYPE_OBJECT;
- }
-
- private Object parseElem(Type type) {
- // simple or enum
- if (isSimpleElem(type)) return parseVal(getTypeId(type));
- if (type.IsEnum()) return parseVal(getTypeId(type.getUnderlyingType()));
- // string or type
- if (isStringElem(type)) return parseString();
- if (isTypeElem(type)) return getTypeFromSerString();
- // boxed valuetype, please notice that a "simple" boxed valuetype is preceded by 0x51
- if (isSystemObject(type)) {
- Type boxedT = parse0x51();
- if(boxedT.IsEnum()) {
- return new BoxedArgument(boxedT, parseVal(getTypeId(boxedT.getUnderlyingType())));
- } else {
- return new BoxedArgument(boxedT, parseVal(getTypeId(boxedT))); // TODO dead code?
- }
- } else {
- Type boxedT = parseType();
- return parseVal(getTypeId(boxedT));
- }
- }
-
- /* this does not parse an Elem, but a made-up production (Element). Don't read too much into this method name! */
- private Object parseVal(int id) {
- switch (id) {
- case Signature.ELEMENT_TYPE_BOOLEAN:
- return new Boolean(buf.get() == 0 ? false : true);
- case Signature.ELEMENT_TYPE_CHAR:
- return new Character(buf.getChar());
- case Signature.ELEMENT_TYPE_I1:
- case Signature.ELEMENT_TYPE_U1:
- return new Byte(buf.get()); // TODO U1 not the same as I1
- case Signature.ELEMENT_TYPE_I2:
- case Signature.ELEMENT_TYPE_U2:
- return new Short(buf.getShort()); // TODO U2 not the same as I2
- case Signature.ELEMENT_TYPE_I4:
- case Signature.ELEMENT_TYPE_U4:
- return new Integer(buf.getInt()); // TODO U4 not the same as I4
- case Signature.ELEMENT_TYPE_I8:
- case Signature.ELEMENT_TYPE_U8:
- return new Long(buf.getLong()); // TODO U8 not the same as I8
- case Signature.ELEMENT_TYPE_R4:
- return new Float(buf.getFloat());
- case Signature.ELEMENT_TYPE_R8:
- return new Double(buf.getDouble());
- case Signature.X_ELEMENT_TYPE_TYPE:
- return getTypeFromSerString();
- case Signature.ELEMENT_TYPE_STRING:
- return parseString();
- default:
- throw new RuntimeException("Shouldn't have called parseVal with: " + id);
- }
- }
-
- private Object parseArray(Type type) {
- if (type.IsEnum())
- return parseArray(type.getUnderlyingType());
- return parseArray(getTypeId(type));
- }
-
- private Object parseArray(int id) {
- switch (id) {
- case Signature.ELEMENT_TYPE_BOOLEAN:
- return parseBooleanArray();
- case Signature.ELEMENT_TYPE_CHAR:
- return parseCharArray();
- case Signature.ELEMENT_TYPE_I1:
- case Signature.ELEMENT_TYPE_U1: // TODO U1 not the same as I1
- return parseByteArray();
- case Signature.ELEMENT_TYPE_I2:
- case Signature.ELEMENT_TYPE_U2:
- return parseShortArray();
- case Signature.ELEMENT_TYPE_I4:
- case Signature.ELEMENT_TYPE_U4:
- return parseIntArray();
- case Signature.ELEMENT_TYPE_I8:
- case Signature.ELEMENT_TYPE_U8:
- return parseLongArray();
- case Signature.ELEMENT_TYPE_R4:
- return parseFloatArray();
- case Signature.ELEMENT_TYPE_R8:
- return parseDoubleArray();
- case Signature.ELEMENT_TYPE_STRING:
- return parseStringArray();
- case Signature.X_ELEMENT_TYPE_ENUM:
- return parseArray(getTypeFromSerString());
- default:
- throw new RuntimeException("Unknown type id: " + id);
- }
- }
-
- private Type parseType() { // FieldOrPropType, Sec. 23.3 in Partition II of CLR Spec.
- int id = buf.get();
- switch (id) {
- case Signature.ELEMENT_TYPE_SZARRAY:
- Type arrT = Type.mkArray(parseType(), 1);
- return arrT;
- case Signature.X_ELEMENT_TYPE_ENUM:
- String enumName = parseString();
- Type enumT = Type.getType(enumName);
- return enumT;
- default:
- Type t = (Type)id2type.get(new Integer(id));
- assert t != null : PEFile.byte2hex(id);
- return t;
- }
- }
-
- private Type parse0x51() {
- int id = buf.get();
- switch (id) {
- case 0x51:
- return parse0x51();
- case Signature.ELEMENT_TYPE_SZARRAY:
- Type arrT = Type.mkArray(parseType(), 1);
- return arrT;
- case Signature.X_ELEMENT_TYPE_ENUM:
- String enumName = parseString();
- Type enumT = Type.getType(enumName);
- return enumT;
- default:
- Type t = (Type)id2type.get(new Integer(id));
- assert t != null : PEFile.byte2hex(id);
- return t;
- }
- }
-
-
- private Type parseFieldOrPropTypeInNamedArg() { // FieldOrPropType, Sec. 23.3 in Partition II of CLR Spec.
- int id = buf.get();
- switch (id) {
- case 0x51:
- return (Type)(id2type.get(new Integer(Signature.ELEMENT_TYPE_OBJECT)));
- // TODO remove case Signature.ELEMENT_TYPE_SZARRAY:
- // Type arrT = Type.mkArray(parseType(), 1);
- // return arrT;
- case Signature.X_ELEMENT_TYPE_ENUM:
- String enumName = parseString();
- Type enumT = Type.getType(enumName); // TODO this "lookup" only covers already-loaded assemblies.
- return enumT; // TODO null as return value (due to the above) spells trouble later.
- default:
- Type t = (Type)id2type.get(new Integer(id));
- assert t != null : PEFile.byte2hex(id);
- return t;
- }
- }
-
- private Type getTypeFromSerString() {
- String typename = parseString();
- int i = typename.indexOf(',');
- /* fully qualified assembly name follows. Just strip it on the assumption that
- the assembly is referenced in the externs and the type will be found. */
- String name = (i < 0) ? typename : typename.substring(0, i);
- Type t = Type.GetType(name);
- if (t == null && i > 0) {
- int j = typename.indexOf(',', i + 1);
- if (j > 0) {
- String assemName = typename.substring(i + 1, j);
- try {
- Assembly.LoadFrom(assemName);
- } catch (Throwable e) {
- throw new RuntimeException(typename, e);
- }
- t = Type.GetType(name);
- }
- }
- assert t != null : typename;
- return t;
- }
-
- private boolean[] parseBooleanArray() {
- boolean[] arr = new boolean[buf.getInt()];
- for (int i = 0; i < arr.length; i++)
- arr[i] = buf.get() == 0 ? false : true;
- return arr;
- }
-
- private char[] parseCharArray() {
- char[] arr = new char[buf.getInt()];
- for (int i = 0; i < arr.length; i++)
- arr[i] = buf.getChar();
- return arr;
- }
-
- private byte[] parseByteArray() {
- byte[] arr = new byte[buf.getInt()];
- for (int i = 0; i < arr.length; i++)
- arr[i] = buf.get();
- return arr;
- }
-
- private short[] parseShortArray() {
- short[] arr = new short[buf.getInt()];
- for (int i = 0; i < arr.length; i++)
- arr[i] = buf.getShort();
- return arr;
- }
-
- private int[] parseIntArray() {
- int[] arr = new int[buf.getInt()];
- for (int i = 0; i < arr.length; i++)
- arr[i] = buf.getInt();
- return arr;
- }
-
- private long[] parseLongArray() {
- long[] arr = new long[buf.getInt()];
- for (int i = 0; i < arr.length; i++)
- arr[i] = buf.getLong();
- return arr;
- }
-
- private float[] parseFloatArray() {
- float[] arr = new float[buf.getInt()];
- for (int i = 0; i < arr.length; i++)
- arr[i] = buf.getFloat();
- return arr;
- }
-
- private double[] parseDoubleArray() {
- double[] arr = new double[buf.getInt()];
- for (int i = 0; i < arr.length; i++)
- arr[i] = buf.getDouble();
- return arr;
- }
-
- private String[] parseStringArray() {
- String[] arr = new String[buf.getInt()];
- for (int i = 0; i < arr.length; i++)
- arr[i] = parseString();
- return arr;
- }
-
- private String parseString() { // SerString convention
- String str = null;
- int length = parseLength();
- if (length < 0)
- return null;
- try { str = new String(value, buf.position(), length, "UTF-8" ); }
- catch (UnsupportedEncodingException e) { throw new Error(e); }
- buf.position(buf.position() + length);
- return str;
- }
-
- private int getByte() {
- return (buf.get() + 0x0100) & 0xff;
- }
-
- public int parseLength() {
- int length = getByte();
- // check for invalid length format: the first, second or third
- // most significant bits should be 0; if all are 1 the length is invalid.
- if ((length & 0xe0) == 0xe0)
- return -1;
- if ((length & 0x80) != 0) {
- length = ((length & 0x7f) << 8) | getByte();
- if ((length & 0x4000) != 0)
- length = ((length & 0x3fff) << 16) | (getByte()<<8) | getByte();
- }
- return length;
- }
-
- //##########################################################################
- private static void formatValue(StringBuffer str, Object o) {
- Class c = (o == null) ? null : o.getClass();
- if (c == null) {
- str.append("<null>");
- } else if (c == String.class) {
- str.append('"');
- str.append(o);
- str.append('"');
- } else if (c == Character.class) {
- str.append('\'');
- str.append(o);
- str.append('\'');
- } else if (c == boolean[].class) {
- str.append("new boolean[] {");
- boolean[] arr = (boolean[])o;
- for (int i = 0; i < arr.length; i++) {
- if (i > 0) str.append(", ");
- str.append(arr[i]);
- }
- str.append('}');
- } else if (c == char[].class) {
- str.append("new short[] {");
- short[] arr = (short[])o;
- for (int i = 0; i < arr.length; i++) {
- if (i > 0) str.append(", ");
- str.append(arr[i]);
- }
- str.append('}');
- } else if (c == byte[].class) {
- str.append("new byte[] {");
- byte[] arr = (byte[])o;
- for (int i = 0; i < arr.length; i++) {
- if (i > 0) str.append(", ");
- str.append(arr[i]);
- }
- str.append('}');
- } else if (c == short[].class) {
- str.append("new short[] {");
- short[] arr = (short[])o;
- for (int i = 0; i < arr.length; i++) {
- if (i > 0) str.append(", ");
- str.append(arr[i]);
- }
- str.append('}');
- } else if (c == int[].class) {
- str.append("new int[] {");
- int[] arr = (int[])o;
- for (int i = 0; i < arr.length; i++) {
- if (i > 0) str.append(", ");
- str.append(arr[i]);
- }
- str.append('}');
- } else if (c == long[].class) {
- str.append("new long[] {");
- long[] arr = (long[])o;
- for (int i = 0; i < arr.length; i++) {
- if (i > 0) str.append(", ");
- str.append(arr[i]);
- }
- str.append('}');
- } else if (c == float[].class) {
- str.append("new float[] {");
- float[] arr = (float[])o;
- for (int i = 0; i < arr.length; i++) {
- if (i > 0) str.append(", ");
- str.append(arr[i]);
- }
- str.append('}');
- } else if (c == double[].class) {
- str.append("new double[] {");
- double[] arr = (double[])o;
- for (int i = 0; i < arr.length; i++) {
- if (i > 0) str.append(", ");
- str.append(arr[i]);
- }
- str.append('}');
- } else if (c == String[].class) {
- str.append("new String[] {");
- String[] arr = (String[])o;
- for (int i = 0; i < arr.length; i++) {
- if (i > 0) str.append(", ");
- formatValue(str, arr[i]);
- }
- str.append('}');
- } else if (o instanceof Type) {
- str.append("typeof(");
- str.append(o);
- str.append(")");
- } else
- str.append(o);
- }
-
- //##########################################################################
-
- /** Represents named arguments (assigned outside of the constructor)
- * of a custom attribute
- */
- public static class NamedArgument {
-
- /** Designates if the named argument corresponds to a field or property.
- * Possible values:
- * Signature.X_ELEMENT_KIND_FIELD = 0x53
- * Signature.X_ELEMENT_KIND_PROPERTY = 0x54
- */
- public final int designator;
-
- /** The name of the field/property. */
- public final String name;
-
- /** Type of the field/property. */
- public final Type type;
-
- /** The value for the field/property. */
- public final Object value;
-
- /** An empty array NamedArgument. */
- public static final NamedArgument[] EMPTY = new NamedArgument[0];
-
- public NamedArgument(int designator, String name,Type type,Object value)
- {
- this.designator = designator;
- this.name = name;
- this.type = type;
- this.value = value;
- }
-
- /** @return <b>true</b> if the named argument specifies a field;
- * <b>false<b> otherwise.
- */
- public boolean isField() {
- return designator == Signature.X_ELEMENT_KIND_FIELD;
- }
-
- /** @return <b>true</b> if the named argument specifies a property;
- * <b>false<b> otherwise.
- */
- public boolean isProperty() {
- return designator == Signature.X_ELEMENT_KIND_PROPERTY;
- }
-
- /** @return a string representation of the named argument. */
- public String toString() {
- StringBuffer str = new StringBuffer(name);
- str.append(" = ");
- if (type.IsEnum())
- str.append('(').append(type.FullName).append(')');
- formatValue(str, value);
- return str.toString();
- }
- }
-
- //##########################################################################
-
- public static class BoxedArgument {
- public final Type type;
- public final Object value;
- public BoxedArgument(Type type, Object value) {
- this.type = type; this.value = value;
- }
- public String toString() {
- return "(" + type.FullName + ")" + value;
- }
- }
-
- //##########################################################################
-
-} // class Attribute
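
(Editorial sketch, not part of the deleted sources: how the NamedArgument helper above was meant to be used. It assumes a class importing ch.epfl.lamp.compiler.msil.*, that the Signature constant referenced in the javadoc is on the classpath, and that mscorlib has been loaded so the Type lookup succeeds.)

    static void namedArgumentDemo() {
        // build a named (property) argument of a custom attribute
        Type stringType = Type.GetType("System.String");
        Attribute.NamedArgument arg = new Attribute.NamedArgument(
            Signature.X_ELEMENT_KIND_PROPERTY, "Message", stringType, "hello");
        System.out.println(arg.isProperty());  // true
        System.out.println(arg);               // e.g. Message = "hello" (exact form depends on formatValue)
    }
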
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/BindingFlags.java b/src/msil/ch/epfl/lamp/compiler/msil/BindingFlags.java
deleted file mode 100644
index cac2319b50..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/BindingFlags.java
+++ /dev/null
@@ -1,169 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Specifies flags that control binding and the way in which
- * the search for members and types is conducted by reflection.
- *
- * Note: You must specify Instance or Static along with Public or NonPublic
- * or no members will be returned.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class BindingFlags {
-
- //##########################################################################
-
- // disallows extending the class;
- private BindingFlags() {}
-
- /**
- * Specifies no binding flag.
- */
- public static final int Default = 0x0000;
-
- /**
- * Specifies that the case of the member name should not be considered
- * when binding.
- */
- public static final int IgnoreCase = 0x0001;
-
- /**
- * Specifies that only members declared at the level of the supplied type's
- * hierarchy should be considered. Inherited members are not considered.
- */
- public static final int DeclaredOnly = 0x0002;
-
- /**
- * Specifies that instance members are to be included in the search.
- */
- public static final int Instance = 0x0004;
-
- /**
- * Specifies that static members are to be included in the search.
- */
- public static final int Static = 0x0008;
-
- /**
- * Specifies that public members are to be included in the search.
- */
- public static final int Public = 0x0010;
-
- /**
- * Specifies that non-public members are to be included in the search.
- */
- public static final int NonPublic = 0x0020;
-
- /**
- * Specifies that static members up the hierarchy should be returned.
- * Static members include fields, methods, events, and properties.
- * Nested types are not returned.
- */
- public static final int FlattenHierarchy = 0x0040;
-
- /**
- * Specifies that a method is to be invoked. This may not be a constructor
- * or a type initializer.
- */
- public static final int InvokeMethod = 0x0100;
-
- /**
- * Specifies that Reflection should create an instance of
- * the specified type. Calls the constructor that matches
- * the given arguments. The supplied member name is ignored.
- * If the type of lookup is not specified, (Instance | Public)
- * will apply. It is not possible to call a type initializer.
- */
- public static final int CreateInstance = 0x0200;
-
- /**
- * Specifies that the value of the specified field should be returned.
- */
- public static final int GetField = 0x0400;
-
- /**
- * Specifies that the value of the specified field should be set.
- */
- public static final int SetField = 0x0800;
-
- /**
- * Specifies that the value of the specified property should be returned.
- */
- public static final int GetProperty = 0x1000;
-
- /**
- * Specifies that the value of the specified property should be set.
- * For COM properties, specifying this binding flag is equivalent to
- * specifying PutDispProperty and PutRefDispProperty.
- */
- public static final int SetProperty = 0x2000;
-
- /**
- * Specifies that the PROPPUT member on a COM object should be invoked.
- * PROPPUT specifies a property-setting function that uses a value.
- * Use PutDispProperty if a property has both PROPPUT and PROPPUTREF
- * and you need to distinguish which one is called.
- */
- public static final int PutDispProperty = 0x4000;
-
-
- /**
- * Specifies that the PROPPUTREF member on a COM object should be invoked.
- * PROPPUTREF specifies a property-setting function that uses a reference
- * instead of a value. Use PutRefDispProperty if a property has both
- * PROPPUT and PROPPUTREF and you need to distinguish which one is called.
- */
- public static final int PutRefDispProperty = 0x8000;
-
- /**
- * Specifies that types of the supplied arguments must exactly match
- * the types of the corresponding formal parameters. Reflection
- * throws an exception if the caller supplies a non-null Binder object,
- * since that implies that the caller is supplying BindToXXX
- * implementations that will pick the appropriate method.
- * Reflection models the accessibility rules of the common type system.
- * For example, if the caller is in the same assembly, the caller
- * does not need special permissions for internal members. Otherwise,
- * the caller needs ReflectionPermission. This is consistent with
- * lookup of members that are protected, private, and so on.
- * The general principle is that ChangeType should perform only
- * widening coercions, which never lose data. An example of a
- * widening coercion is coercing a value that is a 32-bit signed integer
- * to a value that is a 64-bit signed integer. This is distinguished
- * from a narrowing coercion, which may lose data. An example of
- * a narrowing coercion is coercing a 64-bit signed integer to
- * a 32-bit signed integer.
- * The default binder ignores this flag, while custom binders can
- * implement the semantics of this flag.
- */
- public static final int ExactBinding = 0x10000;
-
- /**
- * Used in COM interop to specify that the return value of the member
- * can be ignored.
- */
- public static final int IgnoreReturn = 0x100000;
-
- /**
- * Returns the set of members whose parameter count matches the number
- * of supplied arguments. This binding flag is used for methods with
- * parameters that have default values and methods with variable arguments
- * (varargs). This flag should only be used with Type.InvokeMember.
- * Parameters with default values are used only in calls where trailing
- * arguments are omitted. They must be the last arguments.
- */
- public static final int OptionalParamBinding = 0x40000;
-
- /**
- * Not implemented.
- */
- public static final int SuppressChangeType = 0x20000;
-
- //##########################################################################
-
-} // class BindingFlags
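
(Editorial sketch, not part of the deleted file: the flags above are plain int bit masks, so a lookup mask is built by OR-ing them, pairing Instance or Static with Public or NonPublic as the class comment requires.)

    static int declaredPublicInstanceMembers() {
        // Instance/Static must be combined with Public/NonPublic,
        // otherwise no members are returned (see the class comment above)
        return BindingFlags.Instance | BindingFlags.Public | BindingFlags.DeclaredOnly;
    }
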
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/CallingConventions.java b/src/msil/ch/epfl/lamp/compiler/msil/CallingConventions.java
deleted file mode 100644
index 50bf9fb5d5..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/CallingConventions.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-
-/**
- * Calling conventions
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class CallingConventions {
-
- //########################################################################
-
- /**
- * Specifies the default calling convention as determined by the
- * common language runtime.
- */
- public static final short Standard = (short) 0x0001;
-
- /**
- * Specifies the calling convention for methods with variable arguments.
- */
- public static final short VarArgs = (short) 0x0002;
-
- /**
- * Specifies that either the Standard or the VarArgs calling
- * convention may be used.
- */
- public static final short Any = Standard | VarArgs;
-
- /**
- * Specifies an instance or virtual method (not a static method).
- * At run-time, the called method is passed a pointer to the target
- * object as its first argument (the this pointer). The signature
- * stored in metadata does not include the type of this first argument,
- * because the method is known and its owner class can be discovered
- * from metadata.
- */
- public static final short HasThis = (short) 0x0020;
-
- /**
- * Specifies that the signature is a function-pointer signature,
- * representing a call to an instance or virtual method (not a static
- * method). If ExplicitThis is set, HasThis must also be set. The first
- * argument passed to the called method is still a this pointer, but the
- * type of the first argument is now unknown. Therefore, a token that
- * describes the type (or class) of the this pointer is explicitly stored
- * into its metadata signature.
- */
- public static final short ExplicitThis = (short) 0x0040;
-
- //########################################################################
-
- private CallingConventions() {}
-
- public static String toString(int callConv) {
- StringBuffer s = new StringBuffer();
-
- if ((callConv & HasThis) != 0) {
- s.append("instance");
- if ((callConv & ExplicitThis) != 0)
- s.append(" explicit");
- }
-
- return s.toString();
- }
-
- //##########################################################################
-
-} // class CallingConventions
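
(Editorial sketch: what the toString helper above produces for an ordinary instance-method signature; assumed to sit in a class importing ch.epfl.lamp.compiler.msil.*.)

    static void callingConventionDemo() {
        short cc = (short) (CallingConventions.Standard | CallingConventions.HasThis);
        // Standard itself prints nothing; HasThis yields "instance"
        System.out.println(CallingConventions.toString(cc));  // prints "instance"
    }
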
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ConstructedType.java b/src/msil/ch/epfl/lamp/compiler/msil/ConstructedType.java
deleted file mode 100644
index 8c82cb4876..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/ConstructedType.java
+++ /dev/null
@@ -1,48 +0,0 @@
-package ch.epfl.lamp.compiler.msil;
-
-import java.util.Arrays;
-
-/* The only reason for ConstructedType to extend Type is complying with existing code
- (e.g., caseFieldBuilder in ILPrinterVisitor) expecting a Type.
- */
-public class ConstructedType extends Type {
-
- public final Type instantiatedType;
- public final Type[] typeArgs;
-
- public ConstructedType(Type instantiatedType, Type[] typeArgs) {
- super(instantiatedType.Module, instantiatedType.Attributes, "", null, null, null, instantiatedType.auxAttr /*AuxAttr.None*/ , null);
- this.instantiatedType = instantiatedType;
- this.typeArgs = typeArgs;
- }
-
- public String toString() {
- String res = instantiatedType.toString() + "[";
- for (int i = 0; i < typeArgs.length; i++) {
- res = res + typeArgs[i].toString();
- if(i + 1 < typeArgs.length) {
- res = res + ", ";
- }
- }
- return res + "]";
- }
-
-
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
-
- ConstructedType that = (ConstructedType) o;
-
- if (!instantiatedType.equals(that.instantiatedType)) return false;
- if (!Arrays.equals(typeArgs, that.typeArgs)) return false;
-
- return true;
- }
-
- public int hashCode() {
- int result = instantiatedType.hashCode();
- result = 31 * result + Arrays.hashCode(typeArgs);
- return result;
- }
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ConstructorInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/ConstructorInfo.java
deleted file mode 100644
index 69f5d6d32a..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/ConstructorInfo.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Discovers the attributes of a class constructor and provides
- * access to constructor metadata.
- * ConstructorInfo is used to discover the attributes of a constructor
- * as well as to invoke a constructor. Objects are created by invoking
- * either the GetConstructors or GetConstructor method of a Type object.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class ConstructorInfo extends MethodBase {
- //##########################################################################
-
- public final int MemberType() { return MemberTypes.Constructor; }
-
- public final boolean IsConstructor() { return true; }
-
- protected static final String CTOR = ".ctor";
- protected static final String CCTOR = ".cctor";
- protected static final ConstructorInfo[] EMPTY_ARRAY = new ConstructorInfo[0];
-
- protected static String getName(int attrs) {
- return (attrs & MethodAttributes.Static) == 0 ? CTOR : CCTOR;
- }
-
- /** Public constructors */
-
- public ConstructorInfo(Type declType, int attrs, Type[] paramTypes) {
- super(getName(attrs), declType, attrs, paramTypes);
- assert declType != null : "Owner can't be 'null' for a constructor!";
- }
-
- public ConstructorInfo(Type declType, int attrs, ParameterInfo[] params)
- {
- super(getName(attrs), declType, attrs, params);
- assert declType != null : "Owner can't be 'null' for a constructor!";
- }
-
-
- public String toString() {
- return MethodAttributes.toString(Attributes) + " " + Type.VOID() +
- " " + DeclaringType.FullName + "::" + Name + params2String();
- }
-
- //##########################################################################
-
-} // class ConstructorInfo
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/CustomAttributeProvider.java b/src/msil/ch/epfl/lamp/compiler/msil/CustomAttributeProvider.java
deleted file mode 100644
index 0e58c18114..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/CustomAttributeProvider.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import java.util.List;
-import java.util.LinkedList;
-import java.util.Iterator;
-
-/**
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class CustomAttributeProvider implements ICustomAttributeProvider {
-
- //##########################################################################
-
- protected List/*<Attribute>*/ custAttrs;
- private static final Object[] EMPTY = new Object[0];
-
- //TODO: take inherit into account
- public Object[] GetCustomAttributes(boolean inherit) {
- initAttributes(null);
- return custAttrs.size() == 0 ? EMPTY
- : custAttrs.toArray(new Attribute[custAttrs.size()]);
- }
-
- //TODO: take inherit into account
- public Object[] GetCustomAttributes(Type attributeType, boolean inherit) {
- initAttributes(attributeType);
- List tAttrs = null;
- if (constrType == attributeType)
- tAttrs = custAttrs;
- else {
- tAttrs = new LinkedList();
- for (Iterator attrs = custAttrs.iterator(); attrs.hasNext(); ) {
- Attribute a = (Attribute) attrs.next();
- if (a.GetType() == attributeType) tAttrs.add(a);
- }
- }
- return tAttrs.size() == 0 ? EMPTY
- : tAttrs.toArray(new Attribute[tAttrs.size()]);
- }
-
- //TODO: take inherit into account
- public boolean IsDefined(Type attributeType, boolean inherit) {
- initAttributes(attributeType);
- if (constrType == attributeType)
- return custAttrs.size() > 0;
- Iterator attrs = custAttrs.iterator();
- while (attrs.hasNext()) {
- if (((Attribute)attrs.next()).GetType() == attributeType)
- return true;
- }
- return false;
-// return inherit && (DeclaringClass.BaseType != null)
-// && DeclaringClass.BaseType.IsDefined(inherit);
- }
-
- protected void addCustomAttribute(ConstructorInfo constr, byte[] value) {
- Attribute attr = new Attribute(constr, value);
- assert constrType == null || constrType == attr.GetType();
- if (custAttrs == null)
- custAttrs = new LinkedList();
- custAttrs.add(attr);
- }
-
- private void initAttributes(Type atype) {
- if (custAttrs != null
- && (constrType == null || constrType == atype))
- return;
- custAttrs = new LinkedList();
- constrType = atype;
- loadCustomAttributes(atype);
- }
-
- protected void loadCustomAttributes(Type atype) {}
-
- private Type constrType;
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/CustomModifier.java b/src/msil/ch/epfl/lamp/compiler/msil/CustomModifier.java
deleted file mode 100644
index cf30008c60..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/CustomModifier.java
+++ /dev/null
@@ -1,45 +0,0 @@
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Quoting from the CIL spec, Partition II, Sec. 7.1.1:
- *
- * Custom modifiers, defined using `modreq` (required modifier) and `modopt` (optional modifier), are
- * similar to custom attributes (Sec. 21) except that modifiers are part of a signature rather than being attached to a
- * declaration. Each modifier associates a type reference with an item in the signature.
- *
- */
-public class CustomModifier {
-
- public boolean isReqd;
- public Type marker;
-
- public CustomModifier(boolean isReqd, Type marker) {
- this.isReqd = isReqd;
- this.marker = marker;
- }
-
- public String toString() {
- String res = (isReqd ? "modreq( " : "modopt( ") + marker.toString() + " )";
- return res;
- }
-
- public static Type[] helperCustomMods(boolean isReqd, CustomModifier[] cmods) {
- if(cmods == null) return null;
- int count = 0;
- for (int idx = 0; idx < cmods.length; idx++) {
- if(cmods[idx].isReqd == isReqd) count++;
- }
- Type[] res = new Type[count];
- int residx = 0;
- for (int idx = 0; idx < cmods.length; idx++) {
- if(cmods[idx].isReqd == isReqd) { // copy only the modifiers of the requested kind
- res[residx] = cmods[idx].marker;
- residx++;
- }
- }
- return res;
- }
-
- public static Type VolatileMarker() {
- return Type.GetType("System.Runtime.CompilerServices.IsVolatile");
- }
-
-}
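
(Editorial sketch: splitting a signature's custom modifiers into the modreq subset with the helper above, the same way FieldInfo.GetRequiredCustomModifiers does; assumes the msil package is imported.)

    static Type[] requiredModsDemo() {
        CustomModifier[] cmods = new CustomModifier[] {
            new CustomModifier(true, CustomModifier.VolatileMarker())   // modreq(IsVolatile)
        };
        return CustomModifier.helperCustomMods(true, cmods);  // one-element array holding the IsVolatile marker
    }
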
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/EventAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/EventAttributes.java
deleted file mode 100644
index a183993cb9..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/EventAttributes.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Specifies flags that describe the attributes of an event.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class EventAttributes {
-
- //##########################################################################
-
- /** Specifies that the event has no attributes. */
- public static final short None = 0x000;
-
- /** Specifies a reserved flag for CLR use only. */
- public static final short ReservedMask = 0x0400;
-
- /** Specifies that the event is special in a way described by the name. */
- public static final short SpecialName = 0x0200;
-
- /** Specifies that the CLR should check name encoding. */
- public static final short RTSpecialName = 0x0400;
-
- //##########################################################################
-
-} // class EventAttributes
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java
deleted file mode 100644
index 3ccba7900b..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-
-/**
- * Discovers the attributes of an event
- * and provides access to event metadata.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class EventInfo extends MemberInfo {
-
- //##########################################################################
-
- public final int MemberType() { return MemberTypes.Event; }
-
- /** Attributes associated with the event. */
- public final short Attributes;
-
- /** The Type object for the underlying event-handler delegate
- * associated with this event.
- */
- public final Type EventHandlerType;
-
- public MethodInfo GetAddMethod() { return addMethod; }
-
- public MethodInfo GetRemoveMethod() { return removeMethod; }
-
- public String toString() {
- return "" + EventHandlerType + " " + Name;
- }
-
- //##########################################################################
-
- protected static final EventInfo[] EMPTY_ARRAY = new EventInfo[0];
-
- protected MethodInfo addMethod;
-
- protected MethodInfo removeMethod;
-
- protected EventInfo(String name, Type declType, short attr,
- Type handlerType, MethodInfo add, MethodInfo remove)
- {
- super(name, declType);
- Attributes = attr;
- EventHandlerType = handlerType;
- this.addMethod = add;
- this.removeMethod = remove;
- }
-
- //##########################################################################
-
-} // class EventInfo
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/FieldAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/FieldAttributes.java
deleted file mode 100644
index d7d1bb3d54..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/FieldAttributes.java
+++ /dev/null
@@ -1,119 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Specifies flags that describe the attributes of a field.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class FieldAttributes {
-
- //##########################################################################
-
- /** Specifies the access level of a given field. */
- public static final short FieldAccessMask = 0x0007;
-
- /** Member not referenceable. */
- public static final short CompilerControlled = 0x0000;
-
- /** Field is accessible only by the parent type. */
- public static final short Private = 0x0001;
-
- /** Field is accessible only by subtypes in this assembly. */
- public static final short FamANDAssem = 0x0002;
-
- /** Field is accessible throughout the assembly. */
- public static final short Assembly = 0x0003;
-
- /** Field is accessible only by type and subtypes. */
- public static final short Family = 0x0004;
-
- /** Field is accessible by subtypes anywhere,
- * as well as throughout this assembly. */
- public static final short FamORAssem = 0x0005;
-
- /** Specifies that the field is accessible by any member
- * for whom this scope is visible. */
- public static final short Public = 0x0006;
-
- //##########################################################################
- //
-
- /** Field represents the defined type, or else it is per-instance. */
- public static final short Static = 0x0010;
-
- /** Field is initialized only and cannot be written after initialization. */
- public static final short InitOnly = 0x0020;
-
- /** Value is compile-time constant. */
- public static final short Literal = 0x0040;
-
- /** Field does not have to be serialized when the type is remoted. */
- public static final short NotSerialized = 0x0080;
-
- /** Field is special. */
- public static final short SpecialName = 0x0200;
-
- //##########################################################################
- // Interop attributes
-
- /** Implementation is forwarded through PInvoke */
- public static final short PinvokeImpl = 0x2000;
-
-
- //##########################################################################
- // Additional flags
-
- /** CLI provides 'special' behavior depending upon the name of the field */
- public static final short RTSpecialName = 0x0400;
-
- /** Field has marshalling information. */
- public static final short HasFieldMarshal = 0x1000;
-
- /** Field has a default value. */
- public static final short HasDefault = (short)0x8000;
-
- /** Field has a Relative Virtual Address (RVA). The RVA is the location
- * of the method body in the current image, as an address relative
- * to the start of the image file in which it is located. */
- public static final short HasFieldRVA = 0x0100;
-
- //##########################################################################
- //
-
- public static String toString(short attrs) {
- StringBuffer str = new StringBuffer();
- switch (attrs & FieldAccessMask) {
- case CompilerControlled: str.append("compilercontrolled"); break;
- case Private: str.append("private"); break;
- case FamANDAssem: str.append("famandassem"); break;
- case Assembly: str.append("assembly"); break;
- case Family: str.append("family"); break;
- case FamORAssem: str.append("famorassem"); break;
- case Public: str.append("public"); break;
- }
- if ((attrs & Static) != 0) str.append(" static");
- if ((attrs & InitOnly) != 0) str.append(" initonly");
- if ((attrs & Literal) != 0) str.append(" literal");
- if ((attrs & NotSerialized) != 0) str.append(" notserialized");
- if ((attrs & SpecialName) != 0) str.append(" specialname");
- if ((attrs & PinvokeImpl) != 0) str.append("");
- if ((attrs & RTSpecialName) != 0) str.append(" rtspecialname");
- if ((attrs & HasFieldMarshal) != 0) str.append(" marshal(<native type>)");
- //if ((attrs & HasDefault) != 0) str.append(" default(???)");
- return str.toString();
- }
-
- //##########################################################################
-
- // makes the class uninstantiable
- private FieldAttributes() {}
-
- //##########################################################################
-
-} // class FieldAttributes
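
(Editorial sketch: rendering a typical flag combination with the toString helper above; assumes the msil package is imported.)

    static void fieldAttributesDemo() {
        short attrs = (short) (FieldAttributes.Public | FieldAttributes.Static | FieldAttributes.InitOnly);
        System.out.println(FieldAttributes.toString(attrs));  // prints "public static initonly"
    }
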
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java
deleted file mode 100644
index 536a67e9a8..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.util.PECustomMod;
-
-/**
- * Discovers the attributes of a field and provides access to field metadata.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class FieldInfo extends MemberInfo implements HasCustomModifiers {
-
- //##########################################################################
- // public interface
-
- public final int MemberType() { return MemberTypes.Field; }
-
- /** Attributes associated with this field. */
- public final short Attributes;
-
- /** Type of the field represented by this FieldInfo object. */
- public final Type FieldType;
-
- /** can be null */
- public final CustomModifier[] cmods;
-
- protected final Object value;
-
- public final boolean IsStatic() {
- return (Attributes & FieldAttributes.Static) != 0;
- }
-
- public final boolean IsInitOnly() {
- return (Attributes & FieldAttributes.InitOnly) != 0;
- }
-
- public final boolean IsLiteral() {
- return (Attributes & FieldAttributes.Literal) != 0;
- }
-
- public final boolean IsPublic() {
- return (Attributes & FieldAttributes.FieldAccessMask)
- == FieldAttributes.Public;
- }
-
- public final boolean IsPrivate() {
- return (Attributes & FieldAttributes.FieldAccessMask)
- == FieldAttributes.Private;
- }
-
- public final boolean IsFamily() {
- return (Attributes & FieldAttributes.FieldAccessMask)
- == FieldAttributes.Family;
- }
-
- public final boolean IsAssembly() {
- return (Attributes & FieldAttributes.FieldAccessMask)
- == FieldAttributes.Assembly;
- }
-
- public final boolean IsFamilyOrAssembly() {
- return (Attributes & FieldAttributes.FieldAccessMask)
- == FieldAttributes.FamORAssem;
- }
-
- public final boolean IsFamilyAndAssembly() {
- return (Attributes & FieldAttributes.FieldAccessMask)
- == FieldAttributes.FamANDAssem;
- }
- public final boolean IsSpecialName() {
- return (Attributes & FieldAttributes.SpecialName) != 0;
- }
-
- public final boolean IsPinvokeImpl() {
- return (Attributes & FieldAttributes.PinvokeImpl) != 0;
- }
-
- public final boolean IsNotSerialized() {
- return (Attributes & FieldAttributes.NotSerialized) != 0;
- }
-
- private boolean knownVolatile = false;
- private boolean cachedVolatile = false;
- public final boolean IsVolatile() {
- if(knownVolatile) return cachedVolatile;
- knownVolatile = true;
- if(cmods == null) {
- cachedVolatile = false;
- return cachedVolatile;
- }
- for (int idx = 0; idx < cmods.length; idx++) {
- if(cmods[idx].marker == CustomModifier.VolatileMarker()) {
- cachedVolatile = true;
- return cachedVolatile;
- }
- }
- cachedVolatile = false;
- return cachedVolatile;
- }
-
- public final Type[] GetOptionalCustomModifiers () {
- return CustomModifier.helperCustomMods(false, cmods);
- }
-
- public final Type[] GetRequiredCustomModifiers() {
- return CustomModifier.helperCustomMods(true, cmods);
- }
-
- public String toString() {
- return FieldAttributes.toString(Attributes) + " " +
- FieldType + " " + DeclaringType.FullName + "::" + Name;
- }
-
- //##########################################################################
-
- protected static final FieldInfo[] EMPTY_ARRAY = new FieldInfo[0];
-
- /** Initializes a new instance of the FieldInfo class. */
- protected FieldInfo(String name, Type declType,
- int attrs, PECustomMod fieldTypeWithMods, Object value)
- {
- super(name, declType);
- FieldType = fieldTypeWithMods.marked;
- cmods = fieldTypeWithMods.cmods;
- Attributes = (short) attrs;
- this.value = value;
- }
-
- /**
- */
- public Object getValue() { return value; }
-
- //##########################################################################
-
-} // class FieldInfo
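
(Editorial sketch: the IsVolatile check above amounts to scanning the required custom modifiers for the IsVolatile marker; this hypothetical helper mirrors that logic from the outside, assuming the msil package is imported.)

    static boolean looksVolatile(FieldInfo field) {
        Type[] reqd = field.GetRequiredCustomModifiers();  // null when the field carries no custom modifiers
        if (reqd == null) return false;
        for (int i = 0; i < reqd.length; i++)
            if (reqd[i] == CustomModifier.VolatileMarker()) return true;  // same identity test IsVolatile uses
        return false;
    }
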
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/GenericParamAndConstraints.java b/src/msil/ch/epfl/lamp/compiler/msil/GenericParamAndConstraints.java
deleted file mode 100644
index 6237fbafee..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/GenericParamAndConstraints.java
+++ /dev/null
@@ -1,40 +0,0 @@
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * @author Miguel Garcia
- */
-public class GenericParamAndConstraints {
-
- public GenericParamAndConstraints(int Number, String Name, Type[] Constraints,
- boolean isInvariant, boolean isCovariant, boolean isContravariant,
- boolean isReferenceType, boolean isValueType, boolean hasDefaultConstructor) {
- this.Number = Number;
- this.Name = Name;
- this.Constraints = Constraints; // TODO representation for the class and new() constraints missing
- this.isInvariant = isInvariant;
- this.isCovariant = isCovariant;
- this.isContravariant = isContravariant;
- this.isReferenceType = isReferenceType;
- this.isValueType = isValueType;
- this.hasDefaultConstructor = hasDefaultConstructor;
-
- }
-
- public final int Number;
- public final String Name; // can be null
- public final Type[] Constraints; // can be empty array
- public final boolean isInvariant; // only relevant for TVars, not for an MVar
- public final boolean isCovariant; // only relevant for TVars, not for an MVar
- public final boolean isContravariant; // only relevant for TVars, not for an MVar
- public final boolean isReferenceType;
- public final boolean isValueType;
- public final boolean hasDefaultConstructor;
-
- public String toString() {
- String res = Name == null ? "<NoName>" : (Name.equals("") ? "<NoName>" : Name);
- res = res + " <: " + java.util.Arrays.toString(Constraints); // show the constraint types, not the array's identity
- return res;
- }
-
-}
-
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/HasCustomModifiers.java b/src/msil/ch/epfl/lamp/compiler/msil/HasCustomModifiers.java
deleted file mode 100644
index 5ead087350..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/HasCustomModifiers.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package ch.epfl.lamp.compiler.msil;
-
-public interface HasCustomModifiers {
-
- public Type[] GetOptionalCustomModifiers();
-
- public Type[] GetRequiredCustomModifiers();
-
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ICustomAttributeProvider.java b/src/msil/ch/epfl/lamp/compiler/msil/ICustomAttributeProvider.java
deleted file mode 100644
index 927185962c..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/ICustomAttributeProvider.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Provides custom attributes for reflection objects that support them.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public interface ICustomAttributeProvider {
-
- //##########################################################################
- // interface method definitions
-
- /** Returns an array of all of the custom attributes
- * defined on this member, excluding named attributes,
- * or an empty array if there are no custom attributes.
- *
- * @param inherit - When true, look up the hierarchy chain
- * for the inherited custom attribute.
- * @return - An array of Objects representing custom attributes,
- * or an empty array.
- */
- public Object[] GetCustomAttributes(boolean inherit);
-
-
- /** Returns an array of custom attributes defined on this member,
- * identified by type, or an empty array
- * if there are no custom attributes of that type.
- *
- * @param attributeType - The type of the custom attributes.
- * @param inherit - When true, look up the hierarchy chain
- * for the inherited custom attribute.
- * @return - An array of Objects representing custom attributes,
- * or an empty array.
- */
- public Object[] GetCustomAttributes(Type attributeType, boolean inherit);
-
-
- /** Indicates whether one or more instances of attributeType
- * is defined on this member
- *
- * @param attributeType - The type of the custom attributes
- * @param inherit - When true, look up the hierarchy chain
- * for the inherited custom attribute.
- * @return - true if the attributeType is defined on this member;
- * false otherwise.
- */
- public boolean IsDefined(Type attributeType, boolean inherit);
-
- //##########################################################################
-
-} // interface ICustomAttributeProvider
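
(Editorial sketch: a hypothetical helper showing how the two interface methods above are typically combined; member and attrType are supplied by the caller, and the msil package is assumed to be imported.)

    static Object[] attributesOf(ICustomAttributeProvider member, Type attrType) {
        if (!member.IsDefined(attrType, false))
            return new Object[0];                            // nothing of that type on this member
        return member.GetCustomAttributes(attrType, false);  // only attributes of attrType
    }
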
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MemberInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/MemberInfo.java
deleted file mode 100644
index 65ff1b290b..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/MemberInfo.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * The root class of the Reflection hierarchy.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class MemberInfo extends CustomAttributeProvider {
-
- //##########################################################################
-
- /** The name of this member. */
- public final String Name;
-
- /**
- * The class that declares this member.
- * Note: if the MemberInfo object is a global member,
- * (that is, it was obtained from Module.GetMethods,
- * which returns global methods on a module), then DeclaringType
- * will be a null reference.
- */
- public final Type DeclaringType;
-
- /** An enumerated value from the MemberTypes class,
- * specifying a constructor, event, field, method,
- * property, type information, all, or custom. */
- public abstract int MemberType();
-
- //##########################################################################
- // protected members
-
- protected static final MemberInfo[] EMPTY_ARRAY = new MemberInfo[0];
-
- protected MemberInfo(String name, Type declType) {
- Name = name;
- DeclaringType = declType;
- }
-
- //########################################################################
-
-} // class MemberInfo
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MemberTypes.java b/src/msil/ch/epfl/lamp/compiler/msil/MemberTypes.java
deleted file mode 100644
index 5f49ad3323..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/MemberTypes.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Marks each type of member that is defined as a derived class of MemberInfo.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class MemberTypes {
-
- //##########################################################################
-
- /** Specifies that the member is a constructor,
- * representing a ConstructorInfo member. */
- public static final int Constructor = 0x01;
-
-
- /** Specifies that the member is an event,
- * representing an EventInfo member. */
- public static final int Event = 0x02;
-
-
- /** Specifies that the member is a field,
- * representing a FieldInfo member. */
- public static final int Field = 0x04;
-
-
- /** Specifies that the member is a method,
- * representing a MethodInfo member. */
- public static final int Method = 0x08;
-
-
- /** Specifies that the member is a property,
- * representing a PropertyInfo member.
- */
- public static final int Property = 0x10;
-
- /** Specifies that the member is a type,
- * representing a TypeInfo member. */
- public static final int TypeInfo = 0x20;
-
-
- /** Specifies that the member is a custom member type. */
- public static final int Custom = 0x40;
-
-
- /** Specifies that the member is a nested type,
- * extending MemberInfo. */
- public static final int NestedType = 0x80;
-
-
- /** Specifies all member types. */
- public static final int All =
- Constructor | Event | Field | Method | Property | TypeInfo | NestedType;
-
-
- public static String toString(int memberType) {
- if ((memberType & Constructor) != 0) return "Constructor";
- if ((memberType & Event) != 0) return "Event";
- if ((memberType & Field) != 0) return "Field";
- if ((memberType & Method) != 0) return "Method";
- if ((memberType & Property) != 0) return "Property";
- if ((memberType & TypeInfo) != 0) return "TypeInfo";
- if ((memberType & Custom) != 0) return "Custom";
- if ((memberType & NestedType) != 0) return "NestedType";
- return "Unknown MemberType: " + memberType;
- }
-
- //##########################################################################
-
- // makes the class uninstantiable
- private MemberTypes() {}
-
- //##########################################################################
-
-} // class MemberTypes
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MethodAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/MethodAttributes.java
deleted file mode 100644
index a703c38fb8..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/MethodAttributes.java
+++ /dev/null
@@ -1,158 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/** Specifies flags for method attributes.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class MethodAttributes {
-
- //##########################################################################
- // Method access attributes
-
- /** Bitmask used to retrieve accessibility information. */
- public static final short MemberAccessMask = 0x0007;
-
- ///** Member not referenceable*/
- //public static final short CompilerConstrolled = 0x0000;
-
- /** Indicates that the member cannot be referenced. */
- public static final short PrivateScope = 0x0000;
-
- /** Method is accessible only by the current class. */
- public static final short Private = 0x0001;
-
- /** Method is accessible to members of this type
- * and its derived types that are in this assembly only. */
- public static final short FamANDAssem = 0x0002;
-
- /** Method is accessible to any class of this assembly. */
- public static final short Assembly = 0x0003;
-
- /** Method is accessible only to members of this class
- * and its derived classes. */
- public static final short Family = 0x0004;
-
- /** Method is accessible to derived classes anywhere,
- * as well as to any class in the assembly. */
- public static final short FamORAssem = 0x0005;
-
- /** Method is accessible to any object for which this object is in scope. */
- public static final short Public = 0x0006;
-
-
- //##########################################################################
- // Flags
-
- /** Method is defined on the type; otherwise, it is defined per instance. */
- public static final short Static = 0x0010;
-
- /** Method cannot be overridden. */
- public static final short Final = 0x0020;
-
- /** Method is virtual. */
- public static final short Virtual = 0x0040;
-
- /** Method hides by name and signature; otherwise, by name only. */
- public static final short HideBySig = 0x0080;
-
-
- //##########################################################################
- // vtable attributes
-
- /** Bitmask used to retrieve vtable attributes. */
- public static final short VtableLayoutMask = 0x0100;
-
- /** Method reuses existing slot in the vtable. */
- public static final short ReuseSlot = 0x0000;
-
-
- /** Method always gets a new slot in the vtable. */
- public static final short NewSlot = 0x0100;
-
-
- //##########################################################################
- // Flags
-
- /** Method does not provide implementation. */
- public static final short Abstract = 0x0400;
-
- /** Method is special. */
- public static final short SpecialName = 0x0800;
-
-
- //##########################################################################
- // Interop attributes
-
- /** Method implementation is forwarded through PInvoke. */
- public static final short PInvokeImpl = 0x2000;
-
- /** Reserved: shall be zero for conforming implementations.
- * Managed method is exported by thunk to unmanaged code. */
- public static final short UnmanagedExport = 0x0008;
-
-
- //##########################################################################
- // Additional flags
-
- /** CLI provides special behavior, depending on the name of the method. */
- public static final short RTSpecialName = 0x1000;
-
- /** Method has security associated with it.
- * Reserved flag for runtime use only.
- */
- public static final short HasSecurity = 0x00000040;
-
- /**
- * Indicates that the method calls another method containing security code.
- * Reserved flag for runtime use only.
- */
- public static final short RequireSecObject = 0x00004000;
-
- /** Indicates a reserved flag for runtime use only. */
- public static final short ReservedMask = 0x0000;
-
-
- //##########################################################################
-
- public static String toString(short attrs) {
- StringBuffer str = new StringBuffer(accessFlagsToString(attrs));
- if ((attrs & Static) != 0) str.append(" static");
- if ((attrs & Final) != 0) str.append(" final");
- if ((attrs & Virtual) != 0) str.append(" virtual");
- if ((attrs & Abstract) != 0) str.append(" abstract");
- if ((attrs & HideBySig) != 0) str.append(" hidebysig");
- if ((attrs & NewSlot) != 0) str.append(" newslot");
- if ((attrs & SpecialName) != 0) str.append(" specialname");
- if ((attrs & PInvokeImpl) != 0) str.append(" pinvokeimpl(?!?)");
- if ((attrs & RTSpecialName) != 0) str.append(" rtspecialname");
- return str.toString();
-
- }
-
- public static String accessFlagsToString(short attrs) {
- switch (attrs & MemberAccessMask) {
- case PrivateScope: return "compilercontrolled";
- case Private: return "private";
- case FamANDAssem: return "famandassem";
- case Assembly: return "assembly";
- case Family: return "family";
- case FamORAssem: return "famorassem";
- case Public: return "public";
- default: return "xxx";
- }
- }
-
- //##########################################################################
-
- // makes the class uninstantiable
- private MethodAttributes() {}
-
- //##########################################################################
-
-} // class MethodAttributes
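
(Editorial sketch: how a public virtual hide-by-sig method renders through the toString helper above; assumes the msil package is imported.)

    static void methodAttributesDemo() {
        short attrs = (short) (MethodAttributes.Public | MethodAttributes.Virtual | MethodAttributes.HideBySig);
        System.out.println(MethodAttributes.toString(attrs));  // prints "public virtual hidebysig"
    }
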
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MethodBase.java b/src/msil/ch/epfl/lamp/compiler/msil/MethodBase.java
deleted file mode 100644
index fe6404346e..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/MethodBase.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import java.util.Iterator;
-
-/**
- * The common superclass of MemberInfo and ConstructorInfo
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class MethodBase extends MemberInfo {
-
- //##########################################################################
- // public interface
-
- private java.util.List /* GenericParamAndConstraints */ mVars = new java.util.LinkedList();
- private GenericParamAndConstraints[] sortedMVars = null;
-
- public void addMVar(GenericParamAndConstraints tvarAndConstraints) {
- sortedMVars = null;
- mVars.add(tvarAndConstraints);
- }
-
- public GenericParamAndConstraints[] getSortedMVars() {
- if(sortedMVars == null) {
- sortedMVars = new GenericParamAndConstraints[mVars.size()];
- for (int i = 0; i < sortedMVars.length; i ++){
- Iterator iter = mVars.iterator();
- while(iter.hasNext()) {
- GenericParamAndConstraints tvC = (GenericParamAndConstraints)iter.next();
- if(tvC.Number == i) {
- sortedMVars[i] = tvC;
- }
- }
- }
- }
- return sortedMVars;
- }
-
- public final boolean IsGeneric() {
- return mVars.size() > 0;
- }
-
- /** The attributes associated with this method/constructor. */
- public final short Attributes;
-
- /** The calling convention of this method/constructor. */
- public final short CallingConvention;
-
- public abstract boolean IsConstructor();
-
- public final boolean IsAbstract() {
- return (Attributes & MethodAttributes.Abstract) != 0;
- }
-
- public final boolean IsFinal() {
- return (Attributes & MethodAttributes.Final) != 0;
- }
-
- public final boolean IsVirtual() {
- return (Attributes & MethodAttributes.Virtual) != 0;
- }
-
- public final boolean IsInstance() {
- return !IsStatic() && !IsVirtual();
- }
-
- public final boolean IsStatic() {
- return (Attributes & MethodAttributes.Static) != 0;
- }
-
- public final boolean IsHideBySig() {
- return (Attributes & MethodAttributes.HideBySig) != 0;
- }
-
- public final boolean IsSpecialName() {
- return (Attributes & MethodAttributes.SpecialName) != 0;
- }
-
-
- public final boolean IsPublic() {
- return (Attributes & MethodAttributes.MemberAccessMask)
- == MethodAttributes.Public;
- }
-
- public final boolean IsPrivate() {
- return (Attributes & MethodAttributes.MemberAccessMask)
- == MethodAttributes.Private;
- }
-
- public final boolean IsFamily() {
- return (Attributes & MethodAttributes.MemberAccessMask)
- == MethodAttributes.Family;
- }
-
- public final boolean IsAssembly() {
- return (Attributes & MethodAttributes.MemberAccessMask)
- == MethodAttributes.Assembly;
- }
-
- public final boolean IsFamilyOrAssembly() {
- return (Attributes & MethodAttributes.MemberAccessMask)
- == MethodAttributes.FamORAssem;
- }
-
- public final boolean IsFamilyAndAssembly() {
- return (Attributes & MethodAttributes.MemberAccessMask)
- == MethodAttributes.FamANDAssem;
- }
-
- public boolean HasPtrParamOrRetType() {
- // the override in MethodInfo checks the return type
- ParameterInfo[] ps = GetParameters();
- for (int i = 0; i < ps.length; i++) {
- Type pT = ps[i].ParameterType;
- if(pT.IsPointer()) {
- // Type.mkPtr creates a msil.Type for a pointer type
- return true;
- }
- if(pT.IsByRef() && !pT.GetElementType().CanBeTakenAddressOf()) {
- /* TODO Cases where GenMSIL (so far) can't emit good bytecode:
- the type being taken address of IsArray(), IsGeneric(), or IsTMVarUsage.
- For example, System.Enum declares
- public static bool TryParse<TEnum>(string value, out TEnum result) where TEnum : struct, new();
- */
- return true;
- }
- }
- return false;
- }
-
- /** Returns the parameters of the method/constructor. */
- public ParameterInfo[] GetParameters() {
- return (ParameterInfo[]) params.clone();
- }
-
- public int GetMethodImplementationFlags() { return implAttributes; }
-
- //##########################################################################
-
- /** Method parameters. */
- protected ParameterInfo[] params;
-
- protected short implAttributes;
-
- protected MethodBase(String name, Type declType, int attrs, Type[] paramTypes)
- {
- this(name, declType, attrs);
- assert paramTypes != null;
- params = new ParameterInfo[paramTypes.length];
- for (int i = 0; i < params.length; i++)
- params[i] = new ParameterInfo(null, paramTypes[i], 0, i);
- }
-
- protected MethodBase(String name, Type declType, int attrs,
- ParameterInfo[] params)
- {
- this(name, declType, attrs);
- this.params = params;
- }
-
- /**
- */
- private MethodBase(String name, Type declType, int attrs) {
- super(name, declType);
-
- Attributes = (short) attrs;
-
- if (IsConstructor()) {
- attrs |= MethodAttributes.SpecialName;
- attrs |= MethodAttributes.RTSpecialName;
- }
-
- CallingConvention = (short) (CallingConventions.Standard
- | (IsStatic() ? (short)0 : CallingConventions.HasThis));
- }
-
- //##########################################################################
- // internal methods
-
- protected String params2String() {
- StringBuffer s = new StringBuffer("(");
- for (int i = 0; i < params.length; i++) {
- if (i > 0) s.append(", ");
- s.append(params[i].ParameterType);
- }
- s.append(")");
- return s.toString();
- }
-
- //##########################################################################
-
-} // class MethodBase
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MethodImplAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/MethodImplAttributes.java
deleted file mode 100644
index 8e8d879593..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/MethodImplAttributes.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Method implementation attributes
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class MethodImplAttributes {
-
- //##########################################################################
-
- /**
- * Specifies flags about code type. 3
- */
- public static final short CodeTypeMask = (short) 0x0003;
-
- /**
- * Specifies that the method implementation is in MSIL. 0
- */
- public static final short IL = (short) 0x0000;
-
- /**
- * Specifies that the method implementation is native. 1
- */
- public static final short Native = (short) 0x0001;
-
- /**
- * This member supports the .NET Framework infrastructure and
- * is not intended to be used directly from your code. 2
- */
- public static final short OPTIL = (short) 0x0002;
-
- /**
- * Specifies that the method implementation is provided by the runtime. 3
- */
- public static final short Runtime = (short) 0x0003;
-
-
-
- /**
- * Specifies whether the code is managed or unmanaged. 4
- */
- public static final short ManagedMask = (short) 0x0004;
-
- /**
- * Specifies that the method implementation is managed, otherwise unmanaged.
- */
- public static final short Managed = (short) 0x0000;
-
- /**
- * Specifies that the method implementation is unmanaged, otherwise managed.
- */
- public static final short Unmanaged = (short) 0x0004;
-
-
-
- /**
- * Specifies that the method cannot be inlined. 8
- */
- public static final short NoInlining = (short) 0x0008;
-
- /**
- * Specifies that the method is not defined. 16
- */
- public static final short ForwardRef = (short) 0x0010;
-
- /**
- * Specifies that the method is single-threaded through the body.
- * You can also use the C# lock statement or the Visual Basic
- * SyncLock statement for this purpose. 32
- */
- public static final short Synchronized = (short) 0x0020;
-
- /**
- * Specifies that the method signature is exported exactly as declared. 128
- */
- public static final short PreserveSig = (short) 0x0080;
-
- /**
- * Specifies an internal call. 4096
- */
- public static final short InternalCall = (short) 0x1000;
-
- /**
- * Specifies a range check value. 65535
- */
- public static final short MaxMethodImplVal = (short) 0xffff;
-
- //##########################################################################
-
- public static String toString(int implAttr) {
- StringBuffer s = new StringBuffer();
- switch (implAttr & CodeTypeMask) {
- case IL: s.append("cil"); break;
- case Native: s.append("native"); break;
- case Runtime: s.append("runtime"); break;
- }
- switch (implAttr & ManagedMask) {
- case Managed: s.append(" managed"); break;
- case Unmanaged: s.append(" unmanaged"); break;
- }
- if ((implAttr & NoInlining) != 0) s.append(" noinlining");
- if ((implAttr & ForwardRef) != 0) s.append(" forwardref");
- if ((implAttr & Synchronized) != 0) s.append(" synchronized");
- if ((implAttr & InternalCall) != 0) s.append(" internalcall");
- return s.toString();
- }
-
- //##########################################################################
-
-} // class MethodImplAttributes
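
(Editorial sketch: decoding an implementation-flags word with the helper above; IL and Managed are zero, so only the non-default flags add text. Assumes the msil package is imported.)

    static void methodImplDemo() {
        int impl = MethodImplAttributes.IL | MethodImplAttributes.Managed | MethodImplAttributes.Synchronized;
        System.out.println(MethodImplAttributes.toString(impl));  // prints "cil managed synchronized"
    }
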
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MethodInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/MethodInfo.java
deleted file mode 100644
index a415e7551f..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/MethodInfo.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import java.util.Iterator;
-
-/**
- * Discovers the attributes of a method and provides access to method metadata.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class MethodInfo extends MethodBase {
-
- public boolean HasPtrParamOrRetType() {
- if(ReturnType.IsByRef() && !(ReturnType.GetElementType().IsValueType())) {
- /* A method returning ByRef won't pass peverify, so I guess this is dead code. */
- return true;
- }
- if(ReturnType.IsPointer()) {
- return true;
- }
- return super.HasPtrParamOrRetType();
- }
-
- //##########################################################################
- // public members
-
- public final int MemberType() { return MemberTypes.Method; }
-
- public final boolean IsConstructor() { return false; }
-
- /** The return type of this method.
- */
- public final Type ReturnType;
-
- //##########################################################################
- // protected members
-
- protected static final MethodInfo[] EMPTY_ARRAY = new MethodInfo[0];
-
- /**
- * Constructor Initializes a new instance of the MethodInfo class.
- */
- protected MethodInfo(String name, Type declType,
- int attrs, Type returnType, Type[] paramTypes )
- {
- super(name, declType, attrs, paramTypes);
- ReturnType = returnType;
- }
-
- protected MethodInfo(String name, Type declType,
- int attrs, Type returnType, ParameterInfo[] params )
- {
- super(name, declType, attrs, params);
- ReturnType = returnType;
- }
-
- public String toString() {
- return MethodAttributes.toString(Attributes) + " " + ReturnType +
- " " + DeclaringType + "::" + Name + params2String();
- }
-
- //##########################################################################
-
-} // class MethodInfo
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Module.java b/src/msil/ch/epfl/lamp/compiler/msil/Module.java
deleted file mode 100644
index 8dd5e7119f..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/Module.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import java.util.Map;
-import java.util.HashMap;
-
-/**
- * Defines and represents a module. Get an instance of ModuleBuilder
- * by calling DefineDynamicModule.
- * A module is a portable executable file of type .dll or .exe consisting
- * of one or more classes and interfaces. There may be multiple namespaces
- * contained in a single module, and a namespace may span multiple modules.
- * One or more modules deployed as a unit compose an assembly.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class Module extends CustomAttributeProvider {
-
- //##########################################################################
- // public fields
-
- /** String representing the name of the module with the path removed. */
- public final String Name;
-
- /** String representing the fully qualified name and path to this module. */
- public final String FullyQualifiedName;
-
- /** String representing the name of the module. */
- public String ScopeName;
-
- /** The Assembly the Module belongs to. */
- public final Assembly Assembly;
-
- //##########################################################################
- // constructor
-
- protected Module(String name, String filename,
- String scopeName, Assembly assembly)
- {
- this.Name = name;
- this.FullyQualifiedName = filename;
- this.ScopeName = scopeName;
- this.Assembly = assembly;
- }
-
- //##########################################################################
- // public methods
-
- /** Returns the specified class, performing a case-sensitive search. */
- public Type GetType(String name) {
- initTypes();
- return (Type) typesMap.get(name);
- }
-
- /**
- * @return all the classes defined within this module.
- */
- public Type[] GetTypes() {
- initTypes();
- return (Type[]) types.clone();
- }
-
- /**
- * @return the global field with the specified name.
- */
- public FieldInfo GetField(String name) {
- for (int i = 0; i < fields.length; i++)
- if (fields[i].Name.equals(name))
- return fields[i];
- return null;
- }
-
- /**
- * @return an array of the global fields of the module
- */
- public FieldInfo[] GetFields() {
- return (FieldInfo[]) fields.clone();
- }
-
- /**
- * @return - the global method with the specified name
- */
- public MethodInfo GetMethod(String name) {
- for (int i = 0; i < methods.length; i++)
- if (methods[i].Name.equals(name))
- return methods[i];
- return null;
- }
-
- /**
- * @return - an array of all the global methods defined in this module.
- */
- public MethodInfo[] GetMethods() {
- return (MethodInfo[]) methods.clone();
- }
-
- /**
- */
- public String toString() { return Name; }
-
- //########################################################################
- // protected members
-
- // all the types defined in this module
- protected final Map typesMap = new HashMap();
-
- // all the types defined in this module
- protected Type[] types;
-
- // the global fields of the module
- protected FieldInfo[] fields = FieldInfo.EMPTY_ARRAY;
-
- // the global methods of the module
- protected MethodInfo[] methods = MethodInfo.EMPTY_ARRAY;
-
- protected Type addType(Type type) {
- addType(type.FullName, type);
- Assembly.addType(type);
- return type;
- }
-
- protected Type addType(String name, Type type) {
- assert type != null;
- typesMap.put(name, type);
- return type;
- }
-
- private boolean initTypes = true;
- protected final void initTypes() {
- if (initTypes) {
- loadTypes();
- initTypes = false;
- }
- }
-
- protected void loadTypes() {}
-
- private boolean initGlobals = true;
- protected final void initGlobals() {
- if (initGlobals) {
- loadGlobals();
- initGlobals = false;
- }
- }
-
- protected void loadGlobals() {}
-
- //##########################################################################
-
-} // class Module
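
A minimal usage sketch of the Module API above (how the Module instance is obtained is outside this class; the variable name and the "Main" lookup are illustrative):

    // Sketch: enumerate the types of a module and look up one global method by name.
    static void dumpModule(Module someModule) {
        Type[] types = someModule.GetTypes();           // every class/interface defined in the module
        for (int i = 0; i < types.length; i++)
            System.out.println(types[i].FullName);
        MethodInfo main = someModule.GetMethod("Main"); // null when no such global method exists
        if (main != null)
            System.out.println("global method: " + main.Name);
    }
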
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEAssembly.java b/src/msil/ch/epfl/lamp/compiler/msil/PEAssembly.java
deleted file mode 100644
index a31db16c92..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PEAssembly.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.util.Table;
-import ch.epfl.lamp.compiler.msil.util.Table.*;
-
-import java.io.File;
-
-import java.util.Map;
-import java.util.HashMap;
-
-/** Represents an assembly that resides in a real .NET assembly file
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-final class PEAssembly extends Assembly {
-
- private final PEFile pefile;
-
- private PEModule mainModule;
-
- public PEAssembly(PEFile pefile, AssemblyName an) {
- super(an, true);
- this.pefile = pefile;
- String name = pefile.ModuleDef(1).getName();
- mainModule = new PEModule(pefile, 1, name, this);
- addModule(name, mainModule);
- //initModules();
- }
-
- protected void loadModules() {
- File parentDir = pefile.getParentFile();
- FileDef fd = pefile.FileDef;
- for (int row = 1; row <= fd.rows; row++) {
- fd.readRow(row);
- String filename = fd.getName();
- File f = new File(parentDir, filename);
- PEFile pe = Assembly.getPEFile(f);
- if (pe == null) {
- f = new File(filename);
- pe = Assembly.getPEFile(f);
- if (pe == null)
- continue;
-// throw new RuntimeException("Cannot find file " + filename +
-// " referenced by assembly " + this);
- }
- String name = pe.ModuleDef(1).getName();
- PEModule module = new PEModule(pe, 1, name, this);
- addModule(name, module);
- }
- }
-
- public File getFile() {
- return pefile.getUnderlyingFile();
- }
-
- protected void loadCustomAttributes(Type attributeType) {
- initModules();
- mainModule.initAttributes(this, 1, Table.AssemblyDef.ID, attributeType);
- }
-
- //##########################################################################
-
-} // class PEAssembly
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java b/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java
deleted file mode 100644
index 3eb22b9985..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java
+++ /dev/null
@@ -1,941 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.util.*;
-import ch.epfl.lamp.compiler.msil.util.Table.*;
-
-import ch.epfl.lamp.compiler.msil.Type;
-import ch.epfl.lamp.compiler.msil.Module;
-
-import java.io.File;
-import java.io.RandomAccessFile;
-import java.io.PrintStream;
-import java.io.IOException;
-import java.io.FileNotFoundException;
-
-import java.nio.ByteBuffer;
-import java.nio.channels.FileChannel;
-import java.nio.MappedByteBuffer;
-
-import java.util.Date;
-
-/**
- * A class that represents a .NET PE/COFF image.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- * @see <a href="http://www.ecma-international.org/publications/standards/Ecma-335.htm">Standard ECMA-335: Common Language Infrastructure (CLI), 4th edition (June 2006)</a>
- */
-public class PEFile {
-
- //##########################################################################
-
- public static final int INT_SIZE = 4;
-
- protected final int PE_SIGNATURE_OFFSET;
- protected final int COFF_HEADER_OFFSET;
- protected final int PE_HEADER_OFFSET;
-
- protected final int numOfSections;
- protected final int CLI_RVA;
- protected final int CLI_Length;
- public final int rvaMetadata;
- public final int posMetadata;
- protected final int numOfStreams;
- protected final int optHeaderSize;
-
- protected final File underlyingFile;
- protected final RandomAccessFile file;
- protected final MappedByteBuffer buf;
-
- protected final PESection [] sections;
-
- public PEStream Meta, Strings, US, Blob, GUID;
-
- private final Table [] tables = new Table[Table.MAX_NUMBER];
-
- public final boolean isDLL;
-
- protected final int heapSizes;
- public final boolean StringIsShort, BlobIsShort, GUIDIsShort;
-
- protected PEModule pemodule = null;
-
- //##########################################################################
- // PEFile constructor
-
- private static void fileFormatCheck(boolean cond, String s) {
- if (cond)
- throw new RuntimeException(s);
- }
-
- /** Maps the given file into memory and parses its PE/COFF headers and CLI metadata tables. */
- public PEFile(String filename) throws FileNotFoundException {
- this.underlyingFile = new File(filename);
- this.file = new RandomAccessFile(underlyingFile, "r");
- FileChannel fc = file.getChannel();
- MappedByteBuffer bb = null;
- try {
- bb = fc.map(FileChannel.MapMode.READ_ONLY, 0L, fc.size());
- } catch (IOException e) { throw new RuntimeException(e); }
-
- /** Ecma 335, 25 File format extensions to PE:
- *
- * "Unless stated otherwise, all binary values are stored in little-endian format."
- */
-
- bb.order(java.nio.ByteOrder.LITTLE_ENDIAN);
- this.buf = bb;
-
- /** Ecma 335, 25.2.1 MS-DOS header:
- *
- * "The PE format starts with an MS-DOS stub of exactly the following 128 bytes to
- * be placed at the front of the module."
- *
- * We are only checking for MZ (Mark Zbikowski)
- */
-
- seek(0);
- fileFormatCheck(readByte() != 0x4d, "Invalid PE file format: " + filename); // 'M'
- fileFormatCheck(readByte() != 0x5a, "Invalid PE file format: " + filename); // 'Z'
-
- /** Ecma 335, 25.2.1 MS-DOS header:
- *
- * "At offset 0x3c in the DOS header is a 4-byte unsigned integer offset, lfanew,
- * to the PE signature (shall be "PE\0\0"), immediately followed by the PE file header."
- */
-
- seek(0x3c);
- PE_SIGNATURE_OFFSET = readInt();
- seek(PE_SIGNATURE_OFFSET);
- // start of PE signature (a signature that is just 4 bytes long)
- fileFormatCheck(readByte() != 0x50, "Invalid PE file format: " + filename); // 'P'
- fileFormatCheck(readByte() != 0x45, "Invalid PE file format: " + filename); // 'E'
- fileFormatCheck(readByte() != 0x00, "Invalid PE file format: " + filename); // 0
- fileFormatCheck(readByte() != 0x00, "Invalid PE file format: " + filename); // 0
-
- //trace("PE signature offset = 0x" + Table.int2hex(PE_SIGNATURE_OFFSET));
-
- COFF_HEADER_OFFSET = PE_SIGNATURE_OFFSET + 4;
- PE_HEADER_OFFSET = COFF_HEADER_OFFSET + 20;
-
- seek(COFF_HEADER_OFFSET);
-
- /* start of PE file header, Sec. 25.2.2 in Partition II */
- skip(2); // Machine (always 0x14c)
- numOfSections = readShort(); // Number of sections; indicates size of the Section Table
- Date timeStamp = new Date(readInt() * 1000L);
- skip(2 * INT_SIZE); // skip Pointer to Symbol Table (always 0) and Number of Symbols (always 0)
- optHeaderSize = readShort();
- int characteristics = readShort();
- isDLL = (characteristics & 0x2000) != 0;
-
- seek(PE_HEADER_OFFSET + 208); // p.157, Partition II
-
- CLI_RVA = readInt(); // called "Data Directory Table" in Ch. 4 of Expert IL book
- CLI_Length = readInt();
- //trace("CLI_RVA = 0x" + Table.int2hex(CLI_RVA));
- //trace("CLI_Length = 0x" + Table.int2hex(CLI_Length));
-
- sections = new PESection[numOfSections];
-
- seek(PE_HEADER_OFFSET + optHeaderSize); // go to the sections descriptors
-
- for (int i = 0; i < numOfSections; i++) {
- seek(PE_HEADER_OFFSET + optHeaderSize + i * 40);
- sections[i] = new PESection(this);
- //sections[i].dump(System.out);
- }
-
- seek(fromRVA(CLI_RVA));
- skip(8);
- rvaMetadata = readInt();
- posMetadata = fromRVA(rvaMetadata);
- //trace("rvaMetadata = 0x" + Table.int2hex(rvaMetadata));
- //trace("posMetadata = 0x" + Table.int2hex(posMetadata));
-
- seek(posMetadata);
- int magic = readInt();
- //trace("Magic metadata signature = 0x" + Table.int2hex(magic));
- fileFormatCheck(magic != 0x424a5342, "Invalid metadata signature!");
- skip(8);
-
- int strlength = readInt();
- //trace("version name string length = " + strlength);
- skip(strlength);
- align(INT_SIZE, posMetadata);
- //trace("position of flags = 0x" + Table.int2hex((int)pos()));
- skip(2); // ignore the flags
- numOfStreams = readShort();
- //trace("Number of metadata streams = " + numOfStreams);
-
- for (int i = 0; i < numOfStreams; i++) {
- PEStream strm = new PEStream(this);
- //strm.dump(System.out);
- if (strm.name.equals("#~")
- || strm.name.equals("#-")) Meta = strm;
- if (strm.name.equals("#Strings")) Strings = strm;
- if (strm.name.equals("#US")) US = strm;
- if (strm.name.equals("#Blob")) Blob = strm;
- if (strm.name.equals("#GUID")) GUID = strm;
- }
-
- seek(Meta.offset);
- skip(6);
- heapSizes = readByte();
- StringIsShort = (heapSizes & 0x01) == 0;
- GUIDIsShort = (heapSizes & 0x02) == 0;
- BlobIsShort = (heapSizes & 0x04) == 0;
-
- skip(1);
- long tablesMask = readLong();
- long nonStandardTables = tablesMask & ~Table.VALID_TABLES_MASK;
- skip(8); //go to the list of number of rows
- for (int i = 0; i < tables.length; i++) {
- tables[i] = Table.newTable
- (this, i, ((tablesMask >> i) & 0x01) != 0 ? readInt() : 0);
- }
-
- initIndexSize();
- initTableRefs();
- // populate the tables from the CLI image file
- long start = pos();
- for (int i = 0; i < tables.length; i++)
- start = tables[i].init(start);
-
- } // PEFile()
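
A worked example (sketch) of the HeapSizes byte read near the end of the constructor; per ECMA-335 II.24.2.6 each set bit widens one heap index from 2 to 4 bytes, which is exactly how the three *IsShort flags are derived:

    // HeapSizes = 0x05: bits 0 and 2 set -> wide #Strings and #Blob indices, narrow #GUID indices.
    int heapSizes = 0x05;                               // hypothetical value
    boolean stringIsShort = (heapSizes & 0x01) == 0;    // false -> readStringIndex() uses readInt()
    boolean guidIsShort   = (heapSizes & 0x02) == 0;    // true  -> readGUIDIndex() uses readShort()
    boolean blobIsShort   = (heapSizes & 0x04) == 0;    // false -> readBlobIndex() uses readInt()
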
-
-
- public final int[] indexSize = new int[Table.TABLE_SET_LENGTH];
-
- private void initIndexSize() {
- for (int i = 0; i < Table.TABLE_SET_LENGTH; i++) {
- indexSize[i] = 2;
- int[] tableSet = Table.TableSet[i];
- int threshold = (65536 >> Table.NoBits[i]);
- for (int j = 0; j < tableSet.length; j++) {
- if (tableSet[j] >= 0) {
- Table t = tables[tableSet[j]];
- if (t.rows >= threshold) {
- indexSize[i] = 4;
- break;
- }
- }
- }
- }
- }
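
A worked example (sketch, with made-up row counts) of the width rule implemented by initIndexSize(): a coded index into a table set becomes 4 bytes wide as soon as any table in the set has at least 2^(16 - tagBits) rows.

    // TypeDefOrRef uses 2 tag bits, so its threshold is 65536 >> 2 = 16384 rows.
    int tagBits = 2;
    int threshold = 65536 >> tagBits;                   // 16384
    int[] rowCounts = { 20000, 300, 40 };               // hypothetical TypeDef / TypeRef / TypeSpec rows
    int width = 2;
    for (int j = 0; j < rowCounts.length; j++)
        if (rowCounts[j] >= threshold) { width = 4; break; }   // here: 20000 >= 16384 -> width = 4
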
-
- protected void initModule(PEModule module) {
- if (pemodule != null)
- throw new RuntimeException("File " + this
- + " has already been assigned module "
- + pemodule + "; new module is " + module);
- this.pemodule = module;
- }
-
- //##########################################################################
-
- public ModuleDef ModuleDef;
- public ModuleDef ModuleDef(int i) {
- ModuleDef.readRow(i);
- return ModuleDef;
- }
-
- public TypeRef TypeRef;
-
- public TypeDef TypeDef;
- public TypeDef TypeDef(int i) {
- TypeDef.readRow(i);
- return TypeDef;
- }
-
- public FieldTrans FieldTrans;
- public FieldTrans FieldTrans(int i) {
- FieldTrans.readRow(i);
- return FieldTrans;
- }
-
- public FieldDef FieldDef;
- public FieldDef FieldDef(int i) {
- FieldDef.readRow(i);
- return FieldDef;
- }
-
- public MethodTrans MethodTrans;
- public MethodTrans MethodTrans(int i) {
- MethodTrans.readRow(i);
- return MethodTrans;
- }
-
- public MethodDef MethodDef;
- public MethodDef MethodDef(int i) { MethodDef.readRow(i); return MethodDef; }
-
-
- public ParamDef ParamDef;
- public ParamDef ParamDef(int i) { ParamDef.readRow(i); return ParamDef; }
-
- public GenericParam GenericParam;
-
- public GenericParam GenericParam(int i) {
- GenericParam.readRow(i);
- return GenericParam;
- }
-
- public MethodSpec MethodSpec;
-
- public MethodSpec MethodSpec(int i) {
- MethodSpec.readRow(i);
- return MethodSpec;
- }
-
- public GenericParamConstraint GenericParamConstraint;
-
- public GenericParamConstraint GenericParamConstraint(int i) {
- GenericParamConstraint.readRow(i);
- return GenericParamConstraint;
- }
-
- public InterfaceImpl InterfaceImpl;
- public MemberRef MemberRef;
- public Constant Constant;
- public CustomAttribute CustomAttribute;
- public FieldMarshal FieldMarshal;
- public DeclSecurity DeclSecurity;
- public ClassLayout ClassLayout;
- public FieldLayout FieldLayout;
- public StandAloneSig StandAloneSig;
- public EventMap EventMap;
- public EventDef EventDef;
- public PropertyMap PropertyMap;
- public PropertyDef PropertyDef;
- public MethodSemantics MethodSemantics;
- public MethodImpl MethodImpl;
- public ModuleRef ModuleRef;
- public TypeSpec TypeSpec;
- public ImplMap ImplMap;
- public FieldRVA FieldRVA;
- public AssemblyDef AssemblyDef;
- public AssemblyRef AssemblyRef;
- public FileDef FileDef;
- public ExportedType ExportedType;
- public ManifestResource ManifestResource;
- public NestedClass NestedClass;
-
-
- private void initTableRefs() {
- ModuleDef = (ModuleDef) getTable(Table.ModuleDef.ID);
- TypeRef = (TypeRef) getTable(Table.TypeRef.ID);
- TypeDef = (TypeDef) getTable(Table.TypeDef.ID);
- FieldTrans = (FieldTrans) getTable(Table.FieldTrans.ID);
- FieldDef = (FieldDef) getTable(Table.FieldDef.ID);
- MethodTrans = (MethodTrans) getTable(Table.MethodTrans.ID);
- MethodDef = (MethodDef) getTable(Table.MethodDef.ID);
- ParamDef = (ParamDef) getTable(Table.ParamDef.ID);
- InterfaceImpl = (InterfaceImpl) getTable(Table.InterfaceImpl.ID);
- MemberRef = (MemberRef) getTable(Table.MemberRef.ID);
- Constant = (Constant) getTable(Table.Constant.ID);
- CustomAttribute = (CustomAttribute) getTable(Table.CustomAttribute.ID);
- FieldMarshal = (FieldMarshal) getTable(Table.FieldMarshal.ID);
- DeclSecurity = (DeclSecurity) getTable(Table.DeclSecurity.ID);
- ClassLayout = (ClassLayout) getTable(Table.ClassLayout.ID);
- FieldLayout = (FieldLayout) getTable(Table.FieldLayout.ID);
- StandAloneSig = (StandAloneSig) getTable(Table.StandAloneSig.ID);
- EventMap = (EventMap) getTable(Table.EventMap.ID);
- EventDef = (EventDef) getTable(Table.EventDef.ID);
- PropertyMap = (PropertyMap) getTable(Table.PropertyMap.ID);
- PropertyDef = (PropertyDef) getTable(Table.PropertyDef.ID);
- MethodSemantics = (MethodSemantics) getTable(Table.MethodSemantics.ID);
- MethodImpl = (MethodImpl) getTable(Table.MethodImpl.ID);
- ModuleRef = (ModuleRef) getTable(Table.ModuleRef.ID);
- TypeSpec = (TypeSpec) getTable(Table.TypeSpec.ID);
- ImplMap = (ImplMap) getTable(Table.ImplMap.ID);
- FieldRVA = (FieldRVA) getTable(Table.FieldRVA.ID);
- AssemblyDef = (AssemblyDef) getTable(Table.AssemblyDef.ID);
- AssemblyRef = (AssemblyRef) getTable(Table.AssemblyRef.ID);
- FileDef = (FileDef) getTable(Table.FileDef.ID);
- ExportedType = (ExportedType) getTable(Table.ExportedType.ID);
- NestedClass = (NestedClass) getTable(Table.NestedClass.ID);
- ManifestResource =
- (ManifestResource) getTable(Table.ManifestResource.ID);
- GenericParam = (GenericParam) getTable(Table.GenericParam.ID);
- MethodSpec = (MethodSpec) getTable(Table.MethodSpec.ID);
- GenericParamConstraint = (GenericParamConstraint) getTable(Table.GenericParamConstraint.ID);
- }
-
- public static String long2hex(long a) {
- StringBuffer str = new StringBuffer("0000000000000000");
- str.append(Long.toHexString(a));
- int l = str.length();
- return str.substring(l - 16, l);
- }
-
- public static String int2hex(int a) {
- StringBuffer str = new StringBuffer("00000000");
- str.append(Integer.toHexString(a));
- int l = str.length();
- return str.substring(l - 8, l);
- }
-
- public static String short2hex(int a) {
- StringBuffer str = new StringBuffer("0000");
- str.append(Integer.toHexString(a));
- int l = str.length();
- return str.substring(l - 4, l);
- }
-
- public static String byte2hex(int a) {
- StringBuffer str = new StringBuffer("00");
- str.append(Integer.toHexString(a));
- int l = str.length();
- return str.substring(l - 2, l);
- }
-
- public static String bytes2hex(byte[] buf) {
- StringBuffer str = new StringBuffer();
- for (int i = 0; i < buf.length; i++) {
- str.append(byte2hex(buf[i]));
- if (i < buf.length - 1)
- str.append(" ");
- }
- return str.toString();
- }
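
For reference, a short sketch of what the zero-padded helpers above produce:

    System.out.println(byte2hex(0x0a));    // "0a"
    System.out.println(short2hex(0x3c));   // "003c"
    System.out.println(int2hex(255));      // "000000ff"
    System.out.println(long2hex(1L));      // "0000000000000001"
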
-
- //##########################################################################
- // filename
-
- public File getUnderlyingFile() {
- return underlyingFile;
- }
-
- /**
- * @return the absolute path of the file
- */
- public String getAbsolutePath() {
- return underlyingFile.getAbsolutePath();
- }
-
- /**
- * @return the name of this file
- */
- public String getName() {
- return underlyingFile.getName();
- }
-
- /**
- * @return the pathname of this file's parent directory (may be null)
- */
- public String getParent() {
- return underlyingFile.getParent();
- }
-
- /**
- * @return the file representing the directory the file belongs to
- */
- public File getParentFile() {
- return underlyingFile.getParentFile();
- }
-
- public String toString() {
- return getAbsolutePath();
- }
-
- //##########################################################################
- // file pointer manipulation methods
-
- /** Returns the current position in the file. */
- public int pos() {
- return buf.position();
- }
-
- /** Go to the specified position in the file. */
- public void seek(int pos) {
- buf.position(pos);
- }
-
-
- /** Align the current position in the file. */
- public void align(int base) { align(base, 0); }
-
- /** Align the current position in a section starting at offset. */
- public void align(int base, int offset) {
- int p = pos() - offset;
- seek( offset + ((p % base) == 0 ? p : (p/base + 1) * base));
- }
-
- /** Computes the position in the file that corresponds to the given RVA. */
- public int fromRVA(int rva) {
- int i;
- for(i = 0; i < numOfSections; i++)
- if(sections[i].virtAddr <= rva &&
- rva <= (sections[i].virtAddr + sections[i].virtSize))
- return rva - sections[i].virtAddr + sections[i].realAddr;
- throw new RuntimeException("RVA 0x" + Integer.toHexString(rva) +
- " is not within this file's sections!");
- }
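
A worked example (sketch, with made-up section values) of the RVA-to-file-offset mapping that fromRVA() performs:

    int virtAddr = 0x2000, virtSize = 0x1000, realAddr = 0x0200;   // hypothetical PESection values
    int rva = 0x2050;
    int filePos = (virtAddr <= rva && rva <= virtAddr + virtSize)
            ? rva - virtAddr + realAddr   // 0x2050 - 0x2000 + 0x0200 = 0x0250
            : -1;                         // not in this section; fromRVA() keeps searching
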
-
- /** Go to the specified RVA (Relative Virtual Address). */
- public void gotoRVA(int rva) {
- seek(fromRVA(rva));
- }
-
- /** Move forward in the file by the specified number of bytes. */
- public void skip(int n) {
- buf.position(buf.position() + n);
- }
-
- /**
- * Returns a memory mapped little-endian buffer
- * for the specified region of the file.
- */
- public MappedByteBuffer mapBuffer(long offset, int size) {
- try {
- MappedByteBuffer b = file.getChannel()
- .map(FileChannel.MapMode.READ_ONLY, offset, size);
- b.order(java.nio.ByteOrder.LITTLE_ENDIAN);
- return b;
- } catch (IOException e) { throw new RuntimeException(e); }
- }
-
- /** Returns a little-endian buffer over the specified region of the file. */
- public ByteBuffer getBuffer(long offset, int size) {
- buf.mark();
- buf.position((int)offset);
- ByteBuffer bb = buf.slice();
- buf.reset();
- bb.limit(size);
- bb.order(java.nio.ByteOrder.LITTLE_ENDIAN);
- return bb;
- }
-
- //##########################################################################
- // file read methods
-
- /**
- * Read bs.length bytes from the current position in the file.
- */
- public void read(byte[] bs) {
- buf.get(bs);
- }
-
- /**
- * Read 1-byte integer from the current position in the file.
- */
- public int readByte() {
- return buf.get();
- }
-
- /**
- * Read 2-byte integer from the current position in the file.
- */
- public int readShort() {
- return buf.getShort();
- }
-
- /**
- * Read 4-byte integer from the current position in the file.
- */
- public int readInt() {
- return buf.getInt();
- }
-
- /**
- * Read 8-byte integer from the current position in the file.
- */
- public long readLong() {
- return buf.getLong();
- }
-
- /**
- * @return the size of string indices for this file.
- */
- public int getStringIndexSize() {
- return StringIsShort ? 2 : 4;
- }
-
- /**
- * @return the size of GUID indices for this file.
- */
- public int getGUIDIndexSize() {
- return GUIDIsShort ? 2 : 4;
- }
-
- /**
- * @return the size of Blob indices for this file.
- */
- public int getBlobIndexSize() {
- return BlobIsShort ? 2 : 4;
- }
-
- /**
- * @return the size of an index into the table with the given ID for this file
- * @param tableID the ID of the table
- */
- public int getTableIndexSize(int tableID) {
- return tables[tableID].isShort ? 2 : 4;
- }
-
- /**
- * @return the size of an index into the table set with the given ID
- * @param tableSetID the ID of the table set
- */
- public int getTableSetIndexSize(int tableSetID) {
- return indexSize[tableSetID];
- }
-
- /**
- * Read a String index from the current position in the file.
- * @return an index into the String stream
- */
- public int readStringIndex() {
- return StringIsShort ? readShort() : readInt();
- }
-
- /**
- * Read a GUID index from the current position in the file.
- * @return an index into the GUID stream
- */
- public int readGUIDIndex() {
- return GUIDIsShort ? readShort() : readInt();
- }
-
- /**
- * Read a Blob index from the current position in the file.
- * @return an index into the Blob stream
- */
- public int readBlobIndex() {
- return BlobIsShort ? readShort() : readInt();
- }
-
- /** Read an entry interpreted as an index into the table with the given ID. */
- public int readTableIndex(int tableId) {
- return tables[tableId].isShort ? readShort() : readInt();
- }
-
- /** Read a coded index into the table set with the given ID. */
- public int readTableSetIndex(int tableSetId) {
- return indexSize[tableSetId] == 2 ? readShort() : readInt();
- }
-
- /**
- * Read a string from the String stream
- * @return the string at the given position
- * @param pos the position of the string in the String stream
- */
- public String getString(int pos) {
- String s = Strings.getString(pos);
- return s;//.length() == 0 ? null : s;
- }
-
- /**
- * Read a string from the US (User Strings) stream
- * @return the string at the given position
- * @param pos the position of the string in the US stream
- */
- public String getUString(int pos) {
- return US.getString(pos);
- }
-
- /**
- * Read a blob from the Blob Stream
- * @return the blob at the given position
- * @param pos the position of the blob in the Blob stream
- */
- public byte[] getBlob(int pos) {
- return Blob.getBlob(pos);
- }
-
- /** Return the signature blob starting at the given position in the Blob stream. */
- public Sig getSignature(int pos) {
- //return new Sig(getBlob(pos));
- return Blob.getSignature(pos);
- }
-
- /** Return the GUID at the given position in the #GUID stream. */
- public byte[] getGUID(int pos) {
- return GUID.getGUID(pos);
- }
-
- /**
- * @return the table with the corresponding ID.
- */
- public final Table getTable(int tableID) {
- return tables[tableID];
- }
-
- //##########################################################################
-
- /** Print a trace message to standard output. */
- void trace(String msg) {
- System.out.println("[trace] " + msg);
- }
-
- //##########################################################################
-
- public Sig newSignature(ByteBuffer buf) {
- return new Sig(buf);
- }
-
- /** A read cursor over a signature blob (ECMA-335 II.23.2). */
- public class Sig implements Signature {
-
- //######################################################################
- // instance members
-
- protected final ByteBuffer buf;
- protected final int pos;
- protected final int length;
-
- public Sig(ByteBuffer buf) {
- this.buf = buf;
- //int tmpPos = buf.position();
- length = decodeInt();
- this.pos = buf.position();
- }
-
- public String toString() {
- StringBuffer b = new StringBuffer("(");
- int savedPos = buf.position();
- reset();
- for (int i = 0; i < length; i++) {
- b.append(byte2hex(readByte()));
- if (i < length - 1)
- b.append(" ");
- }
- buf.position(savedPos);
- return b.append(")").toString();
- }
-
- public Sig reset() { buf.position(pos); return this; }
-
- public int pos() { return buf.position() - pos; }
-
- /** @return the byte at the current position in the signature Blob.
- * The position is not advanced.
- */
- public int getByte() {
- return (buf.get(buf.position()) + 0x100) & 0xff;
- }
-
- /** @return the byte at the current position in the signature Blob.
- * Move to the next byte.
- */
- public int readByte() { return (buf.get() + 0x100) & 0xff; }
-
- /** Skip the current byte if equal to the given value. */
- public void skipByte(int b) { if (b == getByte()) buf.get(); }
-
- /** Decodes an integer from the signature Blob.
- * @return the decoded integer
- */
- public int decodeInt() {
- int res = readByte();
- if ((res & 0x80) != 0) {
- res = ((res & 0x7f) << 8) | readByte();
- if ((res & 0x4000) != 0)
- res = ((res & 0x3fff)<<16) | (readByte()<<8) | readByte();
- }
- return res;
- }
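
Worked decodings (sketch) that match decodeInt() above and the compressed-integer examples in ECMA-335 II.23.2:

    int oneByte   = 0x03;                                            // 0x03          -> 0x03
    int twoBytes  = ((0xAE & 0x7F) << 8) | 0x57;                     // 0xAE 0x57     -> 0x2E57
    int fourBytes = (((((0xC0 & 0x7F) << 8) | 0x00) & 0x3FFF) << 16)
                  | (0x40 << 8) | 0x00;                              // 0xC0 00 40 00 -> 0x4000
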
-
- /** @return - the type encoded at the current position in the signature
- * according to 23.2.12
- */
- public Type decodeType() {
- try { return decodeType0(); }
- catch (RuntimeException e) {
- System.out.println("" + pos() + "@" + this);
- throw e;
- }
- }
-
- public Type decodeType0() {
- Type type = null;
- int desc = readByte();
- switch (desc) {
- case ELEMENT_TYPE_BOOLEAN:type = Type.GetType("System.Boolean"); break;
- case ELEMENT_TYPE_CHAR: type = Type.GetType("System.Char"); break;
- case ELEMENT_TYPE_I1: type = Type.GetType("System.SByte"); break;
- case ELEMENT_TYPE_U1: type = Type.GetType("System.Byte"); break;
- case ELEMENT_TYPE_I2: type = Type.GetType("System.Int16"); break;
- case ELEMENT_TYPE_U2: type = Type.GetType("System.UInt16"); break;
- case ELEMENT_TYPE_I4: type = Type.GetType("System.Int32"); break;
- case ELEMENT_TYPE_U4: type = Type.GetType("System.UInt32"); break;
- case ELEMENT_TYPE_I8: type = Type.GetType("System.Int64"); break;
- case ELEMENT_TYPE_U8: type = Type.GetType("System.UInt64"); break;
- case ELEMENT_TYPE_R4: type = Type.GetType("System.Single"); break;
- case ELEMENT_TYPE_R8: type = Type.GetType("System.Double"); break;
- case ELEMENT_TYPE_OBJECT: type = Type.GetType("System.Object"); break;
- case ELEMENT_TYPE_STRING: type = Type.GetType("System.String"); break;
- case ELEMENT_TYPE_I: type = Type.GetType("System.IntPtr"); break;
- case ELEMENT_TYPE_U: type = Type.GetType("System.UIntPtr"); break;
- case ELEMENT_TYPE_PTR: // Followed by <type> token.
- if (getByte() == ELEMENT_TYPE_VOID) {
- readByte();
- type = Type.mkPtr(Type.GetType("System.Void"));
- } else type = Type.mkPtr(decodeType());
- break;
- case ELEMENT_TYPE_BYREF: /* although BYREF is not listed in 23.2.12 as a possible alternative, this method is also called when parsing the signatures of a method param and a method return, which do allow for BYREF */
- type = Type.mkByRef(decodeType());
- break;
- case ELEMENT_TYPE_VALUETYPE: // Followed by TypeDefOrRefEncoded
- assert true; // no-op: intentional fall-through to ELEMENT_TYPE_CLASS
- case ELEMENT_TYPE_CLASS:
- // Followed by <type> token
- type = pemodule.getTypeDefOrRef(decodeInt());
- if (type == null) throw new RuntimeException();
- break;
-
- case ELEMENT_TYPE_SZARRAY: // Single-dim array with 0 lower bound.
- skipCustomMods();
- type = Type.mkArray(decodeType(), 1);
- break;
- case ELEMENT_TYPE_ARRAY:
- // <type> <rank> <boundsCount> <bound1> ... <loCount> <lo1> ...
- // ArrayShape defined in 23.2.13 ArrayShape
- Type elem = decodeType();
- int rank = decodeInt();
- int numSizes = decodeInt();
- for (int i = 0; i < numSizes; i++)
- decodeInt(); // TODO don't ignore
- int numLoBounds = decodeInt();
- for (int i = 0; i < numLoBounds; i++)
- decodeInt(); // TODO don't ignore
- type = Type.mkArray(elem, rank);
- break;
-
- // a grammar production from 23.2.12 Type
- // GENERICINST (CLASS | VALUETYPE) TypeDefOrRefEncoded GenArgCount Type*
- case ELEMENT_TYPE_GENERICINST:
- int b = readByte();
- /*- TODO don't ignore b as done above. Should .NET valuetypes be represented as Scala case classes? */
- Type instantiatedType = pemodule.getTypeDefOrRef(decodeInt());
- int numberOfTypeArgs = decodeInt();
- Type[] typeArgs = new Type[numberOfTypeArgs];
- for (int iarg = 0; iarg < numberOfTypeArgs; iarg++) {
- typeArgs[iarg] = decodeType();
- }
- type = new ConstructedType(instantiatedType, typeArgs);
- break;
-
- // another grammar production from 23.2.12 Type
- // ELEMENT_TYPE_VAR number The number non-terminal following MVAR
- // or VAR is an unsigned integer value (compressed).
- /* See also duplicate code in PEModule.java */
- case ELEMENT_TYPE_VAR:
- int typeArgAsZeroBased = decodeInt();
- type = new Type.TMVarUsage(typeArgAsZeroBased, true);
- break;
-
- // another grammar production from 23.2.12 Type
- // ELEMENT_TYPE_MVAR number The number non-terminal following MVAR
- // or VAR is an unsigned integer value (compressed).
- /* See also duplicate code in PEModule.java */
- case ELEMENT_TYPE_MVAR:
- typeArgAsZeroBased = decodeInt();
- type = new Type.TMVarUsage(typeArgAsZeroBased, false);
- break;
-
- case ELEMENT_TYPE_FNPTR:
- // Followed MethodDefSig or by MethodRefSig.
- case ELEMENT_TYPE_END:
- // Marks end of a list
- case ELEMENT_TYPE_CMOD_REQD:
- // Required modifier : followed by a TypeDef or TypeRef token.
- case ELEMENT_TYPE_CMOD_OPT:
- // Optional modifier : followed by a TypeDef or TypeRef token.
- case ELEMENT_TYPE_INTERNAL:
- // Implemented within the CLI.
- case ELEMENT_TYPE_MODIFIER:
- // Or'd with following element types.
- case ELEMENT_TYPE_SENTINEL:
- // Sentinel for varargs method signature.
- case ELEMENT_TYPE_PINNED:
- // Denotes a local variable that points at a pinned object.
- default:
- throw new RuntimeException(byte2hex(desc) +
- "@" + pos() + " in " + this);
-
- }
- if (type == null) throw new RuntimeException();
- return type;
- } // decodeType0()
-
- public PECustomMod decodeFieldType() {
- skipByte(FIELD); // 0x06
- CustomModifier[] cmods = getCustomMods();
- Type fieldType = decodeType();
- return new PECustomMod(fieldType, cmods);
- }
-
- /** decodes the return type of a method signature (22.2.11). */
- public Type decodeRetType() {
- skipCustomMods();
- switch (getByte()) {
- case ELEMENT_TYPE_VOID:
- readByte();
- return Type.GetType("System.Void");
- case ELEMENT_TYPE_TYPEDBYREF:
- return Type.GetType("System.TypedReference");
- case ELEMENT_TYPE_BYREF:
- return decodeType();
- default:
- return decodeType();
- }
- }
-
- public Type decodeParamType() {
- skipCustomMods();
- switch (getByte()) {
- case ELEMENT_TYPE_BYREF:
- return decodeType();
- case ELEMENT_TYPE_TYPEDBYREF:
- return Type.GetType("System.TypedReference");
- default:
- return decodeType();
- }
- }
-
- public void skipCustomMods() {
- while (getByte() == ELEMENT_TYPE_CMOD_OPT /* 0x20 */
- || getByte() == ELEMENT_TYPE_CMOD_REQD /* 0x1f */ )
- {
- boolean isREQD = (getByte() == ELEMENT_TYPE_CMOD_REQD); // 0x1f
- // skip the tag 23.2.7
- readByte();
- // skip the TypeDefOrRefEncoded (23.2.8)
- Type ignored = pemodule.getTypeDefOrRef(decodeInt());
- if(isREQD) {
- // System.err.println("ELEMENT_TYPE_CMOD_REQD: " + ignored);
- // throw new RuntimeException("Reqired CMOD: " + ignored);
- }
- }
- }
-
- /**
- * @see CustomModifier
- */
- public CustomModifier[] getCustomMods() {
- java.util.List/*<CustomModifier>*/ cmods = new java.util.LinkedList();
- while (getByte() == ELEMENT_TYPE_CMOD_OPT || getByte() == ELEMENT_TYPE_CMOD_REQD) {
- boolean isReqd = (getByte() == ELEMENT_TYPE_CMOD_REQD);
- readByte(); // tag 23.2.7
- Type t = pemodule.getTypeDefOrRef(decodeInt()); // TypeDefOrRefEncoded (23.2.8)
- cmods.add(new CustomModifier(isReqd, t));
- }
- CustomModifier[] res = (CustomModifier[])cmods.toArray(new CustomModifier[0]);
- return res;
- }
-
- //######################################################################
-
- } // class Sig
-
- //##########################################################################
-
-} // class PEFile
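
A minimal driver sketch for the class above (the file name is illustrative; such a main would live in a separate class):

    public static void main(String[] args) throws java.io.FileNotFoundException {
        PEFile pe = new PEFile("mscorlib.dll");                     // hypothetical assembly on disk
        System.out.println(pe.getName() + (pe.isDLL ? " (DLL)" : " (EXE)"));
        System.out.println("module: " + pe.ModuleDef(1).getName());
        System.out.println("TypeDef rows: " + pe.TypeDef.rows);
    }
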
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEModule.java b/src/msil/ch/epfl/lamp/compiler/msil/PEModule.java
deleted file mode 100644
index cb8cd8f098..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PEModule.java
+++ /dev/null
@@ -1,456 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.PEFile;
-import ch.epfl.lamp.compiler.msil.PEFile.Sig;
-import ch.epfl.lamp.compiler.msil.util.Signature;
-import ch.epfl.lamp.compiler.msil.util.Table;
-import ch.epfl.lamp.compiler.msil.util.Table.*;
-
-import java.nio.ByteBuffer;
-
-/** Represents a module corresponding to a PE/COFF file
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-final class PEModule extends Module {
-
- //##########################################################################
-
- protected final PEFile pefile;
-
- private final int definingRow;
-
- private Type[] typeRefs = null;
-
- protected PEModule(PEFile pefile, int definingRow, String scopeName,
- Assembly assem)
- {
- super(pefile.getName(), pefile.getAbsolutePath(), scopeName, assem);
- this.pefile = pefile;
- this.definingRow = definingRow;
- pefile.initModule(this);
- pefile.TypeDef.load(); // load into memory
- //loadTypes();
- //pefile.FieldDef.load();
- //pefile.MethodDef.load();
- loadGlobals();
- }
-
- //##########################################################################
-
- public Type GetType(String typeName) {
- initTypes();
- Object o = typesMap.get(typeName);
- if (o == null) {
- //System.out.println("PEModule.GetType(): Unable to find type "
- // + typeName + " int module " + this);
- return null;
- }
- return o instanceof Type ? (Type)o
- : getTypeDef(((Integer)o).intValue());
- }
-
-
- /** Load information about the types defined in this module.
- */
- protected void loadTypes() {
- typeRefs = new Type[pefile.TypeRef.rows];
- final int nbTypes = pefile.TypeDef.rows;
- for (int row = 2; row <= nbTypes; row++) { // row 1 is the synthetic <Module> type holding globals
- String name = pefile.TypeDef(row).getFullName();
- typesMap.put(name, new Integer(row));
- }
- this.types = new Type[nbTypes - 1];
- for (int row = 2; row <= nbTypes; row++) {
- getTypeDef(row);
- }
- }
-
- /** Return the type defined at the given row in the TypeDef table.
- */
- Type getTypeDef(int row) {
- if (this.types[row - 2] != null)
- return this.types[row - 2];
-
- TypeDef type = pefile.TypeDef(row);
- int attrs = type.Flags;
- String name = type.getFullName();
-
- Type declType = null;
- if (TypeAttributes.isNested(attrs)) {
- for (int i = 1; i <= pefile.NestedClass.rows; i++) {
- pefile.NestedClass.readRow(i);
- if (pefile.NestedClass.NestedClass == row)
- declType = getTypeDef
- (pefile.NestedClass.EnclosingClass);
- }
- }
- Type t = new PEType
- (this, attrs, name, declType, Type.AuxAttr.None, pefile, row);
- types[row - 2] = t;
- addType(t);
- int[] tvarIdxes = pefile.GenericParam.getTVarIdxes(row);
- // if(tvarIdxes.length > 0) { System.out.println("Type: " + t); }
- for(int i = 0; i < tvarIdxes.length; i++) {
- GenericParamAndConstraints tvarAndConstraints = getTypeConstraints(tvarIdxes[i]);
- // add tvarAndConstraints as i-th TVar in t
- t.addTVar(tvarAndConstraints);
- }
- return t;
- }
-
- public GenericParamAndConstraints getTypeConstraints(int genParamIdx) {
- int tvarNumber = pefile.GenericParam(genParamIdx).Number;
- // tvarName can be null
- String tvarName = pefile.GenericParam.getName();
- boolean isInvariant = pefile.GenericParam.isInvariant();
- boolean isCovariant = pefile.GenericParam.isCovariant();
- boolean isContravariant = pefile.GenericParam.isContravariant();
- boolean isReferenceType = pefile.GenericParam.isReferenceType();
- boolean isValueType = pefile.GenericParam.isValueType();
- boolean hasDefaultConstructor = pefile.GenericParam.hasDefaultConstructor();
- // grab constraints
- int[] TypeDefOrRefIdxes = pefile.GenericParamConstraint.getTypeDefOrRefIdxes(genParamIdx);
- Type[] tCtrs = new Type[TypeDefOrRefIdxes.length];
- for(int i = 0; i < TypeDefOrRefIdxes.length; i++) {
- Type tConstraint = getTypeDefOrRef(TypeDefOrRefIdxes[i]);
- tCtrs[i] = tConstraint;
- // System.out.println("\t\tConstraint: " + tConstraint);
- }
- GenericParamAndConstraints res = new GenericParamAndConstraints(tvarNumber, tvarName, tCtrs,
- isInvariant, isCovariant, isContravariant,
- isReferenceType, isValueType, hasDefaultConstructor);
- return res;
- }
-
- /**
- * Load the description of the module-global fields and methods
- */
- protected void loadGlobals() {
- //TODO:
- }
-
- protected void loadCustomAttributes(Type attributeType) {
- initAttributes(this, 1, Table.ModuleDef.ID, attributeType);
- }
-
- /** Return the type referenced by the given row in the TypeRef table.
- */
- Type getTypeRef(int row) {
- return getTypeRef(row, null);
- }
-
- /** Return the type referenced by the given row in the TypeRef table
- * only if it resides in the given assembly.
- * <i>Used by initCustomAttributes to avoid unnecessary loading
- * of referenced assemblies.</i>
- */
- Type getTypeRef(int row, Assembly inAssembly) {
- Type type = typeRefs[row - 1];
- if (type != null)
- return type;
-
- Table.TypeRef tr = pefile.TypeRef;
- tr.readRow(row);
- int tableId = Table.getTableId(Table._ResolutionScope,
- tr.ResolutionScope);
- int refRow = tr.ResolutionScope >> Table.NoBits[Table._ResolutionScope];
- final String typeName = tr.getFullName();
- pefile.getTable(tableId).readRow(refRow);
- switch (tableId) {
- case AssemblyRef.ID:
- String name = pefile.AssemblyRef.getName();
- if (inAssembly != null && !inAssembly.GetName().Name.equals(name))
- return null;
- Assembly assem = getAssembly(name);
- type = assem.GetType(typeName);
- if (type == null) {
- // HACK: the IKVM.OpenJDK.Core assembly is compiled against mscorlib.dll v2.0
- // The MSIL library cannot parse the v2.0 mscorlib because of generics, so we
- // use the v1.0
- // However, the java.io.FileDescriptor.FlushFileBuffers method uses a type
- // Microsoft.Win32.SafeHandles.SafeFileHandle, which only exists in mscorlib
- // v2.0
- // For now, just return Object (fine as long as we don't use that method).
- Assembly asmb = getAssembly("mscorlib");
- type = asmb.GetType("System.Object");
- //throw new RuntimeException("Failed to locate type " +
- //typeName + " in assembly " + assem);
- }
- break;
- case ModuleDef.ID:
- assert refRow == 1;
- type = this.GetType(typeName);
- //assert type != null;
- break;
- case TypeRef.ID:
- Type nestingType = getTypeRef(refRow);
- String nestedName = typeName;
- type = nestingType.GetNestedType(nestedName);
- break;
- case ModuleRef.ID:
- type = getAssembly(pefile.ModuleRef.getName()).GetType(typeName);
- break;
- default:
- throw new RuntimeException(refRow + "@" + pefile.getTable(tableId).getTableName()/* PEFile.byte2hex(tableId)*/);
- }
- if (typeRefs[row - 1] != null)
- System.out.println("TypeRef[" + PEFile.short2hex(row) + "] " +
- "changing type " + typeRefs[row - 1] +
- " for type " + type);
- typeRefs[row - 1] = type;
- assert type != null : "Couldn't find type " + typeName;
- return type;
- }
-
- private Assembly getAssembly(String name) {
- Assembly assem = Assembly.getAssembly(name);
- if (assem != null)
- return assem;
- java.io.File dir = pefile.getParentFile();
- assem = Assembly.LoadFrom(dir, name);
- if (assem != null)
- return assem;
- try {
- dir = pefile.getUnderlyingFile().getCanonicalFile().getParentFile();
- } catch (java.io.IOException e) {
- throw new RuntimeException(e);
- }
- assem = Assembly.LoadFrom(dir, name);
- if (assem != null)
- return assem;
- throw new RuntimeException("Cannot find assembly: " + name);
-
- }
-
- /** Return the type corresponding to TypeDefOrRef coded index.
- * @param index - TypeDefOrRef coded index according to 23.2.6.
- */
- public Type getTypeDefOrRef(int index) {
- int tableId = Table.getTableId(Table._TypeDefOrRef, index);
- int row = index >> Table.NoBits[Table._TypeDefOrRef];
- Type type = null;
- switch (tableId) {
- case Table.TypeDef.ID:
- type = getTypeDef(row);
- break;
- case Table.TypeRef.ID:
- return getTypeRef(row);
- case Table.TypeSpec.ID:
- Table.TypeSpec ts = pefile.TypeSpec;
- ts.readRow(row);
- int posInBlobStream = ts.Signature;
- byte[] blobArrWithLengthStripped = pefile.Blob.getBlob(posInBlobStream);
- byte[] compressedUInt = compressUInt(blobArrWithLengthStripped.length);
- byte[] byteArr = new byte[blobArrWithLengthStripped.length + compressedUInt.length];
- System.arraycopy(compressedUInt, 0, byteArr, 0, compressedUInt.length);
- System.arraycopy(blobArrWithLengthStripped, 0, byteArr, compressedUInt.length, blobArrWithLengthStripped.length);
- ByteBuffer buf = ByteBuffer.wrap(byteArr);
- Sig sig = pefile.new Sig(buf);
- int desc = sig.readByte();
-
- switch (desc) {
-
- // GENERICINST (CLASS | VALUETYPE) TypeDefOrRefEncoded GenArgCount Type*
- case Signature.ELEMENT_TYPE_GENERICINST: // i.e. 0x15
- int b = sig.readByte(); // i.e. (0x12 | 0x11)
- /* TODO don't ignore b as done above */
- Type instantiatedType = getTypeDefOrRef(sig.decodeInt()); // TypeDefOrRefEncoded
- int numberOfTypeArgs = sig.decodeInt(); // GenArgCount
- Type[] typeArgs = new Type[numberOfTypeArgs];
- for (int iarg = 0; iarg < numberOfTypeArgs; iarg++) {
- typeArgs[iarg] = sig.decodeType(); // Type*
- }
- type = new ConstructedType(instantiatedType, typeArgs);
- break;
-
- /* Miguel says: Actually the following grammar rule production is not among those for a TypeSpecBlob
- but I've found it in assemblies compiled from C# 3.0.
- See also duplicate code in PEFile.java */
- case Signature.ELEMENT_TYPE_VAR:
- int typeArgAsZeroBased = sig.decodeInt();
- type = new Type.TMVarUsage(typeArgAsZeroBased, true);
- break;
-
- /* Miguel says: Actually the following grammar rule production is not among those for a TypeSpecBlob
- but I've found it in assemblies compiled from C# 3.0.
- See also duplicate code in PEFile.java */
- case Signature.ELEMENT_TYPE_MVAR:
- typeArgAsZeroBased = sig.decodeInt();
- type = new Type.TMVarUsage(typeArgAsZeroBased, false);
- break;
-
- case Signature.ELEMENT_TYPE_SZARRAY: // Single-dim array with 0 lower bound.
- sig.skipCustomMods();
- type = Type.mkArray(sig.decodeType(), 1);
- break;
-
- case Signature.ELEMENT_TYPE_ARRAY:
- // <type> <rank> <boundsCount> <bound1> ... <loCount> <lo1> ...
- // ArrayShape defined in 23.2.13 ArrayShape
- Type elem = sig.decodeType();
- int rank = sig.decodeInt();
- int numSizes = sig.decodeInt();
- for (int i = 0; i < numSizes; i++)
- sig.decodeInt(); // TODO don't ignore
- int numLoBounds = sig.decodeInt();
- for (int i = 0; i < numLoBounds; i++)
- sig.decodeInt(); // TODO don't ignore
- type = Type.mkArray(elem, rank);
- break;
-
- default:
- // TODO remaining grammar productions in 23.2.14 are for PTR and FNPTR only
- throw new RuntimeException("PEModule.getTypeDefOrRef(): TypeSpec");
- }
- break;
- default:
- throw new RuntimeException("PEModule.getTypeDefOrRef(): oops!");
- }
- return type;
- }
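
A worked example (sketch) of the coded-index decoding done at the top of getTypeDefOrRef(): per ECMA-335 II.24.2.6 a TypeDefOrRef index keeps a 2-bit table tag (0 = TypeDef, 1 = TypeRef, 2 = TypeSpec) in its low bits and the row number above them.

    int index = 0x1A;            // 0b11010, hypothetical coded index
    int tag   = index & 0x03;    // 2 -> TypeSpec table
    int row   = index >> 2;      // 6 -> row 6 of that table
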
-
- private byte[] compressUInt(int u) {
- // 23.2 in Partition II
- // TODO add tests based on the examples in 23.2 in Partition II
- // the CCI implementation is WriteCompressedUInt
-
- /* informal discussion at http://www.cnblogs.com/AndersLiu/archive/2010/02/09/en-compressed-integer-in-metadata.html */
- if (u <= 127 && 0 <= u) {
- return new byte[]{(byte) u};
- } else if (u > 127 && u <= (2 ^ 14 - 1)) {
- byte loByte = (byte)(u & 0xff);
- byte hiByte = (byte)((u >> 8) | 0x80);
- byte[] res = new byte[] { hiByte, loByte };
- return res;
- } else {
- byte b0 = (byte)(u & 0xff);
- byte b1 = (byte)((u & 0xff00)>>8);
- byte b2 = (byte)((u & 0xff0000)>>16);
- byte b3 = (byte)((u >> 24)|0xc0);
- byte[] res = new byte[] { b3, b2, b1, b0 };
- return res;
- }
- }
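
A sketch of the tests the TODO above asks for, taken from the worked examples in ECMA-335 II.23.2 (assumes test code with access to the private compressUInt):

    // 0x03 -> {03}, 0x7F -> {7F}, 0x80 -> {80 80}, 0x2E57 -> {AE 57}, 0x4000 -> {C0 00 40 00}
    assert java.util.Arrays.equals(compressUInt(0x7F),   new byte[] { 0x7F });
    assert java.util.Arrays.equals(compressUInt(0x80),   new byte[] { (byte) 0x80, (byte) 0x80 });
    assert java.util.Arrays.equals(compressUInt(0x2E57), new byte[] { (byte) 0xAE, 0x57 });
    assert java.util.Arrays.equals(compressUInt(0x4000), new byte[] { (byte) 0xC0, 0x00, 0x40, 0x00 });
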
-
- /**
- * Returns the method defined at the given row of the MethodDef table
- * by looking up the type that defines the method.
- */
- MethodBase getMethod(int row) {
- for (int i = 0; i < types.length; i++) {
- PEType type = (PEType)types[i];
- if ((type.methodListBeg <= row) && (row < type.methodListEnd)) {
- type.initMethods();
- return type.methoddefs[row - type.methodListBeg];
- }
- }
- throw new RuntimeException("In module " + this
- + ": cannot find type defining method 0x"
- + PEFile.int2hex(row));
- }
-
- /** Returns the member referenced by the given row of the MemberRef table.
- */
- protected MemberInfo getMemberRef(int row) {
- return getMemberRef(row, null);
- }
-
- /** Returns the member referenced by the given row of the MemberRef table
- * if defined in the given assembly.
- * <i>Used by initCustomAttributes to avoid unnecessary loading of
- * referenced assemblies</i>
- */
- protected MemberInfo getMemberRef(int row, Assembly inAssembly) {
- MemberInfo member = null;
- MemberRef mref = pefile.MemberRef;
- mref.readRow(row);
- int mtbl = Table.getTableId(Table._MemberRefParent, mref.Class);
- int mind = Table.getTableIndex(Table._MemberRefParent, mref.Class);
- switch (mtbl) {
- case TypeRef.ID:
- Type type = getTypeRef(mind, inAssembly);
- if (type == null)
- return null;
- Sig sig = mref.getSignature();
- int callconv = sig.readByte(); // should be 0x20
- int paramCount = sig.decodeInt();
- //sig.skipByte(Signature.ELEMENT_TYPE_BYREF); //from MethodDef
- Type retType = sig.decodeRetType();
- Type[] paramType = new Type[paramCount];
- for (int i = 0; i < paramCount; i++)
- paramType[i] = sig.decodeParamType();
-
- String memberName = mref.getName();
- if (memberName.equals(ConstructorInfo.CTOR) ||
- memberName.equals(ConstructorInfo.CCTOR))
- {
- member = type.GetConstructor(paramType);
- } else {
- member = type.GetMethod(memberName, paramType);
- }
- assert member != null : type + "::" + memberName;
- break;
- case ModuleRef.ID:
- case MethodDef.ID:
- case TypeSpec.ID:
- throw new RuntimeException("initCustomAttributes: "
- + pefile.getTable(mtbl).getTableName());
- }
- return member;
- }
-
- protected void initCustomAttributes(Type attributeType) {
- initAttributes(this, definingRow, Table.ModuleDef.ID, attributeType);
- }
-
- // explicitly only package-visible
- void initAttributes(CustomAttributeProvider cap, int definingRow,
- int sourceTableId, Type attributeType)
- {
- int parentIndex = Table.encodeIndex(definingRow,
- Table._HasCustomAttribute,
- sourceTableId);
- Table.CustomAttribute attrs = pefile.CustomAttribute;
- for (int row = 1; row <= attrs.rows; row++) {
- ConstructorInfo attrConstr = null;
- attrs.readRow(row);
- if (attrs.Parent == parentIndex) {
- int tableId = Table.getTableId(Table._CustomAttributeType,
- attrs.Type);
- int ind = Table.getTableIndex(Table._CustomAttributeType,
- attrs.Type);
- switch (tableId) {
- case MethodDef.ID:
- attrConstr = (ConstructorInfo)this.getMethod(ind);
- break;
- case MemberRef.ID:
- //System.out.println(PEFile.short2hex(ind) + "@MemberRef");
- Assembly attrAssem =
- attributeType == null ? null : attributeType.Assembly();
- MemberInfo mi = this.getMemberRef(ind, attrAssem);
- if (mi != null) {
- assert mi instanceof ConstructorInfo
- : "Expected ConstructorInfo; found " + mi;
- attrConstr = (ConstructorInfo)mi;
- }
- break;
- default:
- throw new RuntimeException();
- }
- if (attrConstr != null
- && (attrConstr.DeclaringType == attributeType
- || attributeType == null))
- cap.addCustomAttribute(attrConstr, attrs.getValue());
- }
- }
- }
-
- //##########################################################################
-
-} // class PEModule
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEType.java b/src/msil/ch/epfl/lamp/compiler/msil/PEType.java
deleted file mode 100644
index 418c6603b3..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PEType.java
+++ /dev/null
@@ -1,419 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.PEFile.Sig;
-
-import ch.epfl.lamp.compiler.msil.util.Table;
-import ch.epfl.lamp.compiler.msil.util.Table.*;
-import ch.epfl.lamp.compiler.msil.util.Signature;
-import ch.epfl.lamp.compiler.msil.util.PECustomMod;
-
-import java.util.ArrayList;
-
-/**
- * Represents a type from a .NET assembly
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-final class PEType extends Type implements Signature {
-
- //##########################################################################
-
- /** The PEFile that holds the description of the type. */
- final PEFile file;
-
- /** The number of the row in the TypeDef table defining the type. */
- final int definingRow;
-
- /** The row of the first method in the MethodDef table. */
- final int methodListBeg;
-
- /** The row of the last method in the MethodDef table + 1. */
- final int methodListEnd;
-
- /** @param definingRow - the index in the TypeDef table where
- * the type description is.
- */
- PEType(PEModule module,
- int attributes,
- String fullName,
- Type declType,
- int auxAttr,
- PEFile file,
- int definingRow)
- {
- super(module, attributes, fullName, null, null, declType, auxAttr);
- this.file = file;
- this.definingRow = definingRow;
- methodListBeg = file.TypeDef(definingRow).MethodList;
- methodListEnd = definingRow < file.TypeDef.rows
- ? file.TypeDef(definingRow + 1).MethodList
- : file.MethodDef.rows + 1;
- }
-
- //##########################################################################
- // lazy type construction methods
-
- protected void loadBaseType() {
- TypeDef type = file.TypeDef(definingRow);
- baseType = type.Extends == 0 ? null
- : ((PEModule)Module).getTypeDefOrRef(type.Extends);
- }
-
- protected void loadFields() {
- // the list of the declared fields starts from the
- // FieldList index in the TypeDef table up to the smaller of the:
- // - the last row of the FieldDef table
- // - the start of the next list of fields determined by the
- // FieldList index of the next row in the TypeDef table
- final ArrayList fields = new ArrayList();
- int fieldListBeg = file.TypeDef(definingRow).FieldList;
- int fieldListEnd = file.FieldDef.rows + 1;
- if (definingRow < file.TypeDef.rows)
- fieldListEnd = file.TypeDef(definingRow + 1).FieldList;
-
- for (int row = fieldListBeg; row < fieldListEnd; row++) {
- int frow = file.FieldTrans.rows == 0
- ? row : file.FieldTrans(row).Field;
- int attrs = file.FieldDef(frow).Flags;
- String name = file.FieldDef.getName();
- //System.out.println("\t-->Loading field: " + name);
- Sig sig = file.FieldDef.getSignature();
- PECustomMod pecmod = sig.decodeFieldType();
- Object val = null;
- Table.Constant consts = file.Constant;
- for (int i = 1; i <= consts.rows; i++) {
- consts.readRow(i);
- int tableId = Table.getTableId(Table._HasConstant,consts.Parent);
- int refRow = consts.Parent >> Table.NoBits[Table._HasConstant];
- if (tableId == Table.FieldDef.ID && refRow == frow)
- val = consts.getValue();
- }
- FieldInfo field = new PEFieldInfo(row, name, attrs, pecmod, val);
- if (field.Name.equals("value__") && field.IsSpecialName()) {
- assert underlyingType == null : underlyingType.toString();
- underlyingType = field.FieldType;
- }
- fields.add(field);
- }
- this.fields = (FieldInfo[])
- fields.toArray(FieldInfo.EMPTY_ARRAY);
- fields.clear();
- }
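
A worked example (sketch, with made-up row numbers) of the list-range convention described in the comment at the top of loadFields():

    // If TypeDef row 3 has FieldList = 10 and TypeDef row 4 has FieldList = 14,
    // then FieldDef rows 10..13 belong to row 3; for the last TypeDef row the
    // range ends at FieldDef.rows + 1 instead.
    int fieldListBeg = 10, fieldListEnd = 14;
    for (int row = fieldListBeg; row < fieldListEnd; row++) {
        // visits rows 10, 11, 12, 13
    }
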
-
- protected MethodBase[] methoddefs;
-
- protected MethodInfo getMethod(int n) {
- return (MethodInfo)methoddefs[n - methodListBeg];
- }
-
- protected void loadMethods() {
- methoddefs = new MethodBase[methodListEnd - methodListBeg];
-
- final ArrayList methods = new ArrayList();
- final ArrayList constrs = new ArrayList();
- PEModule pemodule = (PEModule) Module;
- for (int row = methodListBeg; row < methodListEnd; row++) {
- int mrow = file.MethodTrans.rows == 0
- ? row : file.MethodTrans(row).Method;
- int attrs = file.MethodDef(mrow).Flags;
- String name = file.MethodDef.getName();
- Sig sig = file.MethodDef.getSignature();
- /* we're about to parse a MethodDefSig, defined in Sec. 23.2.1 of Partition II */
-
- int callConv = sig.readByte();
- // TODO decode HASTHIS from high byte of calling convention
- // TODO decode EXPLICITTHIS from high byte of calling convention
- // TODO handle VARARG calling convention (not CLS but may show up )
- if((callConv & 0x1F) == Signature.GENERIC) {
- int genParamCount = sig.decodeInt();
- /* genParamCount is ignored because the method's type params will be obtained below
- (see: file.GenericParam.getMVarIdxes(row) ) */
- }
- int paramCount = sig.decodeInt();
- Type retType = sig.decodeRetType();
- Type[] paramType = new Type[paramCount];
- for (int i = 0; i < paramCount; i++)
- paramType[i] = sig.decodeParamType();
-
- ParameterInfo[] params = new ParameterInfo[paramCount];
- int paramListBeg = file.MethodDef.ParamList;
- int paramListEnd = paramListBeg + paramCount;
- if (paramListEnd > file.ParamDef.rows) {
- /* don't try to read param names past ParamDef's row count.
- Some assembly-writers don't bother to give names for all params. */
- paramListEnd = file.ParamDef.rows + 1;
- }
- for (int i = paramListBeg; i < paramListEnd; i++) {
- int pattr = file.ParamDef(i).Flags;
- String paramName = file.ParamDef.getName();
- int seq = file.ParamDef.Sequence;
- if (seq == 0) {
- //System.out.println("Retval attributes 0x" +
- // PEFile.short2hex(pattr));
- } else {
- params[seq - 1] = new ParameterInfo(paramName, paramType[seq - 1], pattr, seq - 1);
- }
- }
- for (int i = 0; i < params.length; i++) {
- if (params[i] == null)
- params[i] = new ParameterInfo(null, paramType[i], 0, 0);
- }
- MethodBase method = null;
- if ((attrs & MethodAttributes.SpecialName) != 0
- && (attrs & MethodAttributes.RTSpecialName) != 0
- && (name.equals(ConstructorInfo.CTOR)
- || name.equals(ConstructorInfo.CCTOR)))
- {
- method = new PEConstructorInfo(row, attrs, params);
- }
- else {
- method = new PEMethodInfo(row, name, attrs, retType, params);
- int[] mvarIdxes = file.GenericParam.getMVarIdxes(row);
- // if(mvarIdxes.length > 0) { System.out.println("Method: " + method); }
- for(int i = 0; i < mvarIdxes.length; i++) {
- GenericParamAndConstraints mvarAndConstraints = pemodule.getTypeConstraints(mvarIdxes[i]);
- // add mvarAndConstraints as i-th MVar in method
- ((PEMethodInfo)method).addMVar(mvarAndConstraints);
- }
- }
- (method.IsConstructor() ? constrs : methods).add(method);
- methoddefs[row - methodListBeg] = method;
- }
-
- this.constructors = (ConstructorInfo[])
- constrs.toArray(ConstructorInfo.EMPTY_ARRAY);
- this.methods = (MethodInfo[])
- methods.toArray(MethodInfo.EMPTY_ARRAY);
- constrs.clear(); methods.clear();
- }
-
- protected void loadProperties() {
- final PropertyMap pmap = file.PropertyMap;
- if (pmap == null) {
- properties = PropertyInfo.EMPTY_ARRAY;
- return;
- }
-
- final PropertyDef pdef = file.PropertyDef;
- int propListBeg = -1;
- int propListEnd = pdef.rows + 1;
- for (int i = 1; i <= pmap.rows; i++) {
- pmap.readRow(i);
- if (pmap.Parent == this.definingRow) {
- propListBeg = pmap.PropertyList;
- if (i < pmap.rows) {
- pmap.readRow(i + 1);
- propListEnd = pmap.PropertyList;
- }
- break;
- }
- }
- if (propListBeg < 0) {
- properties = PropertyInfo.EMPTY_ARRAY;
- return;
- }
-
- final ArrayList properties = new ArrayList();
- for (int i = propListBeg; i < propListEnd; i++) {
- pdef.readRow(i);
- Sig sig = pdef.getSignature();
- int b = sig.readByte();
- b &= ~HASTHIS;
- int paramCount = sig.readByte();
- assert b == PROPERTY;
- Type propType = sig.decodeType();
- int index = Table.encodeIndex(i, Table._HasSemantics,
- Table.PropertyDef.ID);
- MethodSemantics msem = file.MethodSemantics;
- MethodInfo getter = null, setter = null;
- for (int j = 1; j <= msem.rows; j++) {
- msem.readRow(j);
- if (msem.Association != index)
- continue;
- if (msem.isGetter())
- getter = getMethod(msem.Method);
- else if (msem.isSetter())
- setter = getMethod(msem.Method);
- else
- System.err.println("PEType.loadProperties(): !?!");
- }
- properties.add
- (new PEPropertyInfo(i, pdef.getName(), (short)pdef.Flags,
- propType, getter, setter));
- }
- this.properties = (PropertyInfo[]) properties
- .toArray(PropertyInfo.EMPTY_ARRAY);
- }
-
- protected void loadEvents() {
- EventMap emap = file.EventMap;
- if (emap == null) {
- this.events = EventInfo.EMPTY_ARRAY;
- return;
- }
-
- final EventDef edef = file.EventDef;
- int eventListBeg = -1;
- int eventListEnd = edef.rows + 1;
- for (int i = 1; i <= emap.rows; i++) {
- emap.readRow(i);
- if (emap.Parent == this.definingRow) {
- eventListBeg = emap.EventList;
- if (i < emap.rows) {
- emap.readRow(i + 1);
- eventListEnd = emap.EventList;
- }
- break;
- }
- }
- if (eventListBeg < 0) {
- this.events = EventInfo.EMPTY_ARRAY;
- return;
- }
-
- final ArrayList events = new ArrayList();
- final MethodSemantics msem = file.MethodSemantics;
- for (int i = eventListBeg; i < eventListEnd; i++) {
- edef.readRow(i);
- final Type handler =
- ((PEModule)Module).getTypeDefOrRef(edef.EventType);
- int index =
- Table.encodeIndex(i, Table._HasSemantics, Table.EventDef.ID);
- MethodInfo add = null, remove = null;
- for (int j = 1; j <= msem.rows; j++) {
- msem.readRow(j);
- if (msem.Association != index)
- continue;
- if (msem.isAddOn())
- add = getMethod(msem.Method);
- else if (msem.isRemoveOn())
- remove = getMethod(msem.Method);
- else {
- }
- }
- events.add(new PEEventInfo(i, edef.getName(),
- (short)edef.EventFlags,
- handler, add, remove));
- }
- this.events = (EventInfo[]) events
- .toArray(EventInfo.EMPTY_ARRAY);
- }
-
- protected void loadNestedTypes() {
- final ArrayList nested = new ArrayList();
- for (int i = 1; i <= file.NestedClass.rows; i++) {
- file.NestedClass.readRow(i);
- if (file.NestedClass.EnclosingClass == this.definingRow)
- nested.add(((PEModule)Module)
- .getTypeDef(file.NestedClass.NestedClass));
- }
- this.nestedTypes = (Type[]) nested.toArray(Type.EmptyTypes);
- }
-
- protected void loadInterfaces() {
- // get the interfaces implemented by this class
- interfaces = Type.EmptyTypes;
- int index = file.InterfaceImpl.findType(definingRow);
- if (index > 0) {
- ArrayList ifaces = new ArrayList();
- for (int i = index; i <= file.InterfaceImpl.rows; i++) {
- file.InterfaceImpl.readRow(i);
- if (file.InterfaceImpl.Class != definingRow)
- break;
- ifaces.add(((PEModule)Module)
- .getTypeDefOrRef(file.InterfaceImpl.Interface));
- }
- interfaces = (Type[]) ifaces.toArray(new Type[ifaces.size()]);
- }
- }
-
- protected void loadCustomAttributes(Type attributeType) {
- initAttributes(this, definingRow, Table.TypeDef.ID, attributeType);
- }
-
- private void initAttributes(CustomAttributeProvider cap, int definingRow,
- int sourceTableId, Type attributeType)
- {
- ((PEModule)this.Module).initAttributes
- (cap, definingRow, sourceTableId, attributeType);
- }
-
- //##########################################################################
-
- private class PEFieldInfo extends FieldInfo {
- private final int definingRow;
- public PEFieldInfo(int definingRow, String name,
- int attrs, PECustomMod pecmod, Object value)
- {
- super(name, PEType.this, attrs, pecmod, value);
- this.definingRow = definingRow;
- }
- protected void loadCustomAttributes(Type attributeType) {
- PEType.this.initAttributes
- (this, definingRow, Table.FieldDef.ID, attributeType);
- }
- }
-
- private class PEMethodInfo extends MethodInfo {
- private final int definingRow;
- public PEMethodInfo(int row, String name,
- int attrs, Type retType, ParameterInfo[] params)
- {
- super(name, PEType.this, attrs, retType, params);
- this.definingRow = row;
- }
- protected void loadCustomAttributes(Type attributeType) {
- PEType.this.initAttributes
- (this, definingRow, Table.MethodDef.ID, attributeType);
- }
- }
-
- private class PEConstructorInfo extends ConstructorInfo {
- private final int definingRow;
- public PEConstructorInfo(int row, int attrs, ParameterInfo[] params) {
- super(PEType.this, attrs, params);
- this.definingRow = row;
- }
- protected void loadCustomAttributes(Type attributeType) {
- PEType.this.initAttributes
- (this, definingRow, Table.MethodDef.ID, attributeType);
- }
- }
-
- private class PEPropertyInfo extends PropertyInfo {
- private final int definingRow;
- public PEPropertyInfo(int row, String name, short attrs, Type propType,
- MethodInfo getter, MethodInfo setter)
- {
- super(name, PEType.this, attrs, propType, getter, setter);
- this.definingRow = row;
- }
- protected void loadCustomAttributes(Type attributeType) {
- PEType.this.initAttributes
- (this, definingRow, Table.PropertyDef.ID, attributeType);
- }
- }
-
- private class PEEventInfo extends EventInfo {
- private final int definingRow;
- public PEEventInfo(int row, String name, short attrs, Type handler,
- MethodInfo add, MethodInfo remove)
- {
- super(name, PEType.this, attrs, handler, add, remove);
- this.definingRow = row;
- }
- protected void loadCustomAttributes(Type attributeType) {
- PEType.this.initAttributes
- (this, definingRow, Table.EventDef.ID, attributeType);
- }
- }
-
- //##########################################################################
-
-} // class PEType
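
The loadProperties and loadEvents bodies above share one lookup idiom: a map table (PropertyMap/EventMap) pairs a parent TypeDef row with the first row of its child list, and the list ends where the next map row's list begins (or at the end of the child table). A minimal, self-contained sketch of that range lookup follows; all names here are illustrative and not part of the msil library.

// Illustrative sketch only: mirrors the PropertyMap/EventMap range scan above.
final class MapRangeSketch {
    /** mapParent[i] = parent row of map row i; mapListStart[i] = first child row it owns. */
    static int[] childRange(int[] mapParent, int[] mapListStart,
                            int parentRow, int childTableRows) {
        for (int i = 0; i < mapParent.length; i++) {
            if (mapParent[i] == parentRow) {
                int beg = mapListStart[i];
                int end = (i + 1 < mapListStart.length) ? mapListStart[i + 1]
                                                        : childTableRows + 1;
                return new int[] { beg, end }; // half-open range, rows are 1-based
            }
        }
        return null; // the parent owns no rows in the child table
    }

    public static void main(String[] args) {
        int[] parents = { 2, 5, 9 };
        int[] starts  = { 1, 4, 7 };
        int[] r = childRange(parents, starts, 5, 10);
        System.out.println(r[0] + " to " + (r[1] - 1)); // prints "4 to 6"
    }
}
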
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ParameterAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/ParameterAttributes.java
deleted file mode 100644
index d4360363fc..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/ParameterAttributes.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Defines the attributes that may be associated with a parameter.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class ParameterAttributes {
-
- // just to make the class uninstantiable
- private ParameterAttributes() {}
-
- //##########################################################################
-
- /** Specifies that there is no parameter attribute. */
- public static final short None = 0x0000;
-
- /** Specifies that the parameter is an input parameter. */
- public static final short In = 0x0001;
-
- /** Specifies that the parameter is an output parameter. */
- public static final short Out = 0x0002;
-
- /** Specifies that the parameter is a locale identifier. */
- public static final short Lcid = 0x0004;
-
- /** Specifies that the parameter is a return value. */
- public static final short Retval = 0x0008;
-
- /** Specifies that the parameter is optional.
- * Attention: in the specification the value is 0x0004, but in
- * mscorlib.dll 0x0004 is Lcid and Optional is 0x0010.
- */
- public static final short Optional = 0x0010;
-
- /** Specifies that the parameter has a default value. */
- public static final short HasDefault = 0x1000;
-
- /** Specifies that the parameter has field marshaling information. */
- public static final short HasFieldMarshal = 0x2000;
-
- /** Reserved. */
- public static final short Reserved3 = 0x4000;
-
- /** Reserved. */
- public static final short Reserved4 = (short)0x8000;
-
- /** Specifies that the parameter is reserved. */
- public static final short ReservedMask = (short)0xf000;
-
- /** Reserved: shall be zero in all conforming implementations. */
- public static final short Unused = (short) 0xcfe0;
-
- public static final String toString(int attrs) {
- StringBuffer s = new StringBuffer();
- if ((attrs & In) != 0) s.append("in ");
- if ((attrs & Out) != 0) s.append("out ");
- if ((attrs & Optional) != 0) s.append("opt ");
- if ((attrs & HasDefault) != 0) s.append("default(???) ");
- if ((attrs & HasFieldMarshal) != 0) s.append("marshal(???) ");
- return s.toString();
- }
-
- //##########################################################################
-
-} // class ParameterAttributes
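
For reference, a small self-contained sketch of how these single-bit flags compose and decode; it only reuses the constant values documented above and is not part of the original sources.

// Sketch: combining and testing parameter flags (values copied from the class above).
public class ParameterFlagsDemo {
    static final short In = 0x0001, Out = 0x0002, Optional = 0x0010;

    public static void main(String[] args) {
        short attrs = (short) (In | Optional);
        StringBuilder s = new StringBuilder();
        if ((attrs & In) != 0) s.append("in ");
        if ((attrs & Out) != 0) s.append("out ");
        if ((attrs & Optional) != 0) s.append("opt ");
        System.out.println(s.toString().trim()); // prints "in opt"
    }
}
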
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ParameterInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/ParameterInfo.java
deleted file mode 100644
index 877d7aa8a5..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/ParameterInfo.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Discovers the attributes of a parameter and provides access to
- * parameter metadata.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class ParameterInfo extends CustomAttributeProvider {
-
- //##########################################################################
-
- /** Attributes of the parameter. */
- public final short Attributes;
-
- /** Name of the parameter. */
- public final String Name;
-
- /** Type of the parameter. */
- public final Type ParameterType;
-
- /** Position of the parameter in the parameter list. */
- public final int Position;
-
- //##########################################################################
-
- /** Is this an input parameter? */
- public final boolean IsIn() {
- return (Attributes & ParameterAttributes.In) != 0;
- }
-
- /** Is this an output parameter? */
- public final boolean IsOut() {
- return (Attributes & ParameterAttributes.Out) != 0;
- }
-
- /** Is this an Lcid? */
- public final boolean IsLcid() {
- return (Attributes & ParameterAttributes.Lcid) != 0;
- }
-
- /** Is this a return value? */
- public final boolean IsRetval() {
- return (Attributes & ParameterAttributes.Retval) != 0;
- }
-
- /** Is this an optional parameter? */
- public final boolean IsOptional() {
- return (Attributes & ParameterAttributes.Optional) != 0;
- }
-
- //##########################################################################
- // members not part of the public Reflection.ParameterInfo interface
-
- /** Initializes a new instance of the ParameterInfo class. */
- protected ParameterInfo(String name, Type type, int attr, int pos) {
- Name = name;
- ParameterType = type;
- Attributes = (short)attr;
- Position = pos;
- }
-
- public String toString() {
- return ParameterAttributes.toString(Attributes) + ParameterType + " "
- + Name;
- }
-
- //##########################################################################
-
-} // class ParameterInfo
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PrimitiveType.java b/src/msil/ch/epfl/lamp/compiler/msil/PrimitiveType.java
deleted file mode 100644
index b19fe29869..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PrimitiveType.java
+++ /dev/null
@@ -1,62 +0,0 @@
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.util.PECustomMod;
-
-public final class PrimitiveType extends Type {
- public PrimitiveType(Module module,
- int attributes,
- String fullName,
- Type baseType,
- Type[] interfaces,
- Type declType,
- int auxAttr,
- Type elemType) {
- super(module, attributes, fullName,
- baseType, interfaces, declType, auxAttr, elemType);
- clearMembers();
- }
-
- public void clearMembers() {
- fields = FieldInfo.EMPTY_ARRAY;
- methods = MethodInfo.EMPTY_ARRAY;
- constructors = ConstructorInfo.EMPTY_ARRAY;
- events = EventInfo.EMPTY_ARRAY;
-
- initBaseType();
- initInterfaces();
-
- initFields();
- initMethods();
- initEvents();
- initProperties();
- initNestedTypes();
- }
-
- public FieldInfo addField(String name, int attrs, Type fieldType) {
- PECustomMod fieldTypeWithMods = new PECustomMod(fieldType, null);
- FieldInfo res = new FieldInfo(name, this, attrs, fieldTypeWithMods, null);
- FieldInfo[] ms = new FieldInfo[fields.length + 1];
- System.arraycopy(fields, 0, ms, 0, fields.length);
- ms[ms.length - 1] = res;
- fields = ms;
- return res;
- }
-
- public MethodInfo addMethod(String name, int attrs, Type returnType, Type[] paramTypes) {
- MethodInfo res = new MethodInfo(name, this, attrs, returnType, paramTypes);
- MethodInfo[] ms = new MethodInfo[methods.length + 1];
- System.arraycopy(methods, 0, ms, 0, methods.length);
- ms[ms.length - 1] = res;
- methods = ms; // retain the grown array, as addField does for fields
- return res;
- }
-
- public ConstructorInfo addConstructor(int attrs, Type[] paramTypes) {
- ConstructorInfo res = new ConstructorInfo(this, attrs, paramTypes);
- ConstructorInfo[] ms = new ConstructorInfo[constructors.length + 1];
- System.arraycopy(constructors, 0, ms, 0, constructors.length);
- ms[ms.length - 1] = res;
- constructors = ms; // retain the grown array, as addField does for fields
- return res;
- }
-
-}
-
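
The addField/addMethod/addConstructor helpers above all grow a member array by copy-and-append. Below is a minimal sketch of that pattern, with a String element type purely for illustration. The key point is that the caller must store the returned array back into the member field, as addField does with fields = ms.

import java.util.Arrays;

// Sketch of the copy-and-append growth used by PrimitiveType.addField and friends.
public class AppendSketch {
    static String[] append(String[] arr, String elem) {
        String[] grown = Arrays.copyOf(arr, arr.length + 1);
        grown[grown.length - 1] = elem;
        return grown;
    }

    public static void main(String[] args) {
        String[] members = {};
        members = append(members, "ToString"); // the caller keeps the grown array
        System.out.println(Arrays.toString(members)); // prints [ToString]
    }
}
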
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PropertyAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/PropertyAttributes.java
deleted file mode 100644
index b1bec64aff..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PropertyAttributes.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Attributes applicable to properties.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class PropertyAttributes {
-
- // makes the class uninstantiable
- private PropertyAttributes() {}
-
- //##########################################################################
-
- /** Specifies that the property is special, with the name describing
- * how the property is special.
- */
- public static final short SpecialName = 0x0200;
-
- /** Specifies that the metadata internal APIs check the name encoding.
- */
- public static final short RTSpecialName = 0x0400;
-
- /** Specifies that the property has a default value.
- */
- public static final short HasDefault = 0x1000;
-
- //##########################################################################
-
- public static String toString(short attrs) {
- StringBuffer str = new StringBuffer();
- if ((attrs & SpecialName) != 0) str.append("specialname ");
- if ((attrs & RTSpecialName) != 0) str.append("rtspecialname ");
- return str.toString();
- }
-
- //##########################################################################
-
-} // class PropertyAttributes
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PropertyInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/PropertyInfo.java
deleted file mode 100644
index 4b7cef8bc1..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PropertyInfo.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Discovers the attributes of a property
- * and provides access to property metadata.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class PropertyInfo extends MemberInfo {
-
- //##########################################################################
-
- public final int MemberType() { return MemberTypes.Property; }
-
- public final short Attributes;
-
- public final boolean CanRead;
-
- public final boolean CanWrite;
-
- public final Type PropertyType;
-
- /** Returns an array of the public get and set accessors for this property.
- */
- public MethodInfo[] GetAccessors() {
- return GetAccessors(false);
- }
-
- /** Returns an array of the public or non-public <b>get</b>
- * and <b>set</b> accessors for this property.
- */
- public MethodInfo[] GetAccessors(boolean nonPublic) {
- MethodInfo getter = GetGetMethod(nonPublic);
- MethodInfo setter = GetSetMethod(nonPublic);
- if (getter == null)
- if (setter == null) return MethodInfo.EMPTY_ARRAY;
- else return new MethodInfo[]{setter};
- else if (setter == null) return new MethodInfo[] {getter};
- else return new MethodInfo[] {getter, setter};
- }
-
- /** Returns the public <b>get</b> accessor for this property.
- */
- public MethodInfo GetGetMethod() {
- return GetGetMethod(false);
- }
-
- /** Returns the public or non-public <b>get</b> accessor for this property.
- */
- public MethodInfo GetGetMethod(boolean nonPublic) {
- return nonPublic ? getter
- : getter == null || getter.IsPublic() ? getter : null;
- }
-
- /** Returns the public <b>set</b> accessor for this property.
- */
- public MethodInfo GetSetMethod() {
- return GetSetMethod(false);
- }
-
- /** Returns the public or non-public <b>set</b> accessor for this property.
- */
- public MethodInfo GetSetMethod(boolean nonPublic) {
- return nonPublic ? setter
- : setter == null || setter.IsPublic() ? setter : null;
- }
-
- public String toString() {
- MethodInfo m = getter != null ? getter : setter;
- return MethodAttributes.accessFlagsToString(m.Attributes)
- + " " + PropertyAttributes.toString(Attributes)
- + DeclaringType + "::" + Name;
- }
-
- //##########################################################################
- // protected members
-
- protected static final PropertyInfo[] EMPTY_ARRAY = new PropertyInfo[0];
-
- protected MethodInfo getter;
- protected MethodInfo setter;
-
- protected PropertyInfo(String name, Type declType, short attr,
- Type propType, MethodInfo getter, MethodInfo setter)
- {
- super(name, declType);
- Attributes = attr;
- PropertyType = propType;
- this.getter = getter;
- this.setter = setter;
- CanRead = getter != null;
- CanWrite = setter != null;
- }
-
- //##########################################################################
-
-} // class PropertyInfo
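
GetGetMethod/GetSetMethod above apply one small visibility filter: with nonPublic set, the accessor is returned as is; otherwise a non-public accessor is hidden by returning null. A self-contained sketch of just that filter follows; the String stand-in for MethodInfo is purely illustrative.

// Sketch of the accessor visibility filter used by GetGetMethod/GetSetMethod.
public class AccessorFilterSketch {
    static String filter(String accessor, boolean accessorIsPublic, boolean nonPublic) {
        return nonPublic ? accessor
                         : accessor == null || accessorIsPublic ? accessor : null;
    }

    public static void main(String[] args) {
        System.out.println(filter("get_Length", false, false)); // null (hidden)
        System.out.println(filter("get_Length", false, true));  // get_Length
        System.out.println(filter("get_Length", true,  false)); // get_Length
    }
}
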
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Type.java b/src/msil/ch/epfl/lamp/compiler/msil/Type.java
deleted file mode 100644
index 830632ce45..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/Type.java
+++ /dev/null
@@ -1,1142 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import java.util.Map;
-import java.util.HashMap;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.Arrays;
-
-/**
- * Represents type declarations: class types, interface types, array types,
- * value types, and enumeration types.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class Type extends MemberInfo {
-
- private java.util.List /* GenericParamAndConstraints */ tVars = new java.util.LinkedList();
- private GenericParamAndConstraints[] sortedTVars = null;
-
- public void addTVar(GenericParamAndConstraints tvarAndConstraints) {
- sortedTVars = null;
- tVars.add(tvarAndConstraints);
- }
-
- public GenericParamAndConstraints[] getSortedTVars() {
- if(sortedTVars == null) {
- sortedTVars = new GenericParamAndConstraints[tVars.size()];
- for (int i = 0; i < sortedTVars.length; i ++){
- Iterator iter = tVars.iterator();
- while(iter.hasNext()) {
- GenericParamAndConstraints tvC = (GenericParamAndConstraints)iter.next();
- if(tvC.Number == i) {
- sortedTVars[i] = tvC;
- }
- }
- }
- }
- return sortedTVars;
- }
-
-
- //##########################################################################
- // public static members
-
- /** Empty array of type Type. */
- public static final Type[] EmptyTypes = new Type[0];
-
- /** Separates names in the namespace of the Type. */
- public static final char Delimiter = '.';
-
- //##########################################################################
- // public properties
-
- /** The fully qualified name of the Type. */
- public final String FullName;
-
- /** The namespace of the Type. */
- public final String Namespace;
-
- /** The type from which the current Type directly inherits. */
- public final Type BaseType() {
- initBaseType();
- return baseType;
- }
- protected Type baseType;
-
- /** The attributes associated with the Type. */
- public final int Attributes;
-
- /** The assembly in which the type is declared. */
- public final Assembly Assembly() { return Module.Assembly; }
-
- /** The module (the EXE/DLL) in which the current Type is defined. */
- public final Module Module;
-
- public final int MemberType() {
- return DeclaringType == null
- ? MemberTypes.TypeInfo : MemberTypes.NestedType;
- }
-
- //##########################################################################
- // internal members
-
- // Fields declared by this class
- protected FieldInfo[] fields;
-
- // Methods declared by this class
- protected MethodInfo[] methods;
-
- // Constructors of this class
- protected ConstructorInfo[] constructors;
-
- // Properties of the class
- protected PropertyInfo[] properties;
-
- // Events of the class
- protected EventInfo[] events;
-
- // Interfaces implemented by this class
- protected Type[] interfaces;
-
- // Nested types declared by this class
- protected Type[] nestedTypes;
-
- // holds the element type of array, pointer and byref types
- private final Type elemType;
-
- // the underlying type of an enumeration. null if the type is not enum.
- protected Type underlyingType;
-
- protected int auxAttr;
-
- //##########################################################################
- // Map with all the types known so far and operations on it
-
- private static final Map types = new HashMap();
-
- protected static Type getType(String name) {
- return (Type) types.get(name);
- }
-
- protected static Type addType(Type t) {
- assert(!(t instanceof TMVarUsage));
- assert(!(t instanceof ConstructedType));
- Type oldType = (Type) types.put(t.FullName, t);
-// if (oldType != null)
-// throw new RuntimeException("The type: [" + t.Assembly + "]" + t
-// + " replaces the type: [" +
-// oldType.Assembly + "]" + oldType);
- return t;
- }
-
- //##########################################################################
-
- /** The main constructor. */
- protected Type(Module module,
- int attr,
- String fullName,
- Type baseType,
- Type[] interfaces,
- Type declType,
- int auxAttr,
- Type elemType)
- {
- super(fullName.lastIndexOf(Delimiter) < 0 ? fullName :
- fullName.substring(fullName.lastIndexOf(Delimiter) + 1,
- fullName.length()),
- declType);
-
- Module = module; // null only for TMVarUsage and for PrimitiveType
- Attributes = attr;
- this.baseType = baseType;
- if (DeclaringType == null) {
- FullName = fullName;
- int i = FullName.lastIndexOf(Delimiter);
- Namespace = (i < 0) ? "" : FullName.substring(0,i);
- } else {
- FullName = declType.FullName + "+" + fullName;
- Namespace = DeclaringType.Namespace;
- }
-
- this.interfaces = interfaces;
- this.elemType = elemType;
- this.auxAttr = auxAttr;
- }
-
- public final boolean IsAbstract() {
- return (Attributes & TypeAttributes.Abstract) != 0;
-
- }
- public final boolean IsPublic() {
- return (Attributes & TypeAttributes.VisibilityMask)
- == TypeAttributes.Public;
- }
-
- public final boolean IsNotPublic() {
- return (Attributes & TypeAttributes.VisibilityMask)
- == TypeAttributes.NotPublic;
- }
-
- public final boolean IsNestedPublic() {
- return (Attributes & TypeAttributes.VisibilityMask)
- == TypeAttributes.NestedPublic;
- }
-
- public final boolean IsNestedPrivate() {
- return (Attributes & TypeAttributes.VisibilityMask)
- == TypeAttributes.NestedPrivate;
- }
-
- public final boolean IsNestedFamily() {
- return (Attributes & TypeAttributes.VisibilityMask)
- == TypeAttributes.NestedFamily;
- }
-
- public final boolean IsNestedAssembly() {
- return (Attributes & TypeAttributes.VisibilityMask)
- == TypeAttributes.NestedAssembly;
- }
-
- public final boolean IsNestedFamORAssem() {
- return (Attributes & TypeAttributes.VisibilityMask)
- == TypeAttributes.NestedFamORAssem;
- }
-
- public final boolean IsNestedFamANDAssem() {
- return (Attributes & TypeAttributes.VisibilityMask)
- == TypeAttributes.NestedFamANDAssem;
- }
-
- public final boolean IsSealed() {
- return (Attributes & TypeAttributes.Sealed) != 0;
- }
-
- public final boolean IsSpecialName() {
- return (Attributes & TypeAttributes.SpecialName) != 0;
- }
-
- public final boolean IsClass() {
- return (Attributes & TypeAttributes.ClassSemanticsMask)
- == TypeAttributes.Class;
- }
-
- public final boolean IsInterface(){
- return (Attributes & TypeAttributes.ClassSemanticsMask)
- == TypeAttributes.Interface;
- }
-
- public final boolean IsAutoLayout() {
- return (Attributes & TypeAttributes.LayoutMask)
- == TypeAttributes.AutoLayout;
- }
- public final boolean IsExplictitLayout() {
- return (Attributes & TypeAttributes.LayoutMask)
- == TypeAttributes.ExplicitLayout;
- }
- public final boolean IsLayoutSequential() {
- return (Attributes & TypeAttributes.LayoutMask)
- == TypeAttributes.SequentialLayout;
- }
-
- public final boolean IsImport() {
- return (Attributes & TypeAttributes.Import) != 0;
- }
- public final boolean IsSerializable() {
- return (Attributes & TypeAttributes.Serializable) != 0;
- }
-
- public final boolean IsAnsiClass() {
- return (Attributes & TypeAttributes.StringFormatMask)
- == TypeAttributes.AnsiClass;
- }
-
- public final boolean IsUnicodeClass() {
- return (Attributes & TypeAttributes.StringFormatMask)
- == TypeAttributes.UnicodeClass;
- }
- public final boolean IsAutoClass() {
- return (Attributes & TypeAttributes.StringFormatMask)
- == TypeAttributes.AutoClass;
- }
-
- public final boolean IsArray() {
- return (auxAttr & AuxAttr.Array) != 0;
- }
- public final boolean IsByRef() {
- return (auxAttr & AuxAttr.ByRef) != 0;
- }
- public final boolean IsPointer() {
- return (auxAttr & AuxAttr.Pointer) != 0;
- }
- public final boolean IsPrimitive() {
- return (auxAttr & AuxAttr.Primitive) != 0;
- }
- public final boolean IsValueType() {
- return BaseType() == VALUE_TYPE() || IsEnum();
- }
- public final boolean IsEnum() {
- return BaseType() == ENUM();
- }
- public boolean CanBeTakenAddressOf() {
- /* TODO should be overridden in TMVarUsage,
- but there's currently no way to bind a TMVarUsage to its GenericParamAndConstraints definition. Why?
- Because of the way the msil library is organized (e.g., mkArray() returns the same !0[] representation
- for all !0[] usages, irrespective of the scope of the !0 type-param)
- This in turn is so because without generics there's no harm in using a type-def instance
- where a type-ref should go (e.g., the ParameterType of a ParameterInfo nowadays may point to a PEType).
- The net effect is that this method (CanBeTakenAddressOf) is conservative, it will answer "no"
- for example for !0 where !0 refers to a type-param with the isValuetype constraint set.
- The whole thing is ok at this point in time, where generics are not supported at the backend. */
- return IsValueType() && (this != ENUM());
- /* ENUM() is a singleton, i.e. System.Enum is not generic */
- }
-
- /** IsGeneric, true for a PEType or TypeBuilder (i.e., a type definition)
- * containing one or more type params. Not to be called on a reference
- * to a constructed type. */
- public final boolean IsGeneric() {
- return tVars.size() > 0;
- }
-
- public final boolean HasElementType() {
- return IsArray() || IsPointer() || IsByRef();
- }
-
- public boolean IsTMVarUsage() {
- // overridden in TMVarUsage
- return false;
- }
-
- public boolean IsNestedType() {
- return DeclaringType != null;
- }
-
- public boolean IsDefinitelyInternal() {
- if(IsNestedType()) {
- return IsNestedPrivate();
- } else {
- return IsNotPublic();
- }
- }
-
- //public final boolean IsCOMObject;
- //public final boolean IsContextful;
- //public final boolean IsMarshalByRef;
-
- protected Type(Module module,
- int attr,
- String fullName,
- Type baseType,
- Type[] interfaces,
- Type declType,
- int auxAttr)
- {
- this(module, attr, fullName, baseType, interfaces,
- declType, auxAttr, null);
- }
-
- //##########################################################################
-
- public static final class TMVarUsage extends Type {
-
- public final int Number;
- public final boolean isTVar;
-
- /** Non-defining reference to either a TVar or an MVar.
- * An instance of GenericParamAndConstraints represents a TVar or an MVar definition. */
- public TMVarUsage(int Number, boolean isTVar) {
- super(null, 0, ((isTVar ? "!" : "!!") + Number), null, null, null, AuxAttr.None, null);
- this.Number = Number;
- this.isTVar = isTVar;
- }
-
- public String toString() {
- return (isTVar ? "!" : "!!") + Number;
- }
-
- public final boolean IsTMVarUsage() {
- return true;
- }
-
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
-
- TMVarUsage that = (TMVarUsage) o;
-
- if (Number != that.Number) return false;
- if (isTVar != that.isTVar) return false;
-
- return true;
- }
-
- public int hashCode() {
- int result = Number;
- result = 31 * result + (isTVar ? 1 : 0);
- return result;
- }
- }
-
- protected static final class AuxAttr {
- public static final int None = 0x0000;
- public static final int Array = 0x0001;
- public static final int ByRef = 0x0002;
- public static final int Pointer = 0x0008;
- public static final int Primitive = 0x0010;
- }
-
- /** Returns the array type with the given element type and rank. */
- public static Type mkArray(Type elemType, int rank) {
- StringBuffer arrSig = new StringBuffer("[");
- for (int i = 0; i < rank; i++) {
- if (i > 0) arrSig.append(',');
- }
- arrSig.append(']');
- Type array = getType(elemType.FullName + arrSig);
- if (array != null)
- return array;
- array = new PrimitiveType(elemType.Module,
- elemType.Attributes
- | TypeAttributes.Sealed
- | TypeAttributes.Serializable,
- elemType.FullName + arrSig,
- ARRAY(), EmptyTypes, null,
- AuxAttr.Array, elemType);
- return addType(array);
- }
-
- /** Returns the pointer type with the given element type. */
- public static Type mkPtr(Type elemType) {
- String name = elemType.FullName + "*";
- Type type = getType(name);
- if (type != null) return type;
- type = new PrimitiveType(elemType.Module,
- elemType.Attributes,
- name, null, EmptyTypes, null,
- AuxAttr.Pointer, elemType);
- return addType(type);
- }
-
- /** Returns the by-reference type with the given element type. */
- public static Type mkByRef(Type elemType) {
- String name = elemType.FullName + "&";
- Type type = getType(name);
- if (type != null) return type;
- type = new PrimitiveType(elemType.Module,
- elemType.Attributes,
- name, null, EmptyTypes, null,
- AuxAttr.ByRef, elemType);
- return addType(type);
- }
-
- //##########################################################################
- // public methods
-
- /**
- * Returns the type with the specified fully qualified name.
- * For example, the fully qualified name for a class might look like this:
- * TopNamespace.SubNameSpace.ContainingClass+NestedClass,MyAssembly
- */
- public static Type GetType(String fullName) {
- Type type = getType(fullName);
- if (type != null) return type;
-
- // check if it's an array type; TODO: make array type handling more robust
- int i = fullName.lastIndexOf('[');
- int j = fullName.lastIndexOf(']');
- if (i >= 0)
- if (j > i && j == (fullName.length() - 1)) {
- String elementTypeName = fullName.substring(0, i);
- Type elementType = GetType(elementTypeName);
- if (elementType == null)
- throw new RuntimeException
- ("Unknown element type '" + elementTypeName +
- "' for the array type: " + fullName);
- int rank = j - i;
- for (int k = i + 1; k < j; k++) {
- if (fullName.charAt(k) != ',')
- throw new RuntimeException
- ("Malformed type name: " + fullName);
- }
- return mkArray(elementType, rank);
- } else
- throw new RuntimeException("Malformed type name: " + fullName);
-
- // check if it's a pointer type
- if (fullName.charAt(fullName.length() - 1) == '*')
- return addType
- (mkPtr(GetType(fullName.substring(0, fullName.length()-1))));
-
- // check if it's a nested class
- i = fullName.lastIndexOf('+');
- if (i > 0) {
- if (i == 0 || i == (fullName.length() - 1))
- throw new RuntimeException("malformedTypeName");
- Type enclosing = GetType(fullName.substring(0, i));
- return enclosing == null ? null
- : enclosing.GetNestedType(fullName.substring(i + 1));
- }
-
- //System.out.println("Looking for type: " + fullName + " (" + fullName.length() + ")");
- // try in the assemblies
- Iterator assems = ch.epfl.lamp.compiler.msil.Assembly.
- assemblies.values().iterator();
- while (type == null && assems.hasNext()) {
- Assembly assem = ((Assembly) assems.next());
- type = assem.GetType(fullName);
- //System.out.println("\tin assemby " + assem + " -> " + type);
- }
-
- Type type2 = getType(fullName);
- if (type == type2) return type;
- return type == null ? null : addType(type);
- }
-
- /**
- * @return the type of the object encompassed or referred to
- * by the current array, pointer or by-reference type.
- */
- public Type GetElementType() {
- return elemType;
- }
-
- /**
- * @return the type underlying an enumeration type.
- */
- public Type getUnderlyingType() {
- if (!IsEnum()) return null;
- // this forces the loading of the underlying type from the
- // type of the value__ field of the enumeration
- initFields();
- return underlyingType;
- }
-
- //##########################################################################
- // GetField/s/
-
- /** Searches for the field with the specified name. */
- public FieldInfo GetField(String name) {
- initFields();
- for (int i = 0; i < fields.length; i++)
- if (fields[i].Name.equals(name) && !fields[i].IsPrivate())
- return fields[i];
- return null;
- }
-
- /** Searches for the field with the specified name,
- * subject to the given binding flags. */
- public FieldInfo GetField(String name, int bindingFlags) {
- FieldInfo[] fields = this.GetFields(bindingFlags);
- for (int i = 0; i < fields.length; i++)
- if (name.equals(fields[i].Name))
- return fields[i];
- return null;
- }
-
- /** Gets the fields of the current Type. */
- public FieldInfo[] GetFields() {
- return GetFields(BindingFlags.Instance | BindingFlags.Public);
- }
-
- /** Returns the fields of the current Type that satisfy
- * the given binding flags. */
- public FieldInfo[] GetFields(int bindingFlags) {
- initFields();
- final FieldInfo[] fields =
- getAllFields((bindingFlags & BindingFlags.DeclaredOnly) != 0);
- final boolean getInstance = (bindingFlags & BindingFlags.Instance) != 0;
- final boolean getStatic = (bindingFlags & BindingFlags.Static) != 0;
- final boolean getPublic = (bindingFlags & BindingFlags.Public) != 0;
- final boolean getNonPublic =
- (bindingFlags & BindingFlags.NonPublic) != 0;
-
- int cnt = 0;
- for (int i = 0; i < fields.length; i++) {
- FieldInfo field = fields[i];
- boolean accessible = (getPublic && field.IsPublic())
- || (getNonPublic && !field.IsPublic());
- if (accessible
- // strip off the private fields up the hierarchy
- && ((field.DeclaringType == this)
- || ((field.DeclaringType != this) && !field.IsPrivate()))
- && ((getInstance && !field.IsStatic())
- || ((getStatic && field.IsStatic()) &&
- (field.DeclaringType == this
- || (bindingFlags & BindingFlags.FlattenHierarchy) != 0))
- )
- )
- fields[cnt++] = field;
- }
- FieldInfo [] resFields = new FieldInfo[cnt];
- System.arraycopy(fields, 0, resFields, 0, cnt);
- return resFields;
- }
-
- protected FieldInfo[] getAllFields(boolean declaredOnly) {
- initFields();
- FieldInfo [] inherited = BaseType() == null || declaredOnly
- ? FieldInfo.EMPTY_ARRAY
- : BaseType().getAllFields(declaredOnly);
- FieldInfo[] allFields =
- new FieldInfo[inherited.length + this.fields.length];
- System.arraycopy(inherited, 0, allFields, 0, inherited.length);
- System.arraycopy(this.fields, 0,
- allFields, inherited.length, this.fields.length);
- return allFields;
- }
-
- //##########################################################################
- // GetConstructor/s/
-
- /** Searches for a public instance constructor whose parameters
- * match the types in the specified array. */
- public ConstructorInfo GetConstructor(Type[] paramTypes) {
- initMethods();
- for (int i = 0; i < constructors.length; i++) {
- if (equalParameters(constructors[i].GetParameters(), paramTypes))
- return constructors[i];
- }
- return null;
- }
-
- /** Returns all public instance constructors defined for the current Type.*/
- public ConstructorInfo[] GetConstructors() {
- return GetConstructors(BindingFlags.Instance | BindingFlags.Public);
- }
-
- /** Returns the constructors that satisfy the given binding flags. */
- public ConstructorInfo[] GetConstructors(int bindingFlags) {
- initMethods();
- final boolean getInstance = (bindingFlags & BindingFlags.Instance) != 0;
- final boolean getStatic = (bindingFlags & BindingFlags.Static) != 0;
- final boolean getPublic = (bindingFlags & BindingFlags.Public) != 0;
- final boolean getNonPublic =
- (bindingFlags & BindingFlags.NonPublic) != 0;
-
- ConstructorInfo[] constrs =
- new ConstructorInfo[this.constructors.length];
- int cnt = 0;
- for (int i = 0; i < this.constructors.length; i++) {
- ConstructorInfo constr = this.constructors[i];
- boolean accessible = (getPublic && constr.IsPublic())
- || (getNonPublic && !constr.IsPublic());
- if (accessible
- && ((getInstance && !constr.IsStatic())
- || (getStatic && constr.IsStatic())))
- constrs[cnt++] = constr;
- }
- ConstructorInfo [] resConstrs = new ConstructorInfo[cnt];
- System.arraycopy(constrs, 0, resConstrs, 0, cnt);
- return resConstrs;
- }
-
- //##########################################################################
- // GetMethod/s/
-
- /** Searches for the specified public method whose parameters
- * match the specified argument types. */
- public MethodInfo GetMethod(String name, Type[] paramTypes) {
- return GetMethod(name, paramTypes, null);
- }
-
- public MethodInfo GetMethod(String name, Type[] paramTypes, Type retType) {
- initMethods();
- MethodInfo method = findMethod(methods, name, paramTypes, retType);
- if (method != null)
- return method;
- if (BaseType() != null) {
- method = BaseType().GetMethod(name, paramTypes, retType);
- if (method != null)
- return method;
- }
-// StringBuffer str = new StringBuffer(name);
-// str.append('(');
-// for (int i = 0; i < paramTypes.length; i++) {
-// if (i > 0) str.append(", ");
-// str.append(paramTypes[i]);
-// }
-// str.append(')');
-// System.out.println("Cannot find method " + str + ":");
-// System.out.println("Methods of class " + this);
-// for (int i = 0; i < methods.length; i++)
-// System.out.println("\t" + methods[i]);
- return null;
- }
-
- /** Finds a method with the given name, parameter types
- * and (optionally) return type. */
- protected static MethodInfo findMethod(MethodInfo[] methods,
- String name,
- Type[] paramTypes,
- Type retType)
- {
- for (int i = 0; i < methods.length; i++)
- if (name.equals(methods[i].Name)
- && equalParameters(methods[i].GetParameters(), paramTypes)
- && (retType == null || methods[i].ReturnType == retType))
- return methods[i];
- return null;
- }
-
- /** Tells whether the declared parameters match the given
- * parameter types exactly. */
- protected static boolean equalParameters(ParameterInfo[] params,
- Type[] paramTypes)
- {
- if (params.length != paramTypes.length)
- return false;
- for (int i = 0; i < params.length; i++) {
-// System.out.println(params[i].ParameterType + " == " + paramTypes[i]
-// + " = " + (params[i].ParameterType == paramTypes[i]));
- if (params[i].ParameterType != paramTypes[i])
- return false;
- }
- return true;
- }
-
- /** Searches for the method with the given name and parameter types,
- * subject to the given binding flags. */
- public MethodInfo GetMethod(String name, Type[] paramTypes, int bindingFlags) {
- MethodInfo[] methods = GetMethods(bindingFlags);
- MethodInfo method = findMethod(methods, name, paramTypes, null);
- if (method == null) {
- StringBuffer str = new StringBuffer(name);
- str.append('(');
- for (int i = 0; i < paramTypes.length; i++) {
- if (i > 0) str.append(", ");
- str.append(paramTypes[i]);
- }
- str.append(')');
- System.out.println("Cannot find method " + str + ":");
- System.out.println("Methods of class " + this);
- for (int i = 0; i < methods.length; i++)
- System.out.println("\t" + methods[i]);
- }
- return method;
- }
-
- /** Returns all public methods of the current Type. */
- public MethodInfo[] GetMethods() {
- return GetMethods(BindingFlags.Instance | BindingFlags.Public);
- }
-
- /** Returns the methods of the current Type that satisfy
- * the given binding flags. */
- public MethodInfo[] GetMethods(int bindingFlags) {
- initMethods();
- final MethodInfo[] methods =
- getAllMethods((bindingFlags & BindingFlags.DeclaredOnly) != 0);
- //System.out.println("" + this + ".GetMethods(int) -> " + methods.length);
- final boolean getInstance = (bindingFlags & BindingFlags.Instance) != 0;
- final boolean getStatic = (bindingFlags & BindingFlags.Static) != 0;
- final boolean getPublic = (bindingFlags & BindingFlags.Public) != 0;
- final boolean getNonPublic =
- (bindingFlags & BindingFlags.NonPublic) != 0;
-
- int cnt = 0;
- for (int i = 0; i < methods.length; i++) {
- MethodInfo method = methods[i];
- boolean accessible = (getPublic && method.IsPublic())
- || (getNonPublic && !method.IsPublic());
- if (accessible
- // strip off the private methods up the hierarchy
- && ((method.DeclaringType == this)
- || ((method.DeclaringType != this) && !method.IsPrivate()))
- && ((getInstance && !method.IsStatic())
- || ((getStatic && method.IsStatic()) &&
- (method.DeclaringType == this
- || (bindingFlags & BindingFlags.FlattenHierarchy) != 0))
- )
- )
- methods[cnt++] = method;
- }
- MethodInfo [] resMethods = new MethodInfo[cnt];
- System.arraycopy(methods, 0, resMethods, 0, cnt);
- return resMethods;
- }
-
- protected MethodInfo[] getAllMethods(boolean declaredOnly) {
- initMethods();
- MethodInfo[] inherited = BaseType() == null || declaredOnly
- ? MethodInfo.EMPTY_ARRAY
- : BaseType().getAllMethods(declaredOnly);
- MethodInfo[] allMethods =
- new MethodInfo[inherited.length + this.methods.length];
- System.arraycopy(inherited, 0, allMethods, 0, inherited.length);
- System.arraycopy(this.methods, 0,
- allMethods, inherited.length, this.methods.length);
- return allMethods;
- }
-
- //##########################################################################
- // GetProperty/ies/
-
- /** Returns all public properties of the current Type.
- */
- public PropertyInfo[] GetProperties() {
- initProperties();
- return (PropertyInfo[]) properties.clone();
- }
-
- /** Returns the properties of the current class
- * that satisfy the binding constraints.
- */
- public PropertyInfo[] GetProperties(int bindingFlags) {
- initProperties();
- return (PropertyInfo[]) properties.clone();
- }
-
- /** Returns the public property with the given name.
- */
- public PropertyInfo GetProperty(String name) {
- initProperties();
- for (int i = 0; i < properties.length; i++)
- if (name.equals(properties[i].Name))
- return properties[i];
- return null;
- }
-
- /** Returns the property with the given name
- * that satisfies the binding constraints.
- */
- public PropertyInfo GetProperty(String name, int bindingFlags) {
- throw new RuntimeException("Method not implemented yet");
- }
-
- //##########################################################################
- // GetEvent(s)
-
- public EventInfo[] GetEvents() {
- initEvents();
- return (EventInfo[]) events.clone();
- }
-
- //##########################################################################
- // GetNestedType/s/
-
- /** Searches for nested type with the specified name. */
- public Type GetNestedType(String name) {
- initNestedTypes();
- for (int i = 0; i < nestedTypes.length; i++)
- if (nestedTypes[i].Name.equals(name))
- return nestedTypes[i];
- return null;
- }
-
- /** Returns all types nested within the current Type. */
- public Type[] GetNestedTypes() {
- initNestedTypes();
- return (Type[]) nestedTypes.clone();
- }
-
- //##########################################################################
- // GetInterface/s/
-
- /** Searches for an interface with the given name implemented by this type.
- */
- public Type GetInterface(String name) {
- return GetInterface(name, false);
- }
-
- /** Searches for the specified interface,
- * specifying whether to do a case-sensitive search.
- * @param name - the name of the interface to get
- * @param ignoreCase <b>true</b> to perform a case-insensitive search for name
- * <b>false</b> to perform a case-sensitive search for name
- * @return A Type object representing the interface with the specified name,
- * implemented or inherited by the current Type, if found;
- * otherwise, a null reference
- */
- public Type GetInterface(String name, boolean ignoreCase) {
- initInterfaces();
- for (int i = 0; i < interfaces.length; i++) {
- Type iface = interfaces[i];
- if (ignoreCase) {
- if (name.equalsIgnoreCase(iface.Name)) return iface;
- if (name.equalsIgnoreCase(iface.FullName)) return iface;
- } else {
- if (name.equals(iface.Name)) return iface;
- if (name.equals(iface.FullName)) return iface;
- }
- }
- return BaseType() == null ? null
- : BaseType().GetInterface(name, ignoreCase);
- }
-
- /** Returns the interfaces implemented or inherited by the current Type. */
- public Type[] GetInterfaces() {
- initInterfaces();
- if (BaseType() == null) return interfaces;
-
- Type[] ifaces = (Type[]) interfaces.clone(); // don't compact the cached array in place
- int count = 0;
- for (int i = 0; i < interfaces.length; i++) {
- if (BaseType().GetInterface(interfaces[i].FullName) == null)
- ifaces[count++] = ifaces[i];
- }
- Type[] baseTypeIfaces = BaseType().GetInterfaces();
-
- Type[] res = new Type[baseTypeIfaces.length + count];
- System.arraycopy(baseTypeIfaces, 0, res, 0, baseTypeIfaces.length);
- System.arraycopy(ifaces, 0, res, baseTypeIfaces.length, count);
-
- return res;
- }
-
-
- public boolean isSubtypeOf(Type that) {
- if (this == that || BaseType() == that || that == OBJECT()) return true;
- initInterfaces();
- for (int i = 0; i < interfaces.length; i++)
- if (interfaces[i].isSubtypeOf(that))
- return true;
- boolean res = BaseType() == null ? false : BaseType().isSubtypeOf(that);
-// if (!res) {
-// System.out.println(dumpType(this) + " not a subtype of " +
-// dumpType(that));
-// }
- return res;
- }
-
- private static String formatType(Type t) {
- if (t == null) return "<null>";
- String cname = t.getClass().getName();
- int k = cname.lastIndexOf(".");
- if (k >= 0)
- cname = cname.substring(k + 1);
- return "[" + t.Assembly().GetName() + "]" + t +
- "(" + cname + "#" + Integer.toHexString(t.hashCode()) + ")";
- }
- private static String dumpType(Type t) {
- StringBuffer str = new StringBuffer();
- str.append(formatType(t) + " : ");
- str.append(formatType(t.BaseType()));
- Type[] ifaces = t.GetInterfaces();
- for (int i = 0; i < ifaces.length; i++)
- str.append(", " + formatType(ifaces[i]));
- return str.toString();
- }
-
- //##########################################################################
- // GetMember/s/
-
- protected MemberInfo[] members;
-
- public MemberInfo[] GetMember(String name) {
- aggregateMembers();
- List l = new ArrayList();
- for (int i = 0; i < members.length; i++) {
- if (name.equals(members[i].Name))
- l.add(members[i]);
- }
- return (MemberInfo[])l.toArray(MemberInfo.EMPTY_ARRAY);
- }
-
- protected void aggregateMembers() {
- if (members != null)
- return;
- initFields();
- initMethods();
- initProperties();
- initNestedTypes();
- // the List returned by Arrays.asList doesn't support the addAll method
- // so we have to wrap it in ArrayList
- List l = new ArrayList(Arrays.asList(fields));
- l.addAll(Arrays.asList(constructors));
- l.addAll(Arrays.asList(methods));
- l.addAll(Arrays.asList(properties));
- l.addAll(Arrays.asList(nestedTypes));
- members = (MemberInfo[]) l.toArray(MemberInfo.EMPTY_ARRAY);
- }
-
- //##########################################################################
- // non-standard methods that return only members declared in this type
-
- /**
- * Return only the fields declared in this type.
- */
- public FieldInfo[] getFields() {
- initFields();
- FieldInfo[] fields = new FieldInfo[this.fields.length];
- System.arraycopy(this.fields, 0, fields, 0, fields.length);
- return fields;
- }
-
- /**
- * Return only the constructors declared in this type.
- */
- public ConstructorInfo[] getConstructors() {
- initMethods();
- ConstructorInfo[] ctors = new ConstructorInfo[constructors.length];
- System.arraycopy(constructors, 0, ctors, 0, ctors.length);
- return ctors;
- }
-
- /**
- * Return only the methods declared in this type.
- */
- public MethodInfo[] getMethods() {
- initMethods();
- MethodInfo[] methods = new MethodInfo[this.methods.length];
- System.arraycopy(this.methods, 0, methods, 0, methods.length);
- return methods;
- }
-
- /**
- * Return only the properties declared in this type.
- */
- public PropertyInfo[] getProperties() {
- initProperties();
- PropertyInfo[] props = new PropertyInfo[properties.length];
- System.arraycopy(properties, 0, props, 0, props.length);
- return props;
- }
-
- /**
- * Return only the interfaces directly implemented by this type.
- */
- public Type[] getInterfaces() {
- initInterfaces();
- Type[] ifaces = new Type[interfaces.length];
- System.arraycopy(interfaces, 0, ifaces, 0, ifaces.length);
- return ifaces;
- }
-
- /**
- * Return the types declared in this type.
- */
- public Type[] getNestedTypes() {
- initNestedTypes();
- Type[] nested = new Type[nestedTypes.length];
- System.arraycopy(nestedTypes, 0, nested, 0, nested.length);
- return nested;
- }
-
- //##########################################################################
-
- public String toString() {
- return FullName;
- }
-
- //##########################################################################
- // lazy type construction members
-
- private boolean initBaseType = true;
- protected final void initBaseType() {
- if (initBaseType) {
- loadBaseType();
- initBaseType = false;
- }
- }
- protected void loadBaseType() {}
-
- private boolean initInterfaces = true;
- protected void initInterfaces() {
- if (initInterfaces) {
- loadInterfaces();
- initInterfaces = false;
- }
- assert interfaces != null : "In type " + this;
- }
- protected void loadInterfaces() {}
-
- private boolean initNestedTypes = true;
- protected void initNestedTypes() {
- if (initNestedTypes) {
- loadNestedTypes();
- initNestedTypes = false;
- }
- assert nestedTypes != null : "In type " + this;
- }
- protected void loadNestedTypes() {}
-
- private boolean initFields = true;
- protected void initFields() {
- if (initFields) {
- loadFields();
- initFields = false;
- }
- assert fields != null : "In type " + this;
- }
- protected void loadFields() {}
-
- private boolean initMethods = true;
- protected void initMethods() {
- if (initMethods) {
- loadMethods();
- initMethods = false;
- }
- assert constructors != null : "In type " + this;
- assert methods != null : "In type " + this;
- }
- protected void loadMethods() {}
-
- private boolean initProperties = true;
- protected void initProperties() {
- if (initProperties) {
- initMethods();
- loadProperties();
- initProperties = false;
- }
- assert properties != null : "In type " + this;
- }
- protected void loadProperties() {}
-
- private boolean initEvents = true;
- protected void initEvents() {
- if (initEvents) {
- initMethods();
- loadEvents();
- initEvents = false;
- }
- assert events != null : "In type " + this;
- }
- protected void loadEvents() {}
-
- //##########################################################################
-
- //##########################################################################
- // static members
-
- private static Assembly MSCORLIB;
- private static Module MSCORLIB_DLL;
-
- public static Type OBJECT() { return __OBJECT; }
- public static Type STRING() { return __STRING; }
- public static Type ARRAY() { return __ARRAY; }
- public static Type VOID() { return __VOID; }
- public static Type ENUM() { return __ENUM; }
- public static Type VALUE_TYPE() { return __VALUE_TYPE; }
-
- private static Type __OBJECT;
- private static Type __STRING;
- private static Type __ARRAY;
- private static Type __VOID;
- private static Type __ENUM;
- private static Type __VALUE_TYPE;
-
- public static void initMSCORLIB(Assembly mscorlib) {
- if (MSCORLIB != null)
- throw new RuntimeException("mscorlib already initialized");
- MSCORLIB = mscorlib;
- MSCORLIB_DLL = MSCORLIB.GetModules()[0];
-
- __OBJECT = mscorlib.GetType("System.Object");
- __STRING = mscorlib.GetType("System.String");
- __ARRAY = mscorlib.GetType("System.Array");
- __VOID = mscorlib.GetType("System.Void");
- __ENUM = mscorlib.GetType("System.Enum");
- __VALUE_TYPE = mscorlib.GetType("System.ValueType");
- }
-
- //##########################################################################
-
-} // class Type
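
Most of the accessors in the Type class above funnel through the lazy-loading idiom at the end of the file: a boolean guard flips after the first loadX() call, so repeated initX() calls are cheap no-ops. A stripped-down, self-contained sketch of that idiom (field and method names are illustrative):

// Sketch of the initX/loadX lazy-loading guard used throughout the Type class above.
public class LazyInitSketch {
    private boolean initFields = true;
    private String[] fields;

    void initFields() {
        if (initFields) {
            loadFields();       // expensive work happens once
            initFields = false;
        }
        assert fields != null;
    }

    void loadFields() { fields = new String[] { "value__" }; }

    public static void main(String[] args) {
        LazyInitSketch t = new LazyInitSketch();
        t.initFields();
        t.initFields(); // second call is a no-op
        System.out.println(t.fields.length); // prints 1
    }
}
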
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/TypeAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/TypeAttributes.java
deleted file mode 100644
index 8f489fa46f..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/TypeAttributes.java
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Specifies type attributes.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class TypeAttributes {
-
- //##########################################################################
- // Visibility attributes
-
- /** Bitmask used to retrieve visibility information. */
- public static final int VisibilityMask = 0x00000007;
-
- /** Class has no public scope. */
- public static final int NotPublic = 0x00000000;
-
- /** Class has public scope. */
- public static final int Public = 0x00000001;
-
- /** Class is nested with public visibility. */
- public static final int NestedPublic = 0x00000002;
-
- /** Class is nested with private visibility. */
- public static final int NestedPrivate = 0x00000003;
-
- /** Class is nested with family visibility, and is thus accessible
- * only by methods within its own type and any subtypes. */
- public static final int NestedFamily = 0x00000004;
-
- /** Class is nested with assembly visibility, and is thus accessible
- * only by methods within its assembly. */
- public static final int NestedAssembly = 0x00000005;
-
- /** Class is nested with assembly and family visibility, and is thus accessible
- * only by methods lying in the intersection of its family and assembly. */
- public static final int NestedFamANDAssem = 0x00000006;
-
- /** Class is nested with family or assembly visibility, and is thus accessible
- * only by methods lying in the union of its family and assembly. */
- public static final int NestedFamORAssem = 0x00000007;
-
- //##########################################################################
- // Class layout attributes
-
- /** Bitmask used to retrieve class layout information. */
- public static final int LayoutMask = 0x00000018;
-
- /** Class fields are automatically laid out by the CLR. */
- public static final int AutoLayout = 0x00000000;
-
- /** Class fields are laid out sequentially, in the order that the fields
- * were emitted to the metadata. */
- public static final int SequentialLayout = 0x00000008;
-
- /** Class fields are laid out at the specified offsets. */
- public static final int ExplicitLayout = 0x00000010;
-
- //##########################################################################
- // Class semantics attributes
-
- /** Bitmask used to retrieve class semantics information. */
- public static final int ClassSemanticsMask = 0x00000020;
-
- /** Type is a class. */
- public static final int Class = 0x00000000;
-
- /** Type is an interface. */
- public static final int Interface = 0x00000020;
-
- //##########################################################################
- // Special semantics in addition to class semantics
-
- /** Class is abstract. */
- public static final int Abstract = 0x00000080;
-
- /** Class cannot be extended. */
- public static final int Sealed = 0x00000100;
-
- /** Class is special in a way denoted by the name. */
- public static final int SpecialName = 0x00000400;
-
- //##########################################################################
- // Implementation attributes
-
- /** Class/interface is imported from another module. */
- public static final int Import = 0x00001000;
-
- /** Class can be serialized. */
- public static final int Serializable = 0x00002000;
-
- //##########################################################################
- // String formatting attributes
-
- /** Bitmask used to retrieve string information for native interop. */
- public static final int StringFormatMask = 0x00030000;
-
- /** LPTSTR is interpreted as ANSI. */
- public static final int AnsiClass = 0x00000000;
-
- /** LPTSTR is interpreted as UNICODE. */
- public static final int UnicodeClass = 0x00010000;
-
- /** LPTSTR is interpreted automatically. */
- public static final int AutoClass = 0x00020000;
-
- //##########################################################################
- // Class initialization attributes
-
- /** Initialize the class before first static field access. */
- public static final int BeforeFieldInit = 0x00100000;
-
- //##########################################################################
- // Additional flags
-
- /** CLI provides 'special' behavior, depending upon the name of the type. */
- public static final int RTSpecialName = 0x00000800;
-
- /** Type has security associated with it. */
- public static final int HasSecurity = 0x00040000;
-
- //##########################################################################
-
- public static String accessModsToString(int attrs) {
- switch (attrs & VisibilityMask) {
- case NotPublic: return "private";
- case Public: return "public";
- case NestedPublic: return "nested public";
- case NestedPrivate: return "nested private";
- case NestedFamily: return "nested family";
- case NestedAssembly: return "nested assembly";
- case NestedFamANDAssem: return "nested famandassem";
- case NestedFamORAssem: return "nested famorassem";
- default:
- throw new RuntimeException();
- }
- }
-
- /** Returns a string representation of the given attributes. */
- public static String toString(int attrs) {
- StringBuffer str = new StringBuffer(accessModsToString(attrs));
- switch (attrs & LayoutMask) {
- case AutoLayout: str.append(" auto"); break;
- case SequentialLayout: str.append(" sequential"); break;
- case ExplicitLayout: str.append(" explicit"); break;
- }
- switch (attrs & StringFormatMask) {
- case AnsiClass: str.append(" ansi"); break;
- case UnicodeClass: str.append(" unicode"); break;
- case AutoClass: str.append(" autochar"); break;
- }
- if ((attrs & Interface) != 0) str.append(" interface");
- if ((attrs & Abstract) != 0) str.append(" abstract");
- if ((attrs & Sealed) != 0) str.append(" sealed");
- if ((attrs & BeforeFieldInit) != 0) str.append(" beforefieldinit");
- if ((attrs & Serializable) != 0) str.append(" serializable");
- if ((attrs & SpecialName) != 0) str.append(" specialname");
- if ((attrs & RTSpecialName) != 0) str.append(" rtspecialname");
- return str.toString();
- }
-
- /** Tells whether the given attributes describe a nested type. */
- public static final boolean isNested(int attrs) {
- switch (attrs & VisibilityMask) {
- case NestedPublic:
- case NestedPrivate:
- case NestedFamily:
- case NestedAssembly:
- case NestedFamANDAssem:
- case NestedFamORAssem:
- return true;
- default: return false;
- }
- }
-
- //##########################################################################
-
- // makes the class uninstantiable
- private TypeAttributes() {}
-
- //##########################################################################
-
-} // class TypeAttributes
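
Note the two ways these constants are consumed in Type above: multi-bit fields (visibility, layout, string format) are extracted with their mask and compared for equality, while single-bit flags (Sealed, Abstract, ...) are tested with != 0. A tiny sketch of the distinction, reusing the values documented above:

// Sketch: masked-field comparison vs. single-bit test (values from the class above).
public class MaskedFieldSketch {
    static final int VisibilityMask = 0x00000007, NestedPublic = 0x00000002;
    static final int Sealed = 0x00000100;

    public static void main(String[] args) {
        int attrs = NestedPublic | Sealed;
        System.out.println((attrs & VisibilityMask) == NestedPublic); // true
        System.out.println((attrs & Sealed) != 0);                    // true
        System.out.println((attrs & VisibilityMask) == 0x00000003);   // false (not NestedPrivate)
    }
}
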
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Version.java b/src/msil/ch/epfl/lamp/compiler/msil/Version.java
deleted file mode 100644
index ad4b09b163..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/Version.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-
-/**
- * Represents the version number for a common language runtime assembly
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class Version {
-
- //##########################################################################
- // public interface
-
- /**
- * Gets the value of the major component of the version
- * number for this instance.
- */
- public final int Major;
-
- /**
- * Gets the value of the minor component of the version
- * number for this instance.
- */
- public final int Minor;
-
- /**
- * Gets the value of the build component of the version
- * number for this instance.
- */
- public final int Build;
-
- /**
- * Gets the value of the revision component of the version
- * number for this instance.
- */
- public final int Revision;
-
- /**
- * Initializes a new instance of the Version class.
- */
- public Version() {
- this(0,0,0,0);
- }
-
- /**
- * Initializes a new instance of the Version class with
- * the specified major, minor, build, and revision numbers.
- */
- public Version(int major, int minor, int build, int revision) {
- this.Major = major;
- this.Minor = minor;
- this.Build = build;
- this.Revision = revision;
- }
-
- /**
- * Converts the value of this instance to its equivalent String representation
- */
- public String toString() {
- return "" + Major + "." + Minor + "." + Build + "." + Revision;
- }
-
- //##########################################################################
-
-} // class Version
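For example, in a REPL (assuming the msil classes are on the classpath):

    val v = new ch.epfl.lamp.compiler.msil.Version(2, 9, 2, 0)
    assert(v.toString == "2.9.2.0")   // Major.Minor.Build.Revision, dot-separated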
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala
deleted file mode 100644
index 3110ccd1ce..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil._
-import java.io.IOException
-
-/**
- * Defines and represents a dynamic assembly.
- * A dynamic assembly is an assembly that is created using the compiler.msil
- * emit APIs. The dynamic modules in the assembly are saved when the dynamic
- * assembly is saved using the Save method. To generate an executable, the
- * SetEntryPoint method must be called to identify the method that is the
- * entry point to the assembly. Assemblies are saved as DLL by default,
- * unless SetEntryPoint requests the generation of a console application
- * or a Windows-based application.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class AssemblyBuilder(name: AssemblyName)
- extends Assembly(name)
- with ICustomAttributeSetter
- with Visitable
-{
- //##########################################################################
- // public methods
-
- /**
- * Defines a dynamic module with the given name that will be saved
- * to the specified file. No symbol information is emitted.
- */
- def DefineDynamicModule(name: String, fileName: String): ModuleBuilder = {
- val module = new ModuleBuilder(name, fileName, "" + null, this)
- addModule(name, module)
- return module
- }
-
- /** Returns the dynamic module with the specified name. */
- def GetDynamicModule(name: String): ModuleBuilder = {
- return GetModule(name).asInstanceOf[ModuleBuilder]
- }
-
- /** Saves this dynamic assembly to disk. */
- @throws(classOf[IOException])
- def Save(fileName: String) {
- generatedFiles = scala.collection.mutable.ArrayBuffer.empty[String]
- ILPrinterVisitor.printAssembly(this, fileName)
- }
-
- @throws(classOf[IOException])
- def Save(destPath: String, sourceFilesPath: String) {
- generatedFiles = scala.collection.mutable.ArrayBuffer.empty[String]
- ILPrinterVisitor.printAssembly(this, destPath, sourceFilesPath)
- }
-
- /** Returns the list of generated files from calling Save(). */
- def GetGeneratedFiles(): Array[String] = {
- return generatedFiles.toArray // (new Array[String](generatedFiles.size())).asInstanceOf[Array[String]]
- }
-
- /** Sets the entry point for this dynamic assembly. */
- def SetEntryPoint(entryMethod: MethodInfo) {
- EntryPoint = entryMethod
- }
-
- /** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
- addCustomAttribute(constr, value)
- }
-
- //##########################################################################
- // protected members
-
- // the access properties - Save, Run, RunAndSave
- private var access : Int = _
-
- // all extern assemblies used in this assembly builder
- protected var externAssemblies = scala.collection.mutable.Set.empty[Assembly]
-
- // register an extern assembly
- protected def registerExternAssembly(assembly: Assembly) {
- externAssemblies += assembly
- }
-
- // get all extern Assemblies used in this Assembly Builder
- def getExternAssemblies(): Array[Assembly] = {
- externAssemblies = scala.collection.mutable.Set[Assembly]()
- val iter = Assembly.assemblies.values().iterator
- while (iter.hasNext) {
- externAssemblies += iter.next.asInstanceOf[Assembly]
- }
- externAssemblies -= this
- return externAssemblies.toArray
- }
-
- def loadModules() {}
-
- // contains list of generated .msil files after calling Save()
- var generatedFiles = scala.collection.mutable.ArrayBuffer.empty[String]
-
- //##########################################################################
- //##########################################################################
-
- /** the apply method for a visitor */
- @throws(classOf[IOException])
- def apply(v: Visitor) {
- v.caseAssemblyBuilder(this)
- }
-
- //##########################################################################
-}
-
-object AssemblyBuilderFactory {
- /**
- * Defines a dynamic assembly with the specified name.
- */
- def DefineDynamicAssembly(name: AssemblyName): AssemblyBuilder = {
- //Assembly.reset()
- return new AssemblyBuilder(name)
- }
-}
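A sketch of the intended workflow, assuming the caller already holds an AssemblyName and the entry-point MethodInfo (the surrounding compiler code provides both); EmitSketch and emitAssembly are hypothetical names:

    package ch.epfl.lamp.compiler.msil.emit

    import ch.epfl.lamp.compiler.msil._
    import java.io.IOException

    object EmitSketch {
      @throws(classOf[IOException])
      def emitAssembly(name: AssemblyName, main: MethodInfo, outFile: String) {
        val asm = AssemblyBuilderFactory.DefineDynamicAssembly(name)
        val module = asm.DefineDynamicModule(name.Name, outFile)
        // ... define types and their members on `module` via the *Builder classes ...
        asm.SetEntryPoint(main)           // without this the assembly is emitted as a library
        asm.Save(outFile)                 // hands the whole assembly to an ILPrinterVisitor
        asm.GetGeneratedFiles().foreach(println)
      }
    }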
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala
deleted file mode 100644
index ddd4708ecd..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.ConstructorInfo
-import ch.epfl.lamp.compiler.msil.Type
-import java.io.IOException
-
-/**
- * Defines and represents a constructor of a dynamic class.
- * ConstructorBuilder is used to fully describe a constructor in
- * Microsoft intermediate language (MSIL), including the name, attributes,
- * signature, and constructor body. It is used in conjunction with the
- * TypeBuilder class to create classes at run time. Call DefineConstructor
- * to get an instance of ConstructorBuilder.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class ConstructorBuilder(declType: Type, attrs: Int, paramTypes: Array[Type])
- extends ConstructorInfo(declType, attrs, paramTypes)
- with ICustomAttributeSetter
- with Visitable
-{
-
- //##########################################################################
- // public interface
-
- /** Defines a parameter of this constructor. */
- def DefineParameter(pos: Int, attr: Int, name: String): ParameterBuilder = {
- val param = new ParameterBuilder(name, params(pos).ParameterType, attr, pos)
- params(pos) = param
- return param
- }
-
- /** Returns an ILGenerator for this constructor. */
- def GetILGenerator(): ILGenerator = {
- return ilGenerator
- }
-
- /** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
- addCustomAttribute(constr, value)
- }
-
- //##########################################################################
-
- /** The apply method for a visitor. */
- @throws(classOf[IOException])
- def apply(v: Visitor) {
- v.caseConstructorBuilder(this)
- }
-
- //##########################################################################
-
- // the Intermediate Language Generator
- // it contains the method's body
- protected var ilGenerator: ILGenerator = new ILGenerator(this)
-
- //##########################################################################
-}
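A minimal sketch of filling in a constructor body, assuming `ctor` came from TypeBuilder.DefineConstructor and that the mscorlib metadata has already been loaded so Type.GetType can resolve System.Object (CtorBodySketch is a hypothetical name):

    package ch.epfl.lamp.compiler.msil.emit

    import ch.epfl.lamp.compiler.msil._

    object CtorBodySketch {
      def emitDefaultBody(ctor: ConstructorBuilder) {
        val objectCtor = Type.GetType("System.Object").GetConstructor(Type.EmptyTypes)
        val il = ctor.GetILGenerator()
        il.Emit(OpCode.Ldarg_0)           // push `this`
        il.Emit(OpCode.Call, objectCtor)  // chain to the base-class constructor
        il.Emit(OpCode.Ret)
      }
    }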
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala
deleted file mode 100644
index 7ef9dc7a5b..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.FieldInfo
-import ch.epfl.lamp.compiler.msil.Type
-import ch.epfl.lamp.compiler.msil.FieldAttributes
-import ch.epfl.lamp.compiler.msil.ConstructorInfo
-
-import ch.epfl.lamp.compiler.msil.util.PECustomMod
-
-import java.io.IOException
-
-/**
- * Discovers the attributes of a field and provides access to field metadata.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class FieldBuilder(name: String, declType: Type, attrs: Int, fieldTypeWithMods: PECustomMod)
- extends FieldInfo(name, declType, attrs, fieldTypeWithMods, null)
- with ICustomAttributeSetter
- with Visitable
-{
-
- //##########################################################################
- // public interface
-
- /** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
- addCustomAttribute(constr, value)
- }
-
- //##########################################################################
-
- /** the apply method for a visitor */
- @throws(classOf[IOException])
- def apply(v: Visitor) {
- v.caseFieldBuilder(this)
- }
-
- //##########################################################################
-
- protected var defaultValue: Object = _
-
- /** Sets the default value of this field. */
- def SetConstant(defaultValue: Object) {
- this.defaultValue = defaultValue
- }
-
- /** Specifies the field layout. */
- def SetOffset(iOffset: Int) {
- //this.fieldOffset = FieldAttributes.Offset.Value(iOffset)
- }
-
- //##########################################################################
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala
deleted file mode 100644
index 5d74d3aa95..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.ConstructorInfo
-
-/**
- * Declares the possibility to set a custom attribute for a member
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-trait ICustomAttributeSetter {
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte])
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
deleted file mode 100644
index 2aa9a99054..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
+++ /dev/null
@@ -1,539 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil._
-import ch.epfl.lamp.compiler.msil.util.Table
-import java.util.Stack
-import java.io.IOException
-import ILGenerator._
-
-/**
- * Generates Microsoft intermediate language (MSIL) instructions.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
- final class ILGenerator(_owner: MethodBase) extends Visitable {
-
- //##########################################################################
- // public interface
-
- /**
- * Puts the specified instruction onto the stream of instructions.
- */
- def Emit(opcode: OpCode) {
- // switch opcode
- if (opcode == OpCode.Ret) {
- emit(opcode, null, 0)
- } else {
- emit(opcode, null)
- }
- }
-
- /**
- * Puts the specified instruction and character argument onto
- * the Microsoft intermediate language (MSIL) stream of instructions.
- */
- def Emit(opcode: OpCode, arg: Char) {
- emit(opcode,new Character(arg))
- }
-
- /**
- * Puts the specified instruction and metadata token for the
- * specified constructor onto the Microsoft intermediate language
- * (MSIL) stream of instructions.
- */
- def Emit(opcode: OpCode, arg: ConstructorInfo) {
- assert(arg != null)
- // newobj
- // pop size is the number of parameters
- emit(opcode,arg, OpCode.PUSH_size(opcode.CEE_push) -
- arg.GetParameters().length)
- }
-
- /**
- * Puts the specified instruction onto the Microsoft intermediate language (MSIL)
- * stream followed by the index of the given local variable.
- */
- def Emit(opcode: OpCode, arg: LocalBuilder) {
- assert(arg != null)
- // ldarg | ldarg.s | ldarga
- // ldarga.s | ldloc | ldloc.s | ldloca
- // ldloca.s | starg | starg.s | stloc
- // stloc.s
-
- // <instr_var> <localname>
- emit(opcode, arg)
- }
-
-
- /**
- * Puts the specified instruction and numerical argument onto
- * the Microsoft intermediate language (MSIL) stream of instructions.
- */
- def Emit(opcode: OpCode, arg: Double) {
- // ldc.r4 | ldc.r8
- emit(opcode, new java.lang.Double(arg))
- }
-
- /**
- * Puts the specified instruction and metadata token for the
- * specified field onto the Microsoft intermediate language (MSIL)
- * stream of instructions.
- */
- def Emit(opcode: OpCode,arg: FieldInfo) {
- assert(arg != null)
- // ldfld | ldflda | ldsfld | ldsflda | stfld | stsfld
- emit(opcode,arg)
- }
-
- /**
- * Puts the specified instruction and numerical argument onto
- * the Microsoft intermediate language (MSIL) stream of instructions.
- */
- def Emit(opcode: OpCode, arg: Short ) {
- emit(opcode, new java.lang.Short(arg))
- }
-
- /**
- * Puts the specified instruction and numerical argument onto
- * the Microsoft intermediate language (MSIL) stream of instructions.
- */
- def Emit(opcode: OpCode, arg: Int) {
- // ldc.i4 | ldc.i4.s | unaligned
- emit(opcode, new java.lang.Integer(arg))
- }
-
- /**
- * Puts the specified instruction and numerical argument onto
- * the Microsoft intermediate language (MSIL) stream of instructions.
- */
- def Emit(opcode: OpCode, arg: Long) {
- // ldc.i8
- emit(opcode, new java.lang.Long(arg))
- }
-
- /**
- * Puts the specified instruction onto the Microsoft intermediate
- * language (MSIL) stream and leaves space to include a label when
- * fixes are done.
- */
- def Emit(opcode: OpCode,label: Label) {
- assert(label != null)
- // beq | beq.s | bge | bge.s |
- // bge.un | bge.un.s | bgt | bgt.s | bgt.un | bgt.un.s |
- // ble | ble.s | ble.un | ble.un.s | blt | blt.s |
- // blt.un | blt.un.s | bne.un | bne.un.s | br | br.s |
- // brfalse | brfalse.s | brtrue | brtrue.s | leave | leave.s
-
- emit(opcode, label)
- // if the label is already initialized this is a backward jump, otherwise a forward jump
- if (label.isInitialized()) {
-// if (arg.stacksize != lastLabel.stacksize) {
-// System.err.println("ILGenerator.Emit: Stack depth differs depending on path:");
-// System.err.println("\tmethod = " + owner);
-// System.err.println("\tPC = 0x" + Table.short2hex(lastLabel.address));
-// }
- //assert arg.stacksize == lastLabel.stacksize;
- }
- else {
- label.setStacksize(lastLabel.getStacksize())
- }
- }
-
- /**
- * Puts the specified instruction onto the Microsoft intermediate
- * language (MSIL) stream and leaves space to include a label when
- * fixes are done.
- */
- def Emit(opcode: OpCode, arg: Array[Label] ) {
- assert(arg != null)
- // switch
-
- // <instr> ::= <instr_switch> ( <labels> )
- // Examples:
- // switch (0x3, -14, Label1)
- // switch (5, Label2)
- emit(opcode, arg, arg.length)
- }
-
- /**
- * Puts the specified instruction onto the Microsoft intermediate
- * language (MSIL) stream followed by the metadata token for the
- * given method.
- */
- def Emit(opcode: OpCode,arg: MethodInfo) {
- assert(arg != null)
- // call | callvirt | jmp | ldftn | ldvirtftn
- // pop size is the number of parameters
- // pop 1 more if method is not static !
- // push size is either 0 (void method) or 1
- assert(arg.ReturnType != null, "No ReturnType: " + arg.DeclaringType + "::" + arg.Name)
-
- val popush: Int = if (opcode == OpCode.Ldftn ||
- opcode == OpCode.Ldvirtftn ||
- opcode == OpCode.Jmp)
- {
- OpCode.PUSH_size(opcode.CEE_push) - OpCode.POP_size(opcode.CEE_pop)
- } else if (opcode == OpCode.Calli || opcode == OpCode.Callvirt) {
- (if(arg.ReturnType == VOID) 0 else 1) - arg.GetParameters().length - 1
- } else {
- (if(arg.ReturnType == VOID) 0 else 1) - arg.GetParameters().length
- }
- emit(opcode, arg, popush)
- }
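A worked instance of the accounting above, for a hypothetical target method:

    // callvirt  instance int32 Foo::Bar(int32, int32)
    //   pops the two arguments plus the receiver, pushes one result,
    //   so popush = 1 - 2 - 1 = -2 is applied to the running stack depth.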
-
- /**
- * Puts the specified instruction and numerical argument onto
- * the Microsoft intermediate language (MSIL) stream of instructions.
- */
- def Emit(opcode: OpCode, arg: Float ) {
- emit(opcode, new java.lang.Float(arg))
- }
-
- /**
- * Puts the specified instruction onto the Microsoft intermediate
- * language (MSIL) stream followed by the metadata token for the
- * given string.
- */
- def Emit(opcode: OpCode, arg: String ) {
- assert(arg != null)
- // ldstr
- emit(opcode, arg)
- }
-
- /**
- * Puts the specified instruction onto the Microsoft intermediate
- * language (MSIL) stream followed by the metadata token for the
- * given type.
- */
- def Emit(opcode: OpCode, arg: Type) {
- assert(arg != null)
- // box | castclass | cpobj | initobj | isinst |
- // ldelema | ldobj | mkrefany | newarr | refanyval |
- // sizeof | stobj | unbox
-
- emit(opcode, arg)
- }
-
- /**
- * Puts a call or callvirt instruction onto the Microsoft intermediate
- * language (MSIL) stream.
- */
- def EmitCall(opcode: OpCode, arg: MethodInfo,
- optionalParameterTypes: Array[Type]) {
- assert(arg != null)
- // pop size is the number of parameters
- // push size is either 0 (void method) or 1
- //System.out.println(arg.ReturnType.Size + " " + arg.GetParameters().length);
- emit(opcode, arg, (if(arg.ReturnType == VOID) 0 else 1) -
- arg.GetParameters().length)
- }
-
- /**
- * Emits the Microsoft intermediate language (MSIL) necessary to
- * call WriteLine with the given field.
- */
- def EmitWriteLine(arg: FieldInfo) {
- // first load field info
- // if static use OpCode.Ldsfld
- if (arg.IsStatic())
- Emit(OpCodes.Ldsfld, arg)
- else
- Emit(OpCodes.Ldfld, arg)
- // then call System.Console.WriteLine(arg.Type)
- val t: Type = Type.GetType("System.Console")
- val argsType: Array[Type] = new Array[Type](1)
- argsType(0) = arg.FieldType
- val m: MethodInfo = t.GetMethod("WriteLine", argsType)
- EmitCall(OpCode.Call, m, null)
- }
-
- /**
- * Emits the Microsoft intermediate language (MSIL) necessary
- * to call WriteLine with the given local variable.
- */
- def EmitWriteLine(arg: LocalBuilder) {
- // first load local variable
- Emit(OpCodes.Ldloc, arg)
- // then call System.Console.WriteLine(arg.Type)
- val t: Type = Type.GetType("System.Console")
- val argsType: Array[Type] = new Array[Type](1)
- argsType(0) = arg.LocalType
- val m: MethodInfo = t.GetMethod("WriteLine", argsType)
- EmitCall(OpCode.Call, m, null)
- }
-
- /**
- * Emits the Microsoft intermediate language (MSIL) to call
- * WriteLine with a string.
- */
- def EmitWriteLine(arg: String) {
- // first load string
- Emit(OpCode.Ldstr, arg)
- // then call System.Console.WriteLine(string)
- val t: Type = Type.GetType("System.Console")
- val argsType: Array[Type] = new Array[Type](1)
- argsType(0) = Type.GetType("System.String")
- val m: MethodInfo = t.GetMethod("WriteLine", argsType)
- EmitCall(OpCode.Call, m, null)
- }
-
- /**
- * Declares a local variable.
- */
- def DeclareLocal(localType: Type): LocalBuilder = {
- val l: LocalBuilder = new LocalBuilder(locals, localType)
- locals = locals + 1
- localList += l
- return l
- }
-
- /**
- * Returns a new label that can be used as a token for branching.
- * In order to set the position of the label within the stream, you
- * must call MarkLabel. This is just a token and does not yet represent
- * any particular location within the stream.
- */
- def DefineLabel():Label = {
- new Label.NormalLabel()
- }
-
- /**
- * Marks the Microsoft intermediate language (MSIL) stream's
- * current position with the given label.
- */
- def MarkLabel(label: Label) {
- label.mergeWith(lastLabel)
- /*
- label.address = lastLabel.address;
- //label.stacksize = lastLabel.stacksize;
- if (label.stacksize >= 0)
- lastLabel.stacksize = label.stacksize;
- */
- }
-
- /** Begins a lexical scope. */
- def BeginScope() {
- emitSpecialLabel(Label.NewScope)
- }
-
- /** Ends a lexical scope. */
- def EndScope() {
- emitSpecialLabel(Label.EndScope)
- }
-
- /**
- * Begins an exception block for a non-filtered exception.
- * The label for the end of the block is kept internally; it marks the point
- * where control resumes after the catch or finally handlers complete.
- */
- def BeginExceptionBlock() {
- emitSpecialLabel(Label.Try)
- val endExc: Label = new Label.NormalLabel() // new Label(lastLabel) ???
- excStack.push(Label.Try, endExc)
- }
-
- /** Begins a catch block. */
- def BeginCatchBlock(exceptionType: Type) {
- val kind = excStack.peekKind()
- if (kind == Label.Kind.Try ||
- kind == Label.Kind.Catch) {
- /* ok */
- } else {
- throw new RuntimeException("Catch should follow either a try or catch")
- }
- val endExc: Label = excStack.popLabel()
- Emit(OpCodes.Leave, endExc)
- // the CLI automatically provides the exception object on the evaluation stack
- // we adjust the stacksize
- lastLabel.incStacksize()
- excStack.push(Label.Catch, endExc)
- emitSpecialLabel(Label.Catch, exceptionType)
- }
-
- /** Ends an exception block. */
- def EndExceptionBlock() {
- val kind = excStack.peekKind()
- if (kind == Label.Kind.Try) {
- throw new RuntimeException("Try block with neither catch nor finally")
- } else if (kind == Label.Kind.Catch) {
- Emit(OpCodes.Leave, excStack.peekLabel())
- } else if (kind == Label.Kind.Finally) {
- Emit(OpCodes.Endfinally)
- }
- MarkLabel(excStack.popLabel())
- emitSpecialLabel(Label.EndTry)
- }
-
- /**
- * Begins a finally block in the Microsoft intermediate language
- * (MSIL) instruction stream.
- */
- def BeginFinallyBlock() {
- val endExc: Label = excStack.popLabel()
- Emit(OpCodes.Leave, endExc)
- excStack.push(Label.Finally, endExc)
- emitSpecialLabel(Label.Finally)
- }
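Taken together, the methods above implement the usual begin/catch/end protocol; a minimal sketch, reusing this file's imports and assuming `il` and the System.Exception Type are obtained elsewhere:

    def emitGuarded(il: ILGenerator, exnType: Type) {
      il.BeginExceptionBlock()      // opens ".try {" and pushes the end-of-block label
      // ... protected code ...
      il.BeginCatchBlock(exnType)   // emits "leave" past the handler, then the catch label
      // ... handler code; the caught exception object is already on the stack ...
      il.EndExceptionBlock()        // emits the final "leave" and closes the region
    }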
-
- /**
- * Emits an instruction to throw an exception.
- */
- def ThrowException(exceptionType: Type) {
- assert(exceptionType != null)
- if (!exceptionType.isSubtypeOf(Type.GetType("System.Exception")))
- throw new RuntimeException(exceptionType + " doesn't extend System.Exception")
- val ctor: ConstructorInfo = exceptionType.GetConstructor(Type.EmptyTypes)
- if (ctor == null)
- throw new RuntimeException("Type " + exceptionType
- + "doesn't have a default constructor")
- Emit(OpCodes.Newobj, ctor)
- Emit(OpCodes.Throw)
- }
-
- /**
- * sets the line of the source file corresponding to the next instruction
- */
- def setPosition(line: Int) {
- if (line != 0) lineNums.put(lastLabel, Integer.toString(line))
- }
-
- def setPosition(line: Int, filename: String) {
- if (line != 0) lineNums.put(lastLabel, line + " '" + filename + "'")
- }
-
- def setPosition(startLine: Int, endLine: Int, startCol: Int, endCol: Int, filename: String) {
- val lineRange = startLine + "," + endLine
- val colRange = startCol + "," + endCol
- lineNums.put(lastLabel, lineRange + ":" + colRange + " '" + filename + "'")
- }
-
- def getLocals(): Array[LocalBuilder] = localList.toArray
-
- def getLabelIterator() = labelList.iterator
-
- def getOpcodeIterator() = opcodeList.iterator
-
- def getArgumentIterator() = argumentList.iterator
-
- //##########################################################################
- // private implementation details
-
-
-
- // the local variable list
- private final val localList = scala.collection.mutable.ArrayBuffer.empty[LocalBuilder]
-
- // the label list, the opcode list and the opcode argument list
- // labelList is an array of Label
- // opcodeList is an array of OpCode
- // argumentList is an array of Object (null if no argument)
- private final val labelList = scala.collection.mutable.ArrayBuffer.empty[Label]
- private final val opcodeList = scala.collection.mutable.ArrayBuffer.empty[OpCode]
- private final val argumentList = scala.collection.mutable.ArrayBuffer.empty[Object]
-
- // the program counter (pc)
- // also called the stream's current position
- private var pc: Int = 0
-
- // last label
- private var lastLabel: Label = new Label.NormalLabel(pc,0)
-
- // the maximum size of stack
- private var maxstack: Int = 0
-
- // the number of the locals
- private var locals: Int = 0
-
- // stack of label for exception mechanism
- private var excStack: ExceptionStack = new ExceptionStack()
-
- // the method info owner of this ILGenerator
- var owner: MethodBase = _owner
-
- val lineNums = scala.collection.mutable.Map.empty[Label, String]
-
-
- def getMaxStacksize(): Int = { this.maxstack }
-
- // private emit with Object Argument
- private def emit(opcode: OpCode, arg: Object) {
- emit(opcode, arg, opcode.CEE_popush)
- }
-
- // private emit with Object Argument and override POPUSH
- private def emit(opcode: OpCode, arg: Object, overridePOPUSH: Int) {
- // add label, opcode and argument
- labelList += lastLabel
- opcodeList += opcode
- argumentList += arg
- // compute new lastLabel (next label)
- val stackSize: Int = lastLabel.getStacksize() + overridePOPUSH
- if (stackSize < 0) {
- val msg = "ILGenerator.emit(): Stack underflow in method: " + owner
- scala.Console.println(msg)
- // throw new RuntimeException(msg)
- }
- if (stackSize > maxstack)
- maxstack = stackSize
- var address: Int = lastLabel.getAddress() + opcode.CEE_length
- if (opcode.CEE_opcode == OpCode.CEE_SWITCH) {
- address = address + 4*arg.asInstanceOf[Array[Label]].length
- }
- lastLabel = new Label.NormalLabel(address, stackSize)
- pc = pc + 1
- }
-
- def Ldarg0WasJustEmitted() : Boolean = {
- if(opcodeList.isEmpty)
- return false
- val lastEmitted = opcodeList(opcodeList.size - 1)
- lastEmitted eq OpCode.Ldarg_0
- }
-
- private def emitSpecialLabel(l: Label) {
- emitSpecialLabel(l, null)
- }
- private def emitSpecialLabel(l: Label, catchType: Type) {
- labelList += l
- opcodeList += null
- argumentList += catchType
- }
-
- //##########################################################################
- //
- @throws(classOf[IOException])
- def apply(v: Visitor) {
- v.caseILGenerator(this)
- }
-
- //##########################################################################
-} // class ILGenerator
-
-
-object ILGenerator {
-
- val VOID: Type = Type.GetType("System.Void")
- val NO_LABEL: String = ""
-
- private final class ExceptionStack {
- private val labels = new scala.collection.mutable.Stack[Label]()
- private val kinds = new scala.collection.mutable.Stack[Label]()
- def ExceptionStack() {}
- def pop() { labels.pop; kinds.pop }
- def push(kind: Label, label: Label) {
- kinds.push(kind); labels.push(label)
- }
- def peekKind(): Label.Kind = kinds.top.getKind
- def peekLabel(): Label = labels.top
- def popLabel(): Label = { kinds.pop(); labels.pop() }
- }
-
-}
-
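A small sketch of the class in use, assuming a MethodBuilder for a static void method and that the System.Console metadata is visible to Type.GetType (HelloBodySketch is a hypothetical name):

    package ch.epfl.lamp.compiler.msil.emit

    import ch.epfl.lamp.compiler.msil._

    object HelloBodySketch {
      def emitHello(mb: MethodBuilder) {
        val il = mb.GetILGenerator()
        il.EmitWriteLine("hello, world")  // ldstr + call System.Console::WriteLine(string)
        il.Emit(OpCode.Ret)               // getMaxStacksize() will report 1 for this body
      }
    }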
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
deleted file mode 100644
index 0ed5e3f3bb..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
+++ /dev/null
@@ -1,861 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies in MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import java.io.File
-import java.io.FileWriter
-import java.io.BufferedWriter
-import java.io.PrintWriter
-import java.io.IOException
-import java.util.Comparator
-
-import ch.epfl.lamp.compiler.msil._
-import ch.epfl.lamp.compiler.msil.util.Table
-
-/**
- * The MSIL printer visitor. It prints a complete assembly into a single file
- * or into multiple files. The resulting files can then be compiled with ilasm.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-abstract class ILPrinterVisitor extends Visitor {
-
- import ILPrinterVisitor._
- import OpCode._
-
- //##########################################################################
-
- protected final val assemblyNameComparator =
- new scala.math.Ordering[Assembly]() {
- override def compare(o1: Assembly, o2: Assembly): Int =
- o1.GetName().Name.compareTo(o2.GetName().Name)
- }
-
- // the output file writer
- protected var out: PrintWriter = null
-
- // the left margin
- private var lmargin = 0
-
- // indicate a newline
- private var newline = true
-
- // print types without or with members?
- protected var nomembers: Boolean = false
-
- // external assemblies
- protected var as: Array[Assembly] = null
-
- private def align() {
- if (newline)
- padding = lmargin
- printPadding()
- newline = false
- }
- private def indent() {
- lmargin += TAB
- }
- private def undent() {
- lmargin -= TAB
- assert(lmargin >= 0)
- }
-
- private var padding = 0
- private def pad(n: Int) {
- assert(n >= 0, "negative padding: " + n)
- padding += n
- }
- private def printPadding() {
- if (padding <= 0)
- return
- while (padding > SPACES_LEN) {
- out.print(SPACES)
- padding -= SPACES_LEN
- }
- out.print(SPACES.substring(0, padding))
- padding = 0
- }
-
- // methods to print code
- protected def print(s: String) { align(); out.print(s)}
- protected def print(o: Object) { align(); out.print(o) }
- protected def print(c: Char) { align(); out.print(c) }
- protected def print(`val`: Int) { align(); out.print(`val`)}
- protected def print(`val`: Long){ align(); out.print(`val`)}
- protected def println() { out.println(); newline = true; padding = 0 }
- protected def println(c: Char) { print(c); println() }
- protected def println(i: Int) { print(i); println() }
- protected def println(l: Long) { print(l); println() }
- protected def println(s: String){ print(s); println() }
- protected def println(o: Object){ print(o); println() }
- protected def printName(name: String) {
- var ch = name.charAt(0)
- //if (Character.isLetter(ch) && Character.isLowerCase(ch)) {
- if ((ch != '.') && (ch != '!')) {
- print('\''); print(name); print('\'')
- } else
- print(name)
- }
-
- protected def printAssemblyBoilerplate() {
- // print all the external assemblies
- for (j <- 0 until as.length) {
- printAssemblySignature(as(j), true)
- }
- // print assembly declaration
- printAssemblySignature(currAssembly, false)
- }
-
- // the entrypoint method
- protected var entryPoint: MethodInfo = null
-
- // current opcode argument
- protected var argument: Object = null
-
- /** Prints the given visitable element, if non-null. */
- @throws(classOf[IOException])
- protected def print(vAble: Visitable) {
- if (vAble != null)
- vAble.apply(this)
- }
-
- /**
- * Visit an AssemblyBuilder
- */
- @throws(classOf[IOException])
- def caseAssemblyBuilder(assemblyBuilder: AssemblyBuilder)
-
- protected var currentModule: Module = null
- /**
- * Visit a ModuleBuilder
- */
- @throws(classOf[IOException])
- def caseModuleBuilder(module: ModuleBuilder)
-
- protected var currentType: Type = null
-
- def printTypeParams(sortedTVars : Array[GenericParamAndConstraints]) {
-
- def constraintFlags(tVar : GenericParamAndConstraints) = {
- val varianceDirective = (if (tVar.isCovariant) "+ " else (if (tVar.isContravariant) "- " else ""))
- val typeKindDirective = (if (tVar.isReferenceType) "class " else (if (tVar.isValueType) "valuetype " else ""))
- val dfltConstrDirective = (if (tVar.hasDefaultConstructor) ".ctor " else "")
- varianceDirective + typeKindDirective + dfltConstrDirective
- }
-
- def tparamName(tVar : GenericParamAndConstraints) = {
- /* TODO Type-params in referenced assemblies may lack a name (those in a TypeBuilder or MethodBuilder shouldn't).
- Given that we need not list (in ilasm syntax) the original type-params' names when
- providing type arguments to it, the only type-param-names we'll serialize into a .msil file
- are those for type-params in a TypeBuilder or MethodBuilder. Still, more details on this
- appear in Sec. 4.5 "Faulty metadata in XMLReaderFactory" of
- http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/Libs4Lib.pdf
-
- To avoid name clashes when choosing a param name,
- first collect all existing tparam-names from a type (and its nested types).
- Not that those names are strictly needed (ordinal positions could be used instead),
- but they make the output more readable when disassembling with ildasm. */
- assert(tVar.Name != null)
- tVar.Name
- }
-
- if(sortedTVars.length == 0) { return }
- print('<')
- val lastIdx = sortedTVars.length - 1
- for (it <- 0 until sortedTVars.length) {
- val tVar = sortedTVars(it)
- print(constraintFlags(tVar))
- if(tVar.Constraints.length > 0) {
- print('(')
- val lastCnstrtIdx = tVar.Constraints.length - 1
- for (ic <- 0 until tVar.Constraints.length) {
- val cnstrt = tVar.Constraints(ic)
- printReference(cnstrt)
- if (ic < lastCnstrtIdx) { print(", ") }
- }
- print(')')
- }
- print(" " + tparamName(tVar))
- if (it < lastIdx) { print(", ") }
- }
- print('>')
- }
-
- /**
- * Visit a TypeBuilder
- */
- @throws(classOf[IOException])
- def caseTypeBuilder(`type`: TypeBuilder) {
- currentType = `type`
- if (!`type`.Namespace.equals("") && `type`.DeclaringType == null) {
- print(".namespace \'" ); print(`type`.Namespace); println("\'")
- println("{"); indent()
- }
- print(".class ")
- // <classHead> ::=
- // <classAttr>* <id>
- // [extends <typeReference>]
- // [implements <typeReference> [, <typeReference>]*]
- print(TypeAttributes.toString(`type`.Attributes))
- print(" \'"); print(`type`.Name); print("\'")
- printTypeParams(`type`.getSortedTVars())
- if (`type`.BaseType() != null) {
- println()
- print(" extends ")
- printReference(`type`.BaseType())
- }
- var ifaces: Array[Type] = `type`.getInterfaces()
- if (ifaces.length > 0) {
- println()
- print(" implements ")
- for (i <- 0 until ifaces.length) {
- if (i > 0) {
- println(",")
- print(" ")
- }
- printReference(ifaces(i))
- }
- }
- println()
- println("{")
- indent()
- if (!nomembers && `type`.sourceFilename != null)
- println(".line " + `type`.sourceLine
- + " '" + `type`.sourceFilename + "'")
- if (!nomembers) {
- printAttributes(`type`)
- }
- // print nested classes
- val nested = `type`.nestedTypeBuilders.iterator
- while(nested.hasNext)
- print(nested.next().asInstanceOf[TypeBuilder])
-
- // print each field
- val fields = `type`.fieldBuilders.iterator
- while(fields.hasNext)
- print(fields.next().asInstanceOf[FieldBuilder])
-
- // print each constructor
- val constrs = `type`.constructorBuilders.iterator
- while (constrs.hasNext)
- print(constrs.next().asInstanceOf[ConstructorBuilder])
-
- // print each method
- val methods = `type`.methodBuilders.iterator
- while (methods.hasNext) {
- val method = methods.next().asInstanceOf[MethodBuilder]
- assert(method.DeclaringType == `type`)
- print(method)
- }
-
- undent(); println("}")
- if (!`type`.Namespace.equals("") && `type`.DeclaringType == null) {
- undent(); println("}")
- }
- currentType = null
- }
-
- /**
- * Visit a FieldBuilder
- */
- @throws(classOf[IOException])
- def caseFieldBuilder(field: FieldBuilder) {
- if (nomembers) return
- // [[int32]] <fieldAttr>* <type> <id> [= <fieldInit> | at <dataLabel>]
- print(".field ")
- print(FieldAttributes.toString(field.Attributes))
- print(" "); printSignature(field.FieldType, field.cmods)
- print(" \'"); print(field.Name); print("\'")
- if (field.IsLiteral()) {
- print(" = ")
- val value = field.getValue()
- if (value == null) {
- print("nullref")
- } else if (value.isInstanceOf[String]) {
- print(msilString(value.asInstanceOf[String]))
- } else if (value.isInstanceOf[Boolean]) {
- print("bool (")
- print(if((value.asInstanceOf[Boolean]).booleanValue()) { "true" } else { "false" })
- print(")")
- } else if (value.isInstanceOf[Byte]) {
- print("int8 (")
- print(value)
- print(")")
- } else if (value.isInstanceOf[java.lang.Short]) {
- print("int16 (")
- print(value)
- print(")")
- } else if (value.isInstanceOf[Character]) {
- print("char (")
- print((value.asInstanceOf[Character]).charValue())
- print(")")
- } else if (value.isInstanceOf[Integer]) {
- print("int32 (")
- print((value.asInstanceOf[Integer]).intValue())
- print(")")
- } else if (value.isInstanceOf[Long]) {
- print("int64 (")
- print((value.asInstanceOf[Long]).longValue())
- print(")")
- } else if (value.isInstanceOf[Float]) {
- print(msilSyntaxFloat(value.asInstanceOf[Float]))
- } else if (value.isInstanceOf[Double]) {
- print(msilSyntaxDouble(value.asInstanceOf[Double]))
- } else {
- throw new Error("ILPrinterVisitor: Illegal default value: "
- + value.getClass())
- }
- }
- println()
- printAttributes(field)
- }
-
- def msilSyntaxFloat(valFlo: java.lang.Float) : String = {
- // !!! check if encoding is correct
- val bits = java.lang.Float.floatToRawIntBits(valFlo.floatValue())
- /* see p. 170 in Lidin's book Expert .NET 2.0 IL Assembler */
- /* Note: no value compares equal to NaN, not even NaN itself; x == Float.NaN always evaluates to false. */
- val res = if (valFlo.isNaN) "0xFFC00000 /* NaN */ " /* TODO this is a 'quiet' NaN, http://www.savrola.com/resources/NaN.html , what's the difference with a 'signaling' NaN?? */
- else if (java.lang.Float.NEGATIVE_INFINITY == valFlo.floatValue) "0xFF800000 /* NEGATIVE_INFINITY */ "
- else if (java.lang.Float.POSITIVE_INFINITY == valFlo.floatValue) "0x7F800000 /* POSITIVE_INFINITY */ "
- else bits
- "float32 (" + res + ")"
- }
-
- def msilSyntaxDouble(valDou: java.lang.Double) : String = {
- // !!! check if encoding is correct
- var bits = java.lang.Double.doubleToRawLongBits(valDou.doubleValue())
- /* see p. 170 in Lidin's book Expert .NET 2.0 IL Assembler */
- /* Note: no value compares equal to NaN, not even NaN itself; x == Double.NaN always evaluates to false. */
- val res = if (valDou.isNaN) "0xffffffffffffffff /* NaN */ " /* TODO this is a 'quiet' NaN, http://www.savrola.com/resources/NaN.html , what's the difference with a 'signaling' NaN?? */
- else if (java.lang.Double.NEGATIVE_INFINITY == valDou.doubleValue) "0xfff0000000000000 /* NEGATIVE_INFINITY */ "
- else if (java.lang.Double.POSITIVE_INFINITY == valDou.doubleValue) "0x7ff0000000000000 /* POSITIVE_INFINITY */ "
- else bits
- // float64(float64(...)) != float64(...)
- "float64 (" + res + ")"
- }
-
- /**
- * Visit a ConstructorBuilder
- */
- @throws(classOf[IOException])
- def caseConstructorBuilder(constr: ConstructorBuilder) {
- if (nomembers) return
- print(".method "); printHeader(constr, VOID)
- println(); println("{"); indent()
- printAttributes(constr)
- try {
- print(constr.GetILGenerator())
- } catch {
- case e : RuntimeException => {
- System.err.println("In method " + constr)
- e.printStackTrace()
- }
- }
- undent(); println("}")
- }
-
- /**
- * Visit a MethodBuilder
- */
- @throws(classOf[IOException])
- def caseMethodBuilder(method: MethodBuilder) {
- if (nomembers) return
- print(".method "); printHeader(method, method.ReturnType)
- if (method.IsAbstract()
- || (method.DeclaringType != null
- && method.DeclaringType.IsInterface()
- && !method.IsStatic()))
- {
- println(" {"); indent()
- printAttributes(method)
- undent(); println("}")
- } else {
- println(); println("{"); indent()
- printAttributes(method)
- if (method == entryPoint)
- println(".entrypoint")
- try {
- print(method.GetILGenerator())
- } catch {
- case e: RuntimeException =>
- System.err.println("In method " + method)
- e.printStackTrace()
- }
- undent(); println("}")
- }
- }
-
- /**
- * Visit a ParameterBuilder
- */
- @throws(classOf[IOException])
- def caseParameterBuilder(param: ParameterBuilder) {
- print(ParameterAttributes.toString(param.Attributes))
- printSignature(param.ParameterType)
- //print(' ') print(marshal)
- print(' '); printName(param.Name)
- }
-
- var locals: Array[LocalBuilder] = null
- /**
- * Visit an ILGenerator
- */
- @throws(classOf[IOException])
- def caseILGenerator(code: ILGenerator) {
- // print maxstack
- println(".maxstack " + code.getMaxStacksize())
- // get the local variables
- locals = code.getLocals()
- if (locals.length > 0) {
- println(".locals init (")
- indent()
- for (i <- 0 until locals.length) {
- if (i > 0) println(",")
- print(locals(i))
- } // end while
- undent()
- println(")")
- }
- // get 3 iterators for the 3 lists
- val itL = code.getLabelIterator()
- val itO = code.getOpcodeIterator()
- val itA = code.getArgumentIterator()
- // iterate over each opcode
- while (itO.hasNext) {
- // first print label
- val label = itL.next
- val oOpt = code.lineNums.get(label)
- if (oOpt.isDefined) {
- println(".line " + oOpt.get)
- }
- argument = itA.next.asInstanceOf[Object]
- printLabel(label)
- val o2 = itO.next
- if (o2 != null) {
- print(" ")
- print(o2.asInstanceOf[OpCode])
- }
- println()
- } // end while
- }
-
- /**
- * visit an OpCode
- */
- @throws(classOf[IOException])
- def caseOpCode(opCode: OpCode) {
- var opString = opCode.toString()
- print(opString)
- pad(14 - opString.length())
-
- // switch opcode
- if (opCode == OpCode.Ldstr) {
- print(msilString(argument.toString()))
- } else if(opCode == OpCode.Switch) {
- // switch ( <labels> )
- print("(")
- val targets = argument.asInstanceOf[Array[Label]]
- val m = targets.length
- for (i <- 0 until m) {
- if (i != 0) print(", ")
- print(targets(i))
- } // end for
- print(")")
- } else if(opCode == OpCode.Call || opCode == OpCode.Callvirt || opCode == OpCode.Jmp || opCode == OpCode.Ldftn || opCode == OpCode.Ldvirtftn) {
- // call | callvirt | jmp | ldftn | ldvirtftn
- // <instr_method> <callConv> <type> [ <typeSpec> :: ] <methodName>
- printSignature(argument.asInstanceOf[MethodBase])
- } else if (opCode == OpCode.Newobj) {
- printSignature(argument.asInstanceOf[ConstructorInfo])
- // ldfld | ldflda | ldsfld | ldsflda | stfld | stsfld
- } else if (opCode == OpCode.Ldfld || opCode == OpCode.Ldflda || opCode == OpCode.Ldsfld || opCode == OpCode.Ldsflda || opCode == OpCode.Stfld || opCode == OpCode.Stsfld) {
- printSignature(argument.asInstanceOf[FieldInfo])
- } else if (opCode == OpCode.Castclass || opCode == OpCode.Isinst || opCode == OpCode.Ldobj || opCode == OpCode.Newarr) {
- printSignature(argument.asInstanceOf[Type])
- } else if (opCode == OpCode.Box || opCode == OpCode.Unbox || opCode == OpCode.Ldtoken || opCode == OpCode.Initobj) {
- printReference(argument.asInstanceOf[Type])
- } else if (opCode == OpCode.Ldloc || opCode == OpCode.Ldloc_S || opCode == OpCode.Ldloca || opCode == OpCode.Ldloca_S || opCode == OpCode.Stloc || opCode == OpCode.Stloc_S) {
- val loc = argument.asInstanceOf[LocalBuilder]
- print(loc.slot); print("\t// "); printSignature(loc.LocalType)
- print(" \'"); print(loc.name); print("\'")
- //print("'") print(((LocalBuilder)argument).name) print("'")
- } else if (opCode == OpCode.Ldloc_0 || opCode == OpCode.Ldloc_1 || opCode == OpCode.Ldloc_2 || opCode == OpCode.Ldloc_3 ) {
- val loc = locals(opCode.CEE_opcode - OpCode.CEE_LDLOC_0)
- print("\t// "); printSignature(loc.LocalType)
- print(" \'"); print(loc.name); print("\'")
- } else if (opCode == OpCode.Stloc_0 || opCode == OpCode.Stloc_1 || opCode == OpCode.Stloc_2 || opCode == OpCode.Stloc_3 ) {
- val loc = locals(opCode.CEE_opcode - OpCode.CEE_STLOC_0)
- print("\t// "); printSignature(loc.LocalType)
- print(" \'"); print(loc.name); print("\'")
- } else if (opCode == OpCode.Readonly) {
- // nothing to do
- } else if (opCode == OpCode.Constrained) {
- printReference(argument.asInstanceOf[Type])
- } else if (opCode == OpCode.Ldelema) {
- printReference(argument.asInstanceOf[Type])
- } else {
- // by default print toString argument if any
- if (argument != null) {
- val strArgument = java.lang.String.valueOf(argument)
- if ( argument.isInstanceOf[java.lang.Float]
- && ( strArgument.equals("NaN")
- || strArgument.equals("-Infinity")
- || strArgument.equals("Infinity")))
- print(msilSyntaxFloat(argument.asInstanceOf[java.lang.Float]))
- else if ( argument.isInstanceOf[java.lang.Double]
- && ( strArgument.equals("NaN")
- || strArgument.equals("-Infinity")
- || strArgument.equals("Infinity")))
- print(msilSyntaxDouble(argument.asInstanceOf[java.lang.Double]))
- else print(strArgument)
- }
-
- } // end switch
- }
-
- /**
- * Visit a Label
- */
- def printLabel(label: Label) {
- val kind = label.getKind()
- if (kind == Label.Kind.Normal) {
- print(label+ ": ")
- } else if (kind == Label.Kind.NewScope) {
- print("{"); indent()
- } else if (kind == Label.Kind.EndScope) {
- undent(); print("}")
- } else if (kind == Label.Kind.Try) {
- print(".try {"); indent()
- } else if (kind == Label.Kind.Catch) {
- undent()
- println("}")
- print("catch ")
- printReference(argument.asInstanceOf[Type])
- print(" {")
- indent()
- } else if (kind == Label.Kind.Filter) {
- undent()
- println("}")
- print("filter {")
- indent()
- } else if (kind == Label.Kind.EndFilter) {
- print("endfilter")
- undent()
- println("}")
- } else if (kind == Label.Kind.Finally) {
- undent()
- println("}")
- print("finally {")
- indent()
- } else if (kind == Label.Kind.EndTry) {
- undent()
- print("}")
- }
- }
-
- /**
- * Visit a LocalBuilder
- */
- @throws(classOf[IOException])
- def caseLocalBuilder(localBuilder: LocalBuilder) {
- // print type
- printSignature(localBuilder.LocalType)
- // space
- print(" \'")
- // print name
- print(localBuilder.name)
- print("\'")
- }
-
-
- //##########################################################################
-
- def printAssemblySignature(assem: Assembly, extern: Boolean) {
- print(".assembly ")
- if (extern)
- print("extern ")
- val an = assem.GetName()
- printName(an.Name); println()
- println("{")
- if (!extern)
- printAttributes(assem)
- val v = an.Version
- if (v != null) {
- print(" .ver "); print(v.Major); print(':'); print(v.Minor)
- print(':'); print(v.Build); print(':')
- print(v.Revision); println()
- }
- var key = an.GetPublicKeyToken()
- if (key != null) {
- print(" .publickeytoken = ("); print(PEFile.bytes2hex(key))
- println(")")
- } else {
- key = an.GetPublicKey()
- if (key != null) {
- print(" .publickey = ("); print(PEFile.bytes2hex(key))
- println(")")
- }
- }
- println("}")
- }
-
-
- def printSignature(field: FieldInfo) {
- printSignature(field.FieldType, field.cmods)
- //print(' ') print(owner)
- print(' ')
- //if (field.IsStatic && field.DeclaringType != currentType) {
- printReference(field.DeclaringType)
- print("::")
- //}
- printName(field.Name)
- }
-
- // print method head
- @throws(classOf[IOException])
- def printHeader(method: MethodBase, returnType: Type) {
- print(MethodAttributes.toString(method.Attributes))
- print(' '); print(CallingConventions.toString(method.CallingConvention))
- print(' '); printSignature(returnType)
- //print(' ') print(marshal)
- print(' '); printName(method.Name)
- if(method.isInstanceOf[MethodInfo]) {
- val mthdInfo = method.asInstanceOf[MethodInfo]
- printTypeParams(mthdInfo.getSortedMVars())
- }
- val params = method.GetParameters()
- print('(')
- for (i <- 0 until params.length) {
- if (i > 0) print(", ")
- print(params(i).asInstanceOf[ParameterBuilder])
- }
- print(") ")
-
- print(MethodImplAttributes
- .toString(method.GetMethodImplementationFlags()))
- }
-
-
- def printSignature(method: MethodBase) {
- var returnType: Type = null
- if (method.isInstanceOf[MethodInfo])
- returnType = (method.asInstanceOf[MethodInfo]).ReturnType
- else if (method.isInstanceOf[ConstructorInfo])
- returnType = VOID
- else
- throw new RuntimeException()
-
- val s = CallingConventions.toString(method.CallingConvention)
- print(s)
- if (s.length() > 0) print(' ')
- printSignature(returnType)
- //print(' ') print(owner)
- print(' '); printReference(method.DeclaringType)
- print("::"); printName(method.Name)
-
- var params = method.GetParameters()
- print("(")
- for (i <- 0 until params.length) {
- if (i > 0) print(", ")
- printSignature(params(i).ParameterType)
- }
- print(")")
- }
-
- def printSignature(marked: Type, cmods: Array[CustomModifier]) {
- printSignature(marked)
- if( (cmods != null) && !cmods.isEmpty ) {
- print(" ")
- for(cm <- cmods) {
- print(if (cm.isReqd) "modreq( " else "modopt( ")
- printReference(cm.marker)
- print(" ) ")
- }
- }
- }
-
- def printSignature(`type`: Type) {
- val sigOpt = primitive.get(`type`)
- if (sigOpt.isDefined) {
- print(sigOpt.get)
- return
- }
- if (`type`.HasElementType()) {
- printSignature(`type`.GetElementType())
- if (`type`.IsArray())
- print("[]")
- else if (`type`.IsPointer())
- print('*')
- else if (`type`.IsByRef())
- print('&')
- } else {
- val preref = if (`type`.isInstanceOf[Type.TMVarUsage]) ""
- else if(`type`.IsValueType()) "valuetype "
- else "class "
- print(preref)
- printReference(`type`)
- }
- }
-
- def printReference(`type`: Type) {
- if (`type`.Module != null) { // i.e. not PrimitiveType and not TMVarUsage
- if (`type`.Assembly() != currentModule.Assembly) {
- print('['); print(`type`.Assembly().GetName().Name); print("]")
- } else if (`type`.Module != currentModule) {
- print("[.module "); print(`type`.Module.Name); print("]")
- }
- }
- printTypeName(`type`)
- }
-
- def printTypeName(`type`: Type) {
- if (`type`.isInstanceOf[ConstructedType]) {
- val ct = `type`.asInstanceOf[ConstructedType]
- printTypeName(ct.instantiatedType)
- print("<")
- var i = 0
- while (i < ct.typeArgs.length) {
- val ta = ct.typeArgs(i)
- val sigOpt = primitive.get(ta)
- if (sigOpt.isDefined) print(sigOpt.get)
- else printTypeName(ta); /* should be printSignature, but don't want `class` or `valuetype`
- appearing before a type param usage. */
- i = i + 1;
- if (i < ct.typeArgs.length) {
- print(", ")
- }
- }
- print(">")
- } else if (`type`.DeclaringType != null) {
- printTypeName(`type`.DeclaringType)
- print('/')
- printName(`type`.Name)
- } else {
- printName(`type`.FullName)
- }
- }
-
- def printAttributes(icap: ICustomAttributeProvider) {
- var attrs = icap.GetCustomAttributes(false)
- for (i <- 0 until attrs.length) {
- print(".custom ")
- printSignature((attrs(i).asInstanceOf[Attribute]).getConstructor())
- print(" = (")
- print(PEFile.bytes2hex((attrs(i).asInstanceOf[Attribute]).getValue()))
- println(")")
- }
- }
-
- //##########################################################################
-
-} // class ILPrinterVisitor
-
-object ILPrinterVisitor {
- final val VOID: Type = Type.GetType("System.Void")
- protected final val TAB = 4
-
- protected final val SPACES = " "
- protected final val SPACES_LEN = SPACES.length()
-
- def hasControlChars(str: String): Boolean = {
- for(i <- 0 until str.length()) {
- var ch = str.charAt(i)
- ch match {
- case '\b' =>
- case '\t' =>
- case '\n' =>
- case '\f' =>
- case '\r' =>
- case _ => if(Character.isISOControl(ch)) return true
- }
- }
- return false
- }
-
- final val EMPTY: String = ""
- def msilString(s: String): String = {
- if (hasControlChars(s)) {
- try {
- return "bytearray (" + PEFile.bytes2hex(s.getBytes("UTF-16LE")) + ")"
- } catch {
- case e : java.io.UnsupportedEncodingException => throw new RuntimeException(e)
- }
- }
- var str = new StringBuffer(s)
- var ss = EMPTY
- var i = 0
- while(i < str.length()) {
- ss = EMPTY
- val c = str.charAt(i)
- c match {
- case '\b' => ss = "\\b"
- case '\t' => ss = "\\t"
- case '\n' => ss = "\\n"
- case '\f' => ss = "\\f"
- case '\r' => ss = "\\r"
- case '\"' => ss = "\\\""
- case '\'' => ss = "\\\'"
- case '\\' => ss = "\\\\"
- case _ => if (Character.isISOControl(c))
- ss = "\\u" + PEFile.int2hex(Character.getNumericValue(c))
- }
- if (ss != EMPTY) {
- str.replace(i, i + 1, ss)
- i = i + ss.length() - 1
- }
- i = i + 1
- }
- return "\"" + str.toString() + "\""
- }
-
- /**
- * The main printer entry points: write the assembly to a single file, or split it across multiple files.
- */
- @throws(classOf[IOException])
- def printAssembly(assemblyBuilder: AssemblyBuilder, fileName: String) {
- assemblyBuilder.apply(new SingleFileILPrinterVisitor(fileName))
- }
-
- @throws(classOf[IOException])
- def printAssembly(assemblyBuilder: AssemblyBuilder, destPath: String, sourceFilesPath: String) {
- assemblyBuilder.apply(new MultipleFilesILPrinterVisitor(destPath, sourceFilesPath))
- }
-
- /** The current assembly */
- var currAssembly: Assembly = _
-
- final var primitive = scala.collection.mutable.Map.empty[Type, String]
- def addPrimitive(name: String, sig: String) {
- val `type` = Type.GetType(name)
- assert(`type` != null, "Cannot lookup primitive type " + name)
- primitive.put(`type`, sig)
- }
-
- addPrimitive("System.Object", "object")
- addPrimitive("System.String", "string")
- addPrimitive("System.Void", "void")
- addPrimitive("System.Boolean", "bool")
- addPrimitive("System.Char", "char")
- addPrimitive("System.SByte", "int8")
- addPrimitive("System.Byte", "unsigned int8")
- addPrimitive("System.Int16", "int16")
- addPrimitive("System.UInt16", "unsigned int16")
- addPrimitive("System.Int32", "int32")
- addPrimitive("System.UInt32", "unsigned int32")
- addPrimitive("System.Int64", "int64")
- addPrimitive("System.UInt64", "unsigned int64")
- addPrimitive("System.IntPtr", "native int")
- addPrimitive("System.UIntPtr", "unsigned native int")
- addPrimitive("System.Single", "float32")
- addPrimitive("System.Double", "float64")
- addPrimitive("System.TypedReference", "typedref")
-}
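A quick check of the string quoting done by msilString above (MsilStringCheck is a hypothetical name):

    import ch.epfl.lamp.compiler.msil.emit.ILPrinterVisitor.msilString

    object MsilStringCheck {
      def main(args: Array[String]) {
        println(msilString("plain"))          // prints "plain" (with the quotes)
        println(msilString("tab\tand\"q\""))  // tabs, quotes, etc. are backslash-escaped
        println(msilString("\u0001"))         // other control chars fall back to a bytearray (...) literal
      }
    }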
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala
deleted file mode 100644
index 22c1b1150b..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.Type
-
-/**
- * Represents a label in the instruction stream. Label is used in conjunction
- * with the ILGenerator class.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-abstract class Label protected {
- import Label._
- def isInitialized(): Boolean
- def getKind(): Kind
- def getAddress(): Int
- def getStacksize(): Int
- def setStacksize(stacksize: Int): Unit
- def incStacksize(): Unit
- def mergeWith(that: Label): Unit
-}
-
-object Label {
- final val DUMMY: Int = -((1<<31)-1)
-
- //##########################################################################
-
- final class NormalLabel(_address: Int, _stacksize: Int) extends Label {
-
- //##########################################################################
- // protected constructors
-
- //the position of the label
- private var address: Int = _address
-
- //the stacksize at the label
- private var stacksize: Int = _stacksize
-
- def this() {
- this(-1, DUMMY)
- }
-
- def this(that: NormalLabel) {
- this(that.getAddress(), that.getStacksize())
- }
-
- //##########################################################################
- // instrumental methods only used by ILGenerator
-
- def isInitialized() = (getAddress() != -1) || (stacksize != DUMMY)
-
- def getAddress() = address
-
- def getStacksize() = stacksize
-
- def setStacksize(stacksize: Int) {
- assert(stacksize >= 0)
- this.stacksize = stacksize
- }
-
- def incStacksize() {
- stacksize = stacksize + 1
- }
-
- def getKind(): Kind = Kind.Normal
-
- def mergeWith(that: Label) {
- //assert address < 0 : "this.address = " + address + " that.address = " + that.address
- address = that.getAddress()
-
- // assert stacksize == that.stacksize
- // : "this.stacksize = " + stacksize + " that.stacksize = "
- // + that.stacksize
- // stacksize = that.stacksize
- val ss: Int = math.max(stacksize, that.getStacksize())
- stacksize = ss
- that.setStacksize(ss)
- }
-
- //##########################################################################
- //
-
- /**
- * The toString method returns the label name,
- * i.e. "IL_" followed by the zero-padded hex address.
- */
- override def toString(): String = {
- var pad: String = ""
- if (address < 16) pad = "000"
- else if (address < 256) pad = "00"
- else if (address < 4096) pad = "0"
- return "IL_" + pad + Integer.toHexString(address)
- }
-
- def getString(): String = {
- val name = super.toString()
- val i: Int = name.lastIndexOf('.')
- return name.substring(i+1, name.length())
- }
- }
-
- //########################################################################
- // Special Labels
-
- final class SpecialLabel(_kind: Label.Kind) extends Label {
- private final var kind: Label.Kind = _kind
- def isInitialized() = true
- def getAddress(): Int = { throw new RuntimeException("" + kind.toString()) }
- def getStacksize(): Int = { throw new RuntimeException("" + kind.toString()) }
- def setStacksize(stacksize: Int) { throw new RuntimeException(kind.toString()) }
- def incStacksize() { throw new RuntimeException(kind.toString()) }
- def getKind(): Kind = kind
- def mergeWith(that: Label) { throw new RuntimeException(kind.toString()) }
- override def toString(): String = "Label(" + kind.toString() + ")"
- }
-
- final val NewScope: Label = new SpecialLabel(Kind.NewScope)
- final val EndScope: Label = new SpecialLabel(Kind.EndScope)
- final val Try: Label = new SpecialLabel(Kind.Try)
- final val Catch: Label = new SpecialLabel(Kind.Catch)
- final val Filter: Label = new SpecialLabel(Kind.Filter)
- final val EndFilter: Label = new SpecialLabel(Kind.EndFilter)
- final val Finally: Label = new SpecialLabel(Kind.Finally)
- final val EndTry: Label = new SpecialLabel(Kind.EndTry)
-
- final class Kind() {}
-
- final object Kind {
- final val Normal: Kind = new Kind()
-
- final val NewScope: Kind = new Kind()
- final val EndScope: Kind = new Kind()
-
- final val Try: Kind = new Kind()
- final val Catch: Kind = new Kind()
- final val Filter: Kind = new Kind()
- final val EndFilter: Kind = new Kind()
- final val Finally: Kind = new Kind()
- final val EndTry: Kind = new Kind()
- }
-
- //##########################################################################
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/LocalBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/LocalBuilder.scala
deleted file mode 100644
index 73bca4639f..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/LocalBuilder.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.Type
-
-/**
- * Represents a local variable within a method or constructor.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class LocalBuilder(_slot : Int, localType : Type) extends Visitable {
-
- /**
- * the type of the local variable.
- */
- var LocalType : Type = localType
-
- // the name of the local variable
- var name : String = "L_" + _slot
-
- // the slot occupied by this local in the corresponding ILGenerator
- var slot : Int = _slot
-
- /**
- * Sets the name of this local variable.
- */
- def SetLocalSymInfo(name : String) {
- this.name = name
- }
-
- override def toString() : String = name
-
- /**
- * the apply method for a visitor
- */
- def apply(v : Visitor) {
- v.caseLocalBuilder(this)
- }
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala
deleted file mode 100644
index 237d8fd728..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.MethodInfo
-import ch.epfl.lamp.compiler.msil.ParameterInfo
-import ch.epfl.lamp.compiler.msil.Type
-import ch.epfl.lamp.compiler.msil.ConstructorInfo
-import java.io.IOException
-
-/**
- * Defines and represents a method of a dynamic class.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class MethodBuilder(name: String, declType: Type, attrs: Int, returnType: Type, paramTypes: Array[Type])
- extends MethodInfo(name, declType, attrs, returnType, paramTypes)
- with ICustomAttributeSetter
- with Visitable
-{
-
- //##########################################################################
- // public interface
-
- /** Defines a parameter of this method. TODO: Parameters are indexed starting
- * from number 1 for the first parameter
- */
- def DefineParameter(pos: Int, attr: Int, name: String): ParameterBuilder = {
- val param = new ParameterBuilder(name, params(pos).ParameterType, attr, pos)
- params(pos) = param
- return param
- }
-
- /** Returns an ILGenerator for this method. */
- def GetILGenerator(): ILGenerator = {
- if (ilGenerator == null)
- throw new RuntimeException(
- "No code generator available for this method: " + this)
- return ilGenerator
- }
-
- /** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
- addCustomAttribute(constr, value)
- }
-
- //##########################################################################
-
- /** The apply method for a visitor. */
- @throws(classOf[IOException])
- def apply(v: Visitor) {
- v.caseMethodBuilder(this)
- }
-
- //##########################################################################
-
- // the Intermediate Language Generator
- // it contains the method's body
- protected final val ilGenerator : ILGenerator =
- if (DeclaringType == null // global method
- || !DeclaringType.IsInterface())
- new ILGenerator(this)
- else null
-
- //##########################################################################
-}
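For orientation, a minimal sketch of how the MethodBuilder deleted above was typically driven. The declaring type, attribute flags and return type are placeholders, and ILGenerator.Emit(op: OpCode) is assumed to exist with a System.Reflection.Emit-like signature (it is not shown in this diff):

  import ch.epfl.lamp.compiler.msil.Type
  import ch.epfl.lamp.compiler.msil.emit.{MethodBuilder, OpCode}

  // Sketch only: declaringType, attrs and intType are assumed placeholders, and
  // ILGenerator.Emit is assumed to take an OpCode as in System.Reflection.Emit.
  val mb = new MethodBuilder("Answer", declaringType, attrs, intType, new Array[Type](0))
  val il = mb.GetILGenerator()   // throws for bodiless (interface) methods
  il.Emit(OpCode.Ldc_I4_8)       // push the int32 constant 8
  il.Emit(OpCode.Ret)            // return it to the caller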
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala
deleted file mode 100644
index 981e855e0e..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil._
-import java.io.IOException
-
-/**
- * Defines and represents a module. Get an instance of ModuleBuilder
- * by calling DefineDynamicModule
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class ModuleBuilder(name: String, fullname: String, scopeName: String, assembly: Assembly)
- extends Module(name, fullname, scopeName, assembly)
- with ICustomAttributeSetter
- with Visitable
-{
-
- //##########################################################################
- // public interface
-
- /**
- * Completes the global function definitions for this dynamic module.
- * This method should be called once the user is done defining all of
- * the global functions within this dynamic module. After calling it,
- * no new global functions or global data may be defined.
- */
- def CreateGlobalFunctions() {
- if (globalsCreated)
- throw new RuntimeException("Global functions are already created")
- this.fields = fieldBuilders.toArray // (fields).asInstanceOf[Array[FieldInfo]]
- this.methods = methodBuilders.toArray // (methods).asInstanceOf[Array[MethodInfo]]
- globalsCreated = true
- }
-
- /**
- * Constructs a TypeBuilder for a type with the specified name
- */
- def DefineType(typeName: String): TypeBuilder = {
- return DefineType(typeName, 0, null, Type.EmptyTypes)
- }
-
- /**
- * Constructs a TypeBuilder for a type with the specified name
- * and specified attributes
- */
- def DefineType(typeName: String, attributes: Int): TypeBuilder = {
- return DefineType(typeName, attributes, null, Type.EmptyTypes)
- }
-
- /**
- * Constructs a TypeBuilder given type name, its attributes,
- * and the type that the defined type extends.
- */
- def DefineType(typeName: String, attributes: Int,
- baseType: Type): TypeBuilder = {
- return DefineType(typeName, attributes, baseType, Type.EmptyTypes)
- }
-
- /**
- * Constructs a TypeBuilder given the full specification of a type:
- * the type name, attributes, the type that the defined type
- * extends, and the interfaces that the defined type implements.
- */
- def DefineType(typeName: String,
- attributes: Int,
- baseType: Type,
- interfaces: Array[Type]): TypeBuilder =
- {
- var t: Type = GetType(typeName) // Module.GetType(String)
- if (t != null)
- throw new RuntimeException(
- "Type '[" + Assembly + "]" + typeName + "' already exists!")
- val `type` =
- new TypeBuilder(this, attributes, typeName, baseType, interfaces, null)
- addType(`type`)
- return `type`
- }
-
- /**
- * Defines a global method given its name, attributes, return type, and
- * parameter types.
- */
- def DefineGlobalMethod(name: String, attributes: Int,
- returnType: Type, paramTypes: Array[Type]): MethodBuilder =
- {
- val method =
- new MethodBuilder(name, null, attributes, returnType, paramTypes)
- methodBuilders += method
- return method
- }
-
-
- override def GetTypes(): Array[Type] = {
- val res = scala.collection.mutable.ArrayBuffer.empty[Type]
- val iter = typesMap.values().iterator
- while (iter.hasNext) {
- res += iter.next.asInstanceOf[Type]
- }
- return res.toArray
- }
-
- /** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
- addCustomAttribute(constr, value)
- }
-
- //##########################################################################
- // internal members
-
- var globalsCreated = false
- protected var fieldBuilders = scala.collection.mutable.ArrayBuffer.empty[FieldInfo]
- protected var methodBuilders = scala.collection.mutable.ArrayBuffer.empty[MethodInfo]
-
- override def addType(t: Type): Type = {
- return super.addType(t)
- }
-
- //##########################################################################
-
- /**
- * the apply method for a visitor
- */
- @throws(classOf[IOException])
- def apply(v: Visitor) {
- v.caseModuleBuilder(this)
- }
-
- //##########################################################################
-}
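A typical pass over the ModuleBuilder deleted above defines types and global methods, seals the globals, and then enumerates the result. A hedged sketch, assuming an existing module: ModuleBuilder plus placeholder attribute flags and a void return type obtained elsewhere:

  import ch.epfl.lamp.compiler.msil.Type

  // Sketch only: module, typeAttrs, methodAttrs and voidType are assumed.
  val greeter = module.DefineType("Demo.Greeter", typeAttrs)            // returns a TypeBuilder
  val main    = module.DefineGlobalMethod("Main", methodAttrs, voidType,
                                          new Array[Type](0))           // returns a MethodBuilder
  module.CreateGlobalFunctions()   // freezes the globals; throws if called twice
  val types   = module.GetTypes()  // every type registered with this module so far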
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala
deleted file mode 100644
index 55c52109b6..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies in MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import java.io.File
-import java.io.FileWriter
-import java.io.BufferedWriter
-import java.io.PrintWriter
-import java.io.IOException
-import java.util.Iterator
-import java.util.Arrays
-
-import ch.epfl.lamp.compiler.msil._
-import ch.epfl.lamp.compiler.msil.emit
-import ch.epfl.lamp.compiler.msil.util.Table
-
-/**
- * The MSIL printer Visitor. It prints a complete
- * assembly into separate files. Then these files can be compiled by ilasm.
- *
- * @author Nikolay Mihaylov
- * @author Daniel Lorch
- * @version 1.0
- */
-final class MultipleFilesILPrinterVisitor(destPath: String, sourceFilesPath: String) extends ILPrinterVisitor {
- /**
- * Visit an AssemblyBuilder
- */
- @throws(classOf[IOException])
- def caseAssemblyBuilder(assemblyBuilder: AssemblyBuilder) {
- ILPrinterVisitor.currAssembly = assemblyBuilder
-
- // first get the entryPoint
- this.entryPoint = assemblyBuilder.EntryPoint
-
- // all external assemblies
- as = assemblyBuilder.getExternAssemblies()
- scala.util.Sorting.quickSort(as)(assemblyNameComparator) // Arrays.sort(as, assemblyNameComparator)
-
- // print each module
- var m: Array[Module] = assemblyBuilder.GetModules()
- nomembers = true
- for(i <- 0 until m.length) {
- print(m(i).asInstanceOf[ModuleBuilder])
- }
-
- nomembers = false
- for(i <- 0 until m.length) {
- print(m(i).asInstanceOf[ModuleBuilder])
- }
- ILPrinterVisitor.currAssembly = null
- }
-
- /**
- * Visit a ModuleBuilder
- */
- @throws(classOf[IOException])
- def caseModuleBuilder(module: ModuleBuilder) {
- val assemblyBuilder = ILPrinterVisitor.currAssembly.asInstanceOf[AssemblyBuilder]
-
- // print module declaration
- currentModule = module
-
- // global methods typically contain the main method
- if (!module.globalsCreated)
- module.CreateGlobalFunctions()
-
- var m: Array[MethodInfo] = module.GetMethods()
-
- // "Types" contain all the classes
- var t: Array[Type] = module.GetTypes()
- for(i <- 0 until t.length) {
- val tBuilder = t(i).asInstanceOf[TypeBuilder]
- val sourceFilename = tBuilder.sourceFilename
- val sourceFilepath = new File(tBuilder.sourceFilepath).getCanonicalPath
- val sourcePath = new File(sourceFilesPath).getCanonicalPath
- var append = false
-
- if(!sourceFilepath.startsWith(sourcePath)) {
- throw new IOException("Source file " + sourceFilename + " must lie inside sourcepath " + sourcePath)
- }
-
- assert(sourceFilepath.endsWith(".scala"), "Source file doesn't end with .scala")
- val relativeFilename = sourceFilepath.substring(sourcePath.length, sourceFilepath.length() - 6) + ".msil"
- val fileName = new File(destPath, relativeFilename)
- if(assemblyBuilder.generatedFiles.contains(fileName.getPath)) {
- append = true
- } else {
- fileName.getParentFile().mkdirs()
- assemblyBuilder.generatedFiles += (fileName.getPath)
- }
-
- out = new PrintWriter(new BufferedWriter(new FileWriter(fileName, append)))
- // only write assembly boilerplate and class prototypes
- if (!append && nomembers) {
- printAssemblyBoilerplate()
-
- print(".module \'"); print(module.Name); println("\'")
- printAttributes(module)
- }
-
- print(t(i).asInstanceOf[TypeBuilder])
- out.close()
- }
-
- // now write the global methods (typically containing the "main" method)
- if(!nomembers) {
- var globalMethods: File = new File(destPath, ILPrinterVisitor.currAssembly.GetName().Name + ".msil")
- val append = assemblyBuilder.generatedFiles.contains(globalMethods.getPath)
-
- out = new PrintWriter(new BufferedWriter(new FileWriter(globalMethods, append)))
-
- // make sure we're the first in the list (ilasm uses the first file name to guess the output file name)
- assemblyBuilder.generatedFiles.insert(0, globalMethods.getPath)
-
- // if this file hasn't been created by one of the classes, write boilerplate
- if(!append) {
- printAssemblyBoilerplate()
-
- print(".module \'"); print(module.Name); println("\'")
- printAttributes(module)
- }
-
- for(i <- 0 until m.length) {
- print(m(i).asInstanceOf[MethodBuilder])
- }
-
- out.close()
- }
-
- currentModule = null
- }
-
-} // class MultipleFilesILPrinterVisitor
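The visitor removed above is constructed with a destination directory and the source root; caseAssemblyBuilder then writes one .msil file per source file (types from the same .scala file are appended to the same output) plus one file, named after the assembly, for the global methods. A minimal sketch, assuming an assemblyBuilder: AssemblyBuilder populated elsewhere:

  // Sketch only: assemblyBuilder is assumed to have been built up beforehand.
  val printer = new MultipleFilesILPrinterVisitor("target/msil", "src/main/scala")
  printer.caseAssemblyBuilder(assemblyBuilder)  // emits the per-source .msil files and <AssemblyName>.msil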
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala
deleted file mode 100644
index b0c26884af..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala
+++ /dev/null
@@ -1,1948 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import java.io.IOException
-
-/** Describes a Microsoft intermediate language (MSIL) instruction.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class OpCode extends Visitable {
- import OpCode._
-
- /** The operation code of the Microsoft intermediate language (MSIL) instruction. */
- var CEE_opcode : Int = _
-
- /** The name of the Microsoft intermediate language (MSIL) instruction. */
- var CEE_string: String = _
-
- /** The encoded value of the Microsoft intermediate language (MSIL) instruction. */
- var CEE_code: Short = _
-
- /** How the Microsoft intermediate language (MSIL) instruction pops the stack. */
- var CEE_pop: Byte = _
-
- /** How the Microsoft intermediate language (MSIL) instruction pushes operands onto the stack. */
- var CEE_push: Byte = _
-
- /** Describes the type of flow control. */
- var CEE_flow: Byte = _
-
- /** The kind of inline argument that follows the instruction (one of the INLINE_* constants). */
- var CEE_inline: Byte = _
-
- /** The length in bytes of the encoded instruction (opcode plus inline argument), or -1 if not fixed. */
- var CEE_length: Byte = _
-
- /** The net change in stack size caused by the instruction, or POPUSH_SPECIAL when it is not fixed (e.g. for calls). */
- var CEE_popush: Byte = _
-
- /**
- * the apply method for a visitor
- */
- @throws(classOf[IOException])
- def apply(v: Visitor) {
- v.caseOpCode(this)
- }
-
- protected def length(): Byte = {
- val code = OpCode.length(CEE_code)
- val inline = OpCode.INLINE_length(CEE_inline)
- return if(inline < 0) { -1 } else { (code + inline).toByte }
- }
-
- protected def popush(): Byte = {
- val pop = OpCode.POP_size(CEE_pop)
- val push = OpCode.PUSH_size(CEE_push)
- return if(pop < 0 || push < 0) { OpCode.POPUSH_SPECIAL } else { (push - pop).toByte }
- }
-
- override def toString(): String = {
- return CEE_string
- }
-}
-
-object OpCode {
-
- //########################################################################
- // Common Execution Environment opcodes
-
- final val CEE_NOP : Int = 0x0000
- final val CEE_BREAK : Int = 0x0001
- final val CEE_LDARG_0 : Int = 0x0002
- final val CEE_LDARG_1 : Int = 0x0003
- final val CEE_LDARG_2 : Int = 0x0004
- final val CEE_LDARG_3 : Int = 0x0005
- final val CEE_LDLOC_0 : Int = 0x0006
- final val CEE_LDLOC_1 : Int = 0x0007
- final val CEE_LDLOC_2 : Int = 0x0008
- final val CEE_LDLOC_3 : Int = 0x0009
- final val CEE_STLOC_0 : Int = 0x000A
- final val CEE_STLOC_1 : Int = 0x000B
- final val CEE_STLOC_2 : Int = 0x000C
- final val CEE_STLOC_3 : Int = 0x000D
- final val CEE_LDARG_S : Int = 0x000E
- final val CEE_LDARGA_S : Int = 0x000F
- final val CEE_STARG_S : Int = 0x0010
- final val CEE_LDLOC_S : Int = 0x0011
- final val CEE_LDLOCA_S : Int = 0x0012
- final val CEE_STLOC_S : Int = 0x0013
- final val CEE_LDNULL : Int = 0x0014
- final val CEE_LDC_I4_M1 : Int = 0x0015
- final val CEE_LDC_I4_0 : Int = 0x0016
- final val CEE_LDC_I4_1 : Int = 0x0017
- final val CEE_LDC_I4_2 : Int = 0x0018
- final val CEE_LDC_I4_3 : Int = 0x0019
- final val CEE_LDC_I4_4 : Int = 0x001A
- final val CEE_LDC_I4_5 : Int = 0x001B
- final val CEE_LDC_I4_6 : Int = 0x001C
- final val CEE_LDC_I4_7 : Int = 0x001D
- final val CEE_LDC_I4_8 : Int = 0x001E
- final val CEE_LDC_I4_S : Int = 0x001F
- final val CEE_LDC_I4 : Int = 0x0020
- final val CEE_LDC_I8 : Int = 0x0021
- final val CEE_LDC_R4 : Int = 0x0022
- final val CEE_LDC_R8 : Int = 0x0023
- final val CEE_UNUSED49 : Int = 0x0024
- final val CEE_DUP : Int = 0x0025
- final val CEE_POP : Int = 0x0026
- final val CEE_JMP : Int = 0x0027
- final val CEE_CALL : Int = 0x0028
- final val CEE_CALLI : Int = 0x0029
- final val CEE_RET : Int = 0x002A
- final val CEE_BR_S : Int = 0x002B
- final val CEE_BRFALSE_S : Int = 0x002C
- final val CEE_BRTRUE_S : Int = 0x002D
- final val CEE_BEQ_S : Int = 0x002E
- final val CEE_BGE_S : Int = 0x002F
- final val CEE_BGT_S : Int = 0x0030
- final val CEE_BLE_S : Int = 0x0031
- final val CEE_BLT_S : Int = 0x0032
- final val CEE_BNE_UN_S : Int = 0x0033
- final val CEE_BGE_UN_S : Int = 0x0034
- final val CEE_BGT_UN_S : Int = 0x0035
- final val CEE_BLE_UN_S : Int = 0x0036
- final val CEE_BLT_UN_S : Int = 0x0037
- final val CEE_BR : Int = 0x0038
- final val CEE_BRFALSE : Int = 0x0039
- final val CEE_BRTRUE : Int = 0x003A
- final val CEE_BEQ : Int = 0x003B
- final val CEE_BGE : Int = 0x003C
- final val CEE_BGT : Int = 0x003D
- final val CEE_BLE : Int = 0x003E
- final val CEE_BLT : Int = 0x003F
- final val CEE_BNE_UN : Int = 0x0040
- final val CEE_BGE_UN : Int = 0x0041
- final val CEE_BGT_UN : Int = 0x0042
- final val CEE_BLE_UN : Int = 0x0043
- final val CEE_BLT_UN : Int = 0x0044
- final val CEE_SWITCH : Int = 0x0045
- final val CEE_LDIND_I1 : Int = 0x0046
- final val CEE_LDIND_U1 : Int = 0x0047
- final val CEE_LDIND_I2 : Int = 0x0048
- final val CEE_LDIND_U2 : Int = 0x0049
- final val CEE_LDIND_I4 : Int = 0x004A
- final val CEE_LDIND_U4 : Int = 0x004B
- final val CEE_LDIND_I8 : Int = 0x004C
- final val CEE_LDIND_I : Int = 0x004D
- final val CEE_LDIND_R4 : Int = 0x004E
- final val CEE_LDIND_R8 : Int = 0x004F
- final val CEE_LDIND_REF : Int = 0x0050
- final val CEE_STIND_REF : Int = 0x0051
- final val CEE_STIND_I1 : Int = 0x0052
- final val CEE_STIND_I2 : Int = 0x0053
- final val CEE_STIND_I4 : Int = 0x0054
- final val CEE_STIND_I8 : Int = 0x0055
- final val CEE_STIND_R4 : Int = 0x0056
- final val CEE_STIND_R8 : Int = 0x0057
- final val CEE_ADD : Int = 0x0058
- final val CEE_SUB : Int = 0x0059
- final val CEE_MUL : Int = 0x005A
- final val CEE_DIV : Int = 0x005B
- final val CEE_DIV_UN : Int = 0x005C
- final val CEE_REM : Int = 0x005D
- final val CEE_REM_UN : Int = 0x005E
- final val CEE_AND : Int = 0x005F
- final val CEE_OR : Int = 0x0060
- final val CEE_XOR : Int = 0x0061
- final val CEE_SHL : Int = 0x0062
- final val CEE_SHR : Int = 0x0063
- final val CEE_SHR_UN : Int = 0x0064
- final val CEE_NEG : Int = 0x0065
- final val CEE_NOT : Int = 0x0066
- final val CEE_CONV_I1 : Int = 0x0067
- final val CEE_CONV_I2 : Int = 0x0068
- final val CEE_CONV_I4 : Int = 0x0069
- final val CEE_CONV_I8 : Int = 0x006A
- final val CEE_CONV_R4 : Int = 0x006B
- final val CEE_CONV_R8 : Int = 0x006C
- final val CEE_CONV_U4 : Int = 0x006D
- final val CEE_CONV_U8 : Int = 0x006E
- final val CEE_CALLVIRT : Int = 0x006F
- final val CEE_CPOBJ : Int = 0x0070
- final val CEE_LDOBJ : Int = 0x0071
- final val CEE_LDSTR : Int = 0x0072
- final val CEE_NEWOBJ : Int = 0x0073
- final val CEE_CASTCLASS : Int = 0x0074
- final val CEE_ISINST : Int = 0x0075
- final val CEE_CONV_R_UN : Int = 0x0076
- final val CEE_UNUSED58 : Int = 0x0077
- final val CEE_UNUSED1 : Int = 0x0078
- final val CEE_UNBOX : Int = 0x0079
- final val CEE_THROW : Int = 0x007A
- final val CEE_LDFLD : Int = 0x007B
- final val CEE_LDFLDA : Int = 0x007C
- final val CEE_STFLD : Int = 0x007D
- final val CEE_LDSFLD : Int = 0x007E
- final val CEE_LDSFLDA : Int = 0x007F
- final val CEE_STSFLD : Int = 0x0080
- final val CEE_STOBJ : Int = 0x0081
- final val CEE_CONV_OVF_I1_UN : Int = 0x0082
- final val CEE_CONV_OVF_I2_UN : Int = 0x0083
- final val CEE_CONV_OVF_I4_UN : Int = 0x0084
- final val CEE_CONV_OVF_I8_UN : Int = 0x0085
- final val CEE_CONV_OVF_U1_UN : Int = 0x0086
- final val CEE_CONV_OVF_U2_UN : Int = 0x0087
- final val CEE_CONV_OVF_U4_UN : Int = 0x0088
- final val CEE_CONV_OVF_U8_UN : Int = 0x0089
- final val CEE_CONV_OVF_I_UN : Int = 0x008A
- final val CEE_CONV_OVF_U_UN : Int = 0x008B
- final val CEE_BOX : Int = 0x008C
- final val CEE_NEWARR : Int = 0x008D
- final val CEE_LDLEN : Int = 0x008E
- final val CEE_LDELEMA : Int = 0x008F
- final val CEE_LDELEM_I1 : Int = 0x0090
- final val CEE_LDELEM_U1 : Int = 0x0091
- final val CEE_LDELEM_I2 : Int = 0x0092
- final val CEE_LDELEM_U2 : Int = 0x0093
- final val CEE_LDELEM_I4 : Int = 0x0094
- final val CEE_LDELEM_U4 : Int = 0x0095
- final val CEE_LDELEM_I8 : Int = 0x0096
- final val CEE_LDELEM_I : Int = 0x0097
- final val CEE_LDELEM_R4 : Int = 0x0098
- final val CEE_LDELEM_R8 : Int = 0x0099
- final val CEE_LDELEM_REF : Int = 0x009A
- final val CEE_STELEM_I : Int = 0x009B
- final val CEE_STELEM_I1 : Int = 0x009C
- final val CEE_STELEM_I2 : Int = 0x009D
- final val CEE_STELEM_I4 : Int = 0x009E
- final val CEE_STELEM_I8 : Int = 0x009F
- final val CEE_STELEM_R4 : Int = 0x00A0
- final val CEE_STELEM_R8 : Int = 0x00A1
- final val CEE_STELEM_REF : Int = 0x00A2
- final val CEE_UNUSED2 : Int = 0x00A3
- final val CEE_UNUSED3 : Int = 0x00A4
- final val CEE_UNUSED4 : Int = 0x00A5
- final val CEE_UNUSED5 : Int = 0x00A6
- final val CEE_UNUSED6 : Int = 0x00A7
- final val CEE_UNUSED7 : Int = 0x00A8
- final val CEE_UNUSED8 : Int = 0x00A9
- final val CEE_UNUSED9 : Int = 0x00AA
- final val CEE_UNUSED10 : Int = 0x00AB
- final val CEE_UNUSED11 : Int = 0x00AC
- final val CEE_UNUSED12 : Int = 0x00AD
- final val CEE_UNUSED13 : Int = 0x00AE
- final val CEE_UNUSED14 : Int = 0x00AF
- final val CEE_UNUSED15 : Int = 0x00B0
- final val CEE_UNUSED16 : Int = 0x00B1
- final val CEE_UNUSED17 : Int = 0x00B2
- final val CEE_CONV_OVF_I1 : Int = 0x00B3
- final val CEE_CONV_OVF_U1 : Int = 0x00B4
- final val CEE_CONV_OVF_I2 : Int = 0x00B5
- final val CEE_CONV_OVF_U2 : Int = 0x00B6
- final val CEE_CONV_OVF_I4 : Int = 0x00B7
- final val CEE_CONV_OVF_U4 : Int = 0x00B8
- final val CEE_CONV_OVF_I8 : Int = 0x00B9
- final val CEE_CONV_OVF_U8 : Int = 0x00BA
- final val CEE_UNUSED50 : Int = 0x00BB
- final val CEE_UNUSED18 : Int = 0x00BC
- final val CEE_UNUSED19 : Int = 0x00BD
- final val CEE_UNUSED20 : Int = 0x00BE
- final val CEE_UNUSED21 : Int = 0x00BF
- final val CEE_UNUSED22 : Int = 0x00C0
- final val CEE_UNUSED23 : Int = 0x00C1
- final val CEE_REFANYVAL : Int = 0x00C2
- final val CEE_CKFINITE : Int = 0x00C3
- final val CEE_UNUSED24 : Int = 0x00C4
- final val CEE_UNUSED25 : Int = 0x00C5
- final val CEE_MKREFANY : Int = 0x00C6
- final val CEE_UNUSED59 : Int = 0x00C7
- final val CEE_UNUSED60 : Int = 0x00C8
- final val CEE_UNUSED61 : Int = 0x00C9
- final val CEE_UNUSED62 : Int = 0x00CA
- final val CEE_UNUSED63 : Int = 0x00CB
- final val CEE_UNUSED64 : Int = 0x00CC
- final val CEE_UNUSED65 : Int = 0x00CD
- final val CEE_UNUSED66 : Int = 0x00CE
- final val CEE_UNUSED67 : Int = 0x00CF
- final val CEE_LDTOKEN : Int = 0x00D0
- final val CEE_CONV_U2 : Int = 0x00D1
- final val CEE_CONV_U1 : Int = 0x00D2
- final val CEE_CONV_I : Int = 0x00D3
- final val CEE_CONV_OVF_I : Int = 0x00D4
- final val CEE_CONV_OVF_U : Int = 0x00D5
- final val CEE_ADD_OVF : Int = 0x00D6
- final val CEE_ADD_OVF_UN : Int = 0x00D7
- final val CEE_MUL_OVF : Int = 0x00D8
- final val CEE_MUL_OVF_UN : Int = 0x00D9
- final val CEE_SUB_OVF : Int = 0x00DA
- final val CEE_SUB_OVF_UN : Int = 0x00DB
- final val CEE_ENDFINALLY : Int = 0x00DC
- final val CEE_LEAVE : Int = 0x00DD
- final val CEE_LEAVE_S : Int = 0x00DE
- final val CEE_STIND_I : Int = 0x00DF
- final val CEE_CONV_U : Int = 0x00E0
- final val CEE_UNUSED26 : Int = 0x00E1
- final val CEE_UNUSED27 : Int = 0x00E2
- final val CEE_UNUSED28 : Int = 0x00E3
- final val CEE_UNUSED29 : Int = 0x00E4
- final val CEE_UNUSED30 : Int = 0x00E5
- final val CEE_UNUSED31 : Int = 0x00E6
- final val CEE_UNUSED32 : Int = 0x00E7
- final val CEE_UNUSED33 : Int = 0x00E8
- final val CEE_UNUSED34 : Int = 0x00E9
- final val CEE_UNUSED35 : Int = 0x00EA
- final val CEE_UNUSED36 : Int = 0x00EB
- final val CEE_UNUSED37 : Int = 0x00EC
- final val CEE_UNUSED38 : Int = 0x00ED
- final val CEE_UNUSED39 : Int = 0x00EE
- final val CEE_UNUSED40 : Int = 0x00EF
- final val CEE_UNUSED41 : Int = 0x00F0
- final val CEE_UNUSED42 : Int = 0x00F1
- final val CEE_UNUSED43 : Int = 0x00F2
- final val CEE_UNUSED44 : Int = 0x00F3
- final val CEE_UNUSED45 : Int = 0x00F4
- final val CEE_UNUSED46 : Int = 0x00F5
- final val CEE_UNUSED47 : Int = 0x00F6
- final val CEE_UNUSED48 : Int = 0x00F7
- final val CEE_PREFIX7 : Int = 0x00F8
- final val CEE_PREFIX6 : Int = 0x00F9
- final val CEE_PREFIX5 : Int = 0x00FA
- final val CEE_PREFIX4 : Int = 0x00FB
- final val CEE_PREFIX3 : Int = 0x00FC
- final val CEE_PREFIX2 : Int = 0x00FD
- final val CEE_PREFIX1 : Int = 0x00FE
- final val CEE_PREFIXREF : Int = 0x00FF
-
- final val CEE_ARGLIST : Int = 0x0100
- final val CEE_CEQ : Int = 0x0101
- final val CEE_CGT : Int = 0x0102
- final val CEE_CGT_UN : Int = 0x0103
- final val CEE_CLT : Int = 0x0104
- final val CEE_CLT_UN : Int = 0x0105
- final val CEE_LDFTN : Int = 0x0106
- final val CEE_LDVIRTFTN : Int = 0x0107
- final val CEE_UNUSED56 : Int = 0x0108
- final val CEE_LDARG : Int = 0x0109
- final val CEE_LDARGA : Int = 0x010A
- final val CEE_STARG : Int = 0x010B
- final val CEE_LDLOC : Int = 0x010C
- final val CEE_LDLOCA : Int = 0x010D
- final val CEE_STLOC : Int = 0x010E
- final val CEE_LOCALLOC : Int = 0x010F
- final val CEE_UNUSED57 : Int = 0x0110
- final val CEE_ENDFILTER : Int = 0x0111
- final val CEE_UNALIGNED : Int = 0x0112
- final val CEE_VOLATILE : Int = 0x0113
- final val CEE_TAILCALL : Int = 0x0114
- final val CEE_INITOBJ : Int = 0x0115
- final val CEE_CONSTRAINED : Int = 0xFE16
- final val CEE_READONLY : Int = 0xFE1E
- final val CEE_UNUSED68 : Int = 0x0116
- final val CEE_CPBLK : Int = 0x0117
- final val CEE_INITBLK : Int = 0x0118
- final val CEE_UNUSED69 : Int = 0x0119
- final val CEE_RETHROW : Int = 0x011A
- final val CEE_UNUSED51 : Int = 0x011B
- final val CEE_SIZEOF : Int = 0x011C
- final val CEE_REFANYTYPE : Int = 0x011D
- final val CEE_UNUSED52 : Int = 0x011E
- final val CEE_UNUSED53 : Int = 0x011F
- final val CEE_UNUSED54 : Int = 0x0120
- final val CEE_UNUSED55 : Int = 0x0121
- final val CEE_UNUSED70 : Int = 0x0122
-
- final val CEE_ILLEGAL : Int = 0x0140
- final val CEE_MACRO_END : Int = 0x0141
-
- final val CEE_BRNULL : Int = 0x0180 // CEE_BRFALSE
- final val CEE_BRNULL_S : Int = 0x0181 // CEE_BRFALSE_S
- final val CEE_BRZERO : Int = 0x0182 // CEE_BRFALSE
- final val CEE_BRZERO_S : Int = 0x0183 // CEE_BRFALSE_S
- final val CEE_BRINST : Int = 0x0184 // CEE_BRTRUE
- final val CEE_BRINST_S : Int = 0x0185 // CEE_BRTRUE_S
- final val CEE_LDIND_U8 : Int = 0x0186 // CEE_LDIND_I8
- final val CEE_LDELEM_U8 : Int = 0x0187 // CEE_LDELEM_I8
- final val CEE_LDC_I4_M1x : Int = 0x0188 // CEE_LDC_I4_M1
- final val CEE_ENDFAULT : Int = 0x0189 // CEE_ENDFINALLY
-
- final val CEE_BRNONZERO : Int = 0x01C0 // CEE_BRTRUE
- final val CEE_BRNONZERO_S : Int = 0x01C1 // CEE_BRTRUE_S
-
- final val CEE_BRNOT : Int = 0x01C2
- final val CEE_BRNOT_S : Int = 0x01C3
- final val CEE_NOCODE : Int = 0x01C4
-
- final val CEE_count : Int = 0x0200
-
-
- //########################################################################
- // Opcode's amount and type of popped data
-
- final val POP_NONE : Byte = 0x00
- final val POP_1 : Byte = 0x01
- final val POP_1_1 : Byte = 0x02
- final val POP_I : Byte = 0x03
- final val POP_I_1 : Byte = 0x04
- final val POP_I_I : Byte = 0x05
- final val POP_I_I8 : Byte = 0x06
- final val POP_I_R4 : Byte = 0x07
- final val POP_I_R8 : Byte = 0x08
- final val POP_I_I_I : Byte = 0x09
- final val POP_REF : Byte = 0x0A
- final val POP_REF_1 : Byte = 0x0B
- final val POP_REF_I : Byte = 0x0C
- final val POP_REF_I_I : Byte = 0x0D
- final val POP_REF_I_I8 : Byte = 0x0E
- final val POP_REF_I_R4 : Byte = 0x0F
- final val POP_REF_I_R8 : Byte = 0x10
- final val POP_REF_I_REF : Byte = 0x11
- final val POP_SPECIAL : Byte = 0x12
- final val POP_count : Int = 0x13
- final val POP_size : Array[Byte] = new Array[Byte](POP_count)
-
- POP_size(POP_NONE) = 0
- POP_size(POP_1) = 1
- POP_size(POP_1_1) = 2
- POP_size(POP_I) = 1
- POP_size(POP_I_1) = 2
- POP_size(POP_I_I) = 2
- POP_size(POP_I_I8) = 2
- POP_size(POP_I_R4) = 2
- POP_size(POP_I_R8) = 2
- POP_size(POP_I_I_I) = 3
- POP_size(POP_REF) = 1
- POP_size(POP_REF_1) = 2
- POP_size(POP_REF_I) = 2
- POP_size(POP_REF_I_I) = 3
- POP_size(POP_REF_I_I8) = 3
- POP_size(POP_REF_I_R4) = 3
- POP_size(POP_REF_I_R8) = 3
- POP_size(POP_REF_I_REF) = 3
- POP_size(POP_SPECIAL) = -1
-
- //########################################################################
- // Opcode's amount and type of pushed data
-
- final val PUSH_NONE : Byte = 0x00
- final val PUSH_1 : Byte = 0x01
- final val PUSH_1_1 : Byte = 0x02
- final val PUSH_I : Byte = 0x03
- final val PUSH_I8 : Byte = 0x04
- final val PUSH_R4 : Byte = 0x05
- final val PUSH_R8 : Byte = 0x06
- final val PUSH_REF : Byte = 0x07
- final val PUSH_SPECIAL : Byte = 0x08
- final val PUSH_count : Int = 0x09
- final val PUSH_size : Array[Byte] = new Array[Byte](PUSH_count)
-
- PUSH_size(PUSH_NONE) = 0
- PUSH_size(PUSH_1) = 1
- PUSH_size(PUSH_1_1) = 2
- PUSH_size(PUSH_I) = 1
- PUSH_size(PUSH_I8) = 1
- PUSH_size(PUSH_R4) = 1
- PUSH_size(PUSH_R8) = 1
- PUSH_size(PUSH_REF) = 1
- PUSH_size(PUSH_SPECIAL) = -1
-
- //########################################################################
- // Opcode's amount of moved data
-
- final val POPUSH_SPECIAL : Byte = -128
-
- //########################################################################
- // Opcode's inline argument types
-
- final val INLINE_NONE : Byte = 0x00
- final val INLINE_VARIABLE_S : Byte = 0x01
- final val INLINE_TARGET_S : Byte = 0x02
- final val INLINE_I_S : Byte = 0x03
- final val INLINE_VARIABLE : Byte = 0x04
- final val INLINE_TARGET : Byte = 0x05
- final val INLINE_I : Byte = 0x06
- final val INLINE_I8 : Byte = 0x07
- final val INLINE_R : Byte = 0x08
- final val INLINE_R8 : Byte = 0x09
- final val INLINE_STRING : Byte = 0x0A
- final val INLINE_TYPE : Byte = 0x0B
- final val INLINE_FIELD : Byte = 0x0C
- final val INLINE_METHOD : Byte = 0x0D
- final val INLINE_SIGNATURE : Byte = 0x0E
- final val INLINE_TOKEN : Byte = 0x0F
- final val INLINE_SWITCH : Byte = 0x10
- final val INLINE_count : Int = 0x11
- final val INLINE_length : Array[Byte] = new Array[Byte](INLINE_count)
-
- INLINE_length(INLINE_NONE) = 0
- INLINE_length(INLINE_VARIABLE_S) = 1
- INLINE_length(INLINE_TARGET_S) = 1
- INLINE_length(INLINE_I_S) = 1
- INLINE_length(INLINE_VARIABLE) = 2
- INLINE_length(INLINE_TARGET) = 4
- INLINE_length(INLINE_I) = 4
- INLINE_length(INLINE_I8) = 8
- INLINE_length(INLINE_R) = 4
- INLINE_length(INLINE_R8) = 8
- INLINE_length(INLINE_STRING) = 4
- INLINE_length(INLINE_TYPE) = 4
- INLINE_length(INLINE_FIELD) = 4
- INLINE_length(INLINE_METHOD) = 4
- INLINE_length(INLINE_SIGNATURE) = 4
- INLINE_length(INLINE_SWITCH) = 4
- INLINE_length(INLINE_TOKEN) = 4
-
- //########################################################################
- // Opcode's control flow implications
-
- final val FLOW_META : Byte = 0x00
- final val FLOW_NEXT : Byte = 0x01
- final val FLOW_BRANCH : Byte = 0x02
- final val FLOW_COND_BRANCH : Byte = 0x03
- final val FLOW_BREAK : Byte = 0x04
- final val FLOW_CALL : Byte = 0x05
- final val FLOW_RETURN : Byte = 0x06
- final val FLOW_THROW : Byte = 0x07
- final val FLOW_count : Int = 0x08
-
- //########################################################################
- // Init methods for Opcode
-
- def opcode(that: OpCode, opcode: Int, string: String, code: Int,
- pop: Byte, push: Byte, inline: Byte, flow: Byte) {
- that.CEE_opcode = opcode
- that.CEE_string = string
- that.CEE_code = code.toShort
- that.CEE_pop = pop
- that.CEE_push = push
- that.CEE_inline = inline
- that.CEE_flow = flow
- that.CEE_length = that.length()
- that.CEE_popush = that.popush()
- }
-
- def length(code: Int): Byte = {
- if ((code & 0xFFFFFF00) == 0xFFFFFF00) return 1
- if ((code & 0xFFFFFF00) == 0xFFFFFE00) return 2
- return 0
- }
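To make the two helpers above concrete, a worked example for one of the opcodes defined further below (added for illustration; not part of the deleted file):

  // Ldc_I4_S is the one-byte opcode 0x1F followed by a 1-byte inline immediate
  // (INLINE_I_S), so CEE_length == length(0xFFFFFF1F) + INLINE_length(INLINE_I_S)
  //                             == 1 + 1 == 2.
  // It pops nothing (POP_NONE, size 0) and pushes one slot (PUSH_I, size 1), so
  // CEE_popush == PUSH_size(PUSH_I) - POP_size(POP_NONE) == 1.
  // Opcodes marked POP_SPECIAL/PUSH_SPECIAL (e.g. Call) have table size -1, so
  // popush() yields POPUSH_SPECIAL rather than a fixed stack delta.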
-
- //########################################################################
- // case OpCode
-
- /**
- * Adds two values and pushes the result onto the evaluation stack.
- */
- final val Add = new OpCode()
- opcode(Add, CEE_ADD, "add", 0xFFFFFF58, POP_1_1, PUSH_1, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Fills space if bytecodes are patched. No meaningful operation is performed
- * although a processing cycle can be consumed.
- */
- final val Nop = new OpCode()
- opcode(Nop, CEE_NOP, "nop", 0xFFFFFF00, POP_NONE, PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Signals the Common Language Infrastructure (CLI) to inform the debugger that
- * a break point has been tripped.
- */
- final val Break = new OpCode()
- opcode(Break, CEE_BREAK, "break" , 0xFFFFFF01, POP_NONE, PUSH_NONE , INLINE_NONE , FLOW_BREAK)
-
- /**
- * Loads the argument at index 0 onto the evaluation stack.
- */
- final val Ldarg_0 = new OpCode()
- opcode(Ldarg_0, CEE_LDARG_0 , "ldarg.0" , 0xFFFFFF02, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the argument at index 1 onto the evaluation stack.
- */
- final val Ldarg_1 = new OpCode()
- opcode(Ldarg_1, CEE_LDARG_1 , "ldarg.1" , 0xFFFFFF03, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the argument at index 2 onto the evaluation stack.
- */
- final val Ldarg_2 = new OpCode()
- opcode(Ldarg_2, CEE_LDARG_2 , "ldarg.2" , 0xFFFFFF04, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the argument at index 3 onto the evaluation stack.
- */
- final val Ldarg_3 = new OpCode()
- opcode(Ldarg_3, CEE_LDARG_3 , "ldarg.3" , 0xFFFFFF05, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the local variable at index 0 onto the evaluation stack.
- */
- final val Ldloc_0 = new OpCode()
- opcode(Ldloc_0, CEE_LDLOC_0 , "ldloc.0" , 0xFFFFFF06, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the local variable at index 1 onto the evaluation stack.
- */
- final val Ldloc_1 = new OpCode()
- opcode(Ldloc_1, CEE_LDLOC_1 , "ldloc.1" , 0xFFFFFF07, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the local variable at index 2 onto the evaluation stack.
- */
- final val Ldloc_2 = new OpCode()
- opcode(Ldloc_2, CEE_LDLOC_2 , "ldloc.2" , 0xFFFFFF08, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the local variable at index 3 onto the evaluation stack.
- */
- final val Ldloc_3 = new OpCode()
- opcode(Ldloc_3, CEE_LDLOC_3 , "ldloc.3" , 0xFFFFFF09, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Pops the current value from the top of the evaluation stack and
- * stores it in the local variable list at index 0.
- */
- final val Stloc_0 = new OpCode()
- opcode(Stloc_0, CEE_STLOC_0 , "stloc.0" , 0xFFFFFF0A, POP_1 , PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Pops the current value from the top of the evaluation stack and
- * stores it in the local variable list at index 1.
- */
- final val Stloc_1 = new OpCode()
- opcode(Stloc_1, CEE_STLOC_1 , "stloc.1" , 0xFFFFFF0B, POP_1 , PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Pops the current value from the top of the evaluation stack and
- * stores it in the local variable list at index 2.
- */
- final val Stloc_2 = new OpCode()
- opcode(Stloc_2, CEE_STLOC_2 , "stloc.2" , 0xFFFFFF0C, POP_1 , PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Pops the current value from the top of the evaluation stack and
- * stores it in the local variable list at index 3.
- */
- final val Stloc_3 = new OpCode()
- opcode(Stloc_3, CEE_STLOC_3 , "stloc.3" , 0xFFFFFF0D, POP_1 , PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the argument (referenced by a specified short form index)
- * onto the evaluation stack.
- */
- final val Ldarg_S = new OpCode()
- opcode(Ldarg_S, CEE_LDARG_S , "ldarg.s" , 0xFFFFFF0E, POP_NONE, PUSH_1 , INLINE_VARIABLE_S, FLOW_NEXT)
-
- /**
- * Loads an argument address, in short form, onto the evaluation stack.
- */
- final val Ldarga_S = new OpCode()
- opcode(Ldarga_S, CEE_LDARGA_S , "ldarga.s" , 0xFFFFFF0F, POP_NONE, PUSH_I , INLINE_VARIABLE_S, FLOW_NEXT)
-
- /**
- * Loads the local variable at a specific index onto the evaluation stack,
- * short form.
- */
- final val Ldloc_S = new OpCode()
- opcode(Ldloc_S, CEE_LDLOC_S , "ldloc.s" , 0xFFFFFF11, POP_NONE, PUSH_1 , INLINE_VARIABLE_S, FLOW_NEXT)
-
- /**
- * Loads the address of the local variable at a specific index onto
- * the evaluation stack, short form.
- */
- final val Ldloca_S = new OpCode()
- opcode(Ldloca_S, CEE_LDLOCA_S , "ldloca.s" , 0xFFFFFF12, POP_NONE, PUSH_I , INLINE_VARIABLE_S, FLOW_NEXT)
-
- /**
- * Stores the value on top of the evaluation stack in the argument slot
- * at a specified index, short form.
- */
- final val Starg_S = new OpCode()
- opcode(Starg_S, CEE_STARG_S , "starg.s" , 0xFFFFFF10, POP_1 , PUSH_NONE , INLINE_VARIABLE_S, FLOW_NEXT)
-
- /**
- * Pops the current value from the top of the evaluation stack and stores it
- * in the local variable list at the given index (short form).
- */
- final val Stloc_S = new OpCode()
- opcode(Stloc_S, CEE_STLOC_S , "stloc.s" , 0xFFFFFF13, POP_1 , PUSH_NONE, INLINE_VARIABLE_S, FLOW_NEXT)
-
- /**
- * Pushes a null reference (type O) onto the evaluation stack.
- */
- final val Ldnull = new OpCode()
- opcode(Ldnull, CEE_LDNULL , "ldnull" , 0xFFFFFF14, POP_NONE, PUSH_REF , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the integer value of -1 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_M1 = new OpCode()
- opcode(Ldc_I4_M1, CEE_LDC_I4_M1, "ldc.i4.m1", 0xFFFFFF15, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the integer value of 0 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_0 = new OpCode()
- opcode(Ldc_I4_0, CEE_LDC_I4_0 , "ldc.i4.0" , 0xFFFFFF16, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the integer value of 1 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_1 = new OpCode()
- opcode(Ldc_I4_1, CEE_LDC_I4_1 , "ldc.i4.1" , 0xFFFFFF17, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the integer value of 2 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_2 = new OpCode()
- opcode(Ldc_I4_2, CEE_LDC_I4_2 , "ldc.i4.2" , 0xFFFFFF18, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the integer value of 3 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_3 = new OpCode()
- opcode(Ldc_I4_3, CEE_LDC_I4_3 , "ldc.i4.3" , 0xFFFFFF19, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the integer value of 4 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_4 = new OpCode()
- opcode(Ldc_I4_4, CEE_LDC_I4_4 , "ldc.i4.4" , 0xFFFFFF1A, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the integer value of 5 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_5 = new OpCode()
- opcode(Ldc_I4_5, CEE_LDC_I4_5 , "ldc.i4.5" , 0xFFFFFF1B, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the integer value of 6 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_6 = new OpCode()
- opcode(Ldc_I4_6, CEE_LDC_I4_6 , "ldc.i4.6", 0xFFFFFF1C, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the integer value of 7 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_7 = new OpCode()
- opcode(Ldc_I4_7, CEE_LDC_I4_7 , "ldc.i4.7", 0xFFFFFF1D, POP_NONE , PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the integer value of 8 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_8 = new OpCode()
- opcode(Ldc_I4_8, CEE_LDC_I4_8 , "ldc.i4.8", 0xFFFFFF1E, POP_NONE , PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the supplied int8 value onto the evaluation stack as an int32, short form.
- */
- final val Ldc_I4_S = new OpCode()
- opcode(Ldc_I4_S, CEE_LDC_I4_S , "ldc.i4.s", 0xFFFFFF1F, POP_NONE , PUSH_I, INLINE_I_S, FLOW_NEXT)
-
- /**
- * Pushes a supplied value of type int32 onto the evaluation stack as an int32.
- */
- final val Ldc_I4 = new OpCode()
- opcode(Ldc_I4, CEE_LDC_I4, "ldc.i4" , 0xFFFFFF20, POP_NONE , PUSH_I, INLINE_I , FLOW_NEXT)
-
- /**
- * Pushes a supplied value of type int64 onto the evaluation stack as an int64.
- */
- final val Ldc_I8 = new OpCode()
- opcode(Ldc_I8, CEE_LDC_I8, "ldc.i8" , 0xFFFFFF21, POP_NONE , PUSH_I8, INLINE_I8 , FLOW_NEXT)
-
- /**
- * Pushes a supplied value of type float32 onto the evaluation stack as type F (float).
- */
- final val Ldc_R4 = new OpCode()
- opcode(Ldc_R4, CEE_LDC_R4, "ldc.r4" , 0xFFFFFF22, POP_NONE , PUSH_R4, INLINE_R , FLOW_NEXT)
-
- /**
- * Pushes a supplied value of type float64 onto the evaluation stack as type F (float).
- */
- final val Ldc_R8 = new OpCode()
- opcode(Ldc_R8, CEE_LDC_R8, "ldc.r8" , 0xFFFFFF23, POP_NONE , PUSH_R8, INLINE_R8 , FLOW_NEXT)
-
- /**
- * Copies the current topmost value on the evaluation stack, and then pushes the copy
- * onto the evaluation stack.
- */
- final val Dup = new OpCode()
- opcode(Dup, CEE_DUP , "dup" , 0xFFFFFF25, POP_1 , PUSH_1_1 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Removes the value currently on top of the evaluation stack.
- */
- final val Pop = new OpCode()
- opcode(Pop, CEE_POP , "pop" , 0xFFFFFF26, POP_1 , PUSH_NONE , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Exits current method and jumps to specified method.
- */
- final val Jmp = new OpCode()
- opcode(Jmp, CEE_JMP , "jmp" , 0xFFFFFF27, POP_NONE , PUSH_NONE , INLINE_METHOD, FLOW_CALL)
-
- /**
- * Calls the method indicated by the passed method descriptor.
- */
- final val Call = new OpCode()
- opcode(Call, CEE_CALL , "call" , 0xFFFFFF28, POP_SPECIAL, PUSH_SPECIAL, INLINE_METHOD , FLOW_CALL)
-
- /**
- * constrained prefix
- */
- final val Constrained = new OpCode()
- opcode(Constrained, CEE_CONSTRAINED , "constrained." , 0xFFFFFE16, POP_NONE, PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
- /**
- * readonly prefix
- */
- final val Readonly = new OpCode()
- opcode(Readonly, CEE_READONLY , "readonly." , 0xFFFFFE1E, POP_NONE, PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Calls the method indicated on the evaluation stack (as a pointer to an entry point)
- * with arguments described by a calling convention.
- */
- final val Calli = new OpCode()
- opcode(Calli, CEE_CALLI, "calli" , 0xFFFFFF29, POP_SPECIAL, PUSH_SPECIAL, INLINE_SIGNATURE , FLOW_CALL)
-
- /**
- * Returns from the current method, pushing a return value (if present) from the callee's
- * evaluation stack onto the caller's evaluation stack.
- */
- final val Ret = new OpCode()
- opcode(Ret, CEE_RET , "ret" , 0xFFFFFF2A, POP_SPECIAL, PUSH_NONE, INLINE_NONE , FLOW_RETURN)
-
- /**
- * Unconditionally transfers control to a target instruction (short form).
- */
- final val Br_S = new OpCode()
- opcode(Br_S, CEE_BR_S , "br.s" , 0xFFFFFF2B, POP_NONE, PUSH_NONE, INLINE_TARGET_S , FLOW_BRANCH)
-
- /**
- * Transfers control to a target instruction if value is false, a null reference, or zero.
- */
- final val Brfalse_S = new OpCode()
- opcode(Brfalse_S, CEE_BRFALSE_S,"brfalse.s", 0xFFFFFF2C, POP_I, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) if value is true, not null, or non-zero.
- */
- final val Brtrue_S = new OpCode()
- opcode(Brtrue_S, CEE_BRTRUE_S , "brtrue.s", 0xFFFFFF2D, POP_I, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) if two values are equal.
- */
- final val Beq_S = new OpCode()
- opcode(Beq_S, CEE_BEQ_S, "beq.s", 0xFFFFFF2E, POP_1_1 , PUSH_NONE, INLINE_TARGET_S , FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) if the first value is greater than
- * or equal to the second value.
- */
- final val Bge_S = new OpCode()
- opcode(Bge_S, CEE_BGE_S, "bge.s", 0xFFFFFF2F, POP_1_1 , PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) if the first value is greater than
- * the second value.
- */
- final val Bgt_S = new OpCode()
- opcode(Bgt_S, CEE_BGT_S, "bgt.s" , 0xFFFFFF30, POP_1_1 , PUSH_NONE, INLINE_TARGET_S , FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) if the first value is less than
- * or equal to the second value.
- */
- final val Ble_S = new OpCode()
- opcode(Ble_S, CEE_BLE_S, "ble.s" , 0xFFFFFF31, POP_1_1 , PUSH_NONE, INLINE_TARGET_S , FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) if the first value is less than
- * the second value.
- */
- final val Blt_S = new OpCode()
- opcode(Blt_S, CEE_BLT_S, "blt.s", 0xFFFFFF32, POP_1_1, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) when two unsigned integer values
- * or unordered float values are not equal.
- */
- final val Bne_Un_S = new OpCode()
- opcode(Bne_Un_S, CEE_BNE_UN_S, "bne.un.s", 0xFFFFFF33, POP_1_1 , PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) if the first value is greater
- * than the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Bge_Un_S = new OpCode()
- opcode(Bge_Un_S, CEE_BGE_UN_S, "bge.un.s", 0xFFFFFF34, POP_1_1, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) if the first value is greater than
- * the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Bgt_Un_S = new OpCode()
- opcode(Bgt_Un_S, CEE_BGT_UN_S, "bgt.un.s", 0xFFFFFF35, POP_1_1, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) if the first value is less than
- * or equal to the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Ble_Un_S = new OpCode()
- opcode(Ble_Un_S, CEE_BLE_UN_S , "ble.un.s", 0xFFFFFF36, POP_1_1, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) if the first value is less than
- * the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Blt_Un_S = new OpCode()
- opcode(Blt_Un_S, CEE_BLT_UN_S, "blt.un.s", 0xFFFFFF37, POP_1_1, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
- /**
- * Unconditionally transfers control to a target instruction.
- */
- final val Br = new OpCode()
- opcode(Br, CEE_BR , "br" , 0xFFFFFF38, POP_NONE, PUSH_NONE, INLINE_TARGET, FLOW_BRANCH)
-
- /**
- * Transfers control to a target instruction if value is false, a null reference
- * (Nothing in Visual Basic), or zero.
- */
- final val Brfalse = new OpCode()
- opcode(Brfalse, CEE_BRFALSE, "brfalse", 0xFFFFFF39, POP_I, PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction if value is true, not null, or non-zero.
- */
- final val Brtrue = new OpCode()
- opcode(Brtrue, CEE_BRTRUE , "brtrue", 0xFFFFFF3A, POP_I , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction if two values are equal.
- */
- final val Beq = new OpCode()
- opcode(Beq, CEE_BEQ, "beq", 0xFFFFFF3B, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction if the first value is greater than or
- * equal to the second value.
- */
- final val Bge = new OpCode()
- opcode(Bge, CEE_BGE, "bge", 0xFFFFFF3C, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction if the first value is greater than the second value.
- */
- final val Bgt = new OpCode()
- opcode(Bgt, CEE_BGT, "bgt", 0xFFFFFF3D, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction if the first value is less than or equal
- * to the second value.
- */
- final val Ble = new OpCode()
- opcode(Ble, CEE_BLE, "ble", 0xFFFFFF3E, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction if the first value is less than the second value.
- */
- final val Blt = new OpCode()
- opcode(Blt, CEE_BLT, "blt", 0xFFFFFF3F, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction when two unsigned integer values or
- * unordered float values are not equal.
- */
- final val Bne_Un = new OpCode()
- opcode(Bne_Un, CEE_BNE_UN , "bne.un", 0xFFFFFF40, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction if the first value is greater than
- * the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Bge_Un = new OpCode()
- opcode(Bge_Un, CEE_BGE_UN , "bge.un", 0xFFFFFF41, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction if the first value is greater than the
- * second value, when comparing unsigned integer values or unordered float values.
- */
- final val Bgt_Un = new OpCode()
- opcode(Bgt_Un, CEE_BGT_UN , "bgt.un", 0xFFFFFF42, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction if the first value is less than or equal to
- * the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Ble_Un = new OpCode()
- opcode(Ble_Un, CEE_BLE_UN , "ble.un" , 0xFFFFFF43, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction if the first value is less than the second value,
- * when comparing unsigned integer values or unordered float values.
- */
- final val Blt_Un = new OpCode()
- opcode(Blt_Un, CEE_BLT_UN , "blt.un", 0xFFFFFF44, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Implements a jump table.
- */
- final val Switch = new OpCode()
- opcode(Switch, CEE_SWITCH , "switch", 0xFFFFFF45, POP_I , PUSH_NONE, INLINE_SWITCH, FLOW_COND_BRANCH)
-
- /**
- * Loads a value of type int8 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_I1 = new OpCode()
- opcode(Ldind_I1, CEE_LDIND_I1 , "ldind.i1" , 0xFFFFFF46, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Loads a value of type int16 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_I2 = new OpCode()
- opcode(Ldind_I2, CEE_LDIND_I2 , "ldind.i2" , 0xFFFFFF48, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Loads a value of type int32 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_I4 = new OpCode()
- opcode(Ldind_I4, CEE_LDIND_I4 , "ldind.i4" , 0xFFFFFF4A, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Loads a value of type int64 as an int64 onto the evaluation stack indirectly.
- */
- final val Ldind_I8 = new OpCode()
- opcode(Ldind_I8, CEE_LDIND_I8 , "ldind.i8" , 0xFFFFFF4C, POP_I , PUSH_I8 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Loads a value of type natural int as a natural int onto the evaluation stack indirectly.
- */
- final val Ldind_I = new OpCode()
- opcode(Ldind_I, CEE_LDIND_I , "ldind.i" , 0xFFFFFF4D, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Loads a value of type float32 as a type F (float) onto the evaluation stack indirectly.
- */
- final val Ldind_R4 = new OpCode()
- opcode(Ldind_R4, CEE_LDIND_R4 , "ldind.r4" , 0xFFFFFF4E, POP_I , PUSH_R4 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Loads a value of type float64 as a type F (float) onto the evaluation stack indirectly.
- */
- final val Ldind_R8 = new OpCode()
- opcode(Ldind_R8, CEE_LDIND_R8 , "ldind.r8" , 0xFFFFFF4F, POP_I , PUSH_R8 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Loads an object reference as a type O (object reference) onto the evaluation stack indirectly.
- */
- final val Ldind_Ref = new OpCode()
- opcode(Ldind_Ref, CEE_LDIND_REF, "ldind.ref", 0xFFFFFF50, POP_I , PUSH_REF, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Loads a value of type unsigned int8 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_U1 = new OpCode()
- opcode(Ldind_U1, CEE_LDIND_U1 , "ldind.u1" , 0xFFFFFF47, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Loads a value of type unsigned int16 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_U2 = new OpCode()
- opcode(Ldind_U2, CEE_LDIND_U2 , "ldind.u2" , 0xFFFFFF49, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Loads a value of type unsigned int32 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_U4 = new OpCode()
- opcode(Ldind_U4, CEE_LDIND_U4 , "ldind.u4" , 0xFFFFFF4B, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Stores an object reference value at a supplied address.
- */
- final val Stind_Ref = new OpCode()
- opcode(Stind_Ref, CEE_STIND_REF, "stind.ref", 0xFFFFFF51, POP_I_I , PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Stores a value of type int8 at a supplied address.
- */
- final val Stind_I1 = new OpCode()
- opcode(Stind_I1, CEE_STIND_I1 , "stind.i1", 0xFFFFFF52, POP_I_I , PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Stores a value of type int16 at a supplied address.
- */
- final val Stind_I2 = new OpCode()
- opcode(Stind_I2, CEE_STIND_I2 , "stind.i2", 0xFFFFFF53, POP_I_I , PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Stores a value of type int32 at a supplied address.
- */
- final val Stind_I4 = new OpCode()
- opcode(Stind_I4, CEE_STIND_I4 , "stind.i4", 0xFFFFFF54, POP_I_I , PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Stores a value of type int64 at a supplied address.
- */
- final val Stind_I8 = new OpCode()
- opcode(Stind_I8, CEE_STIND_I8 , "stind.i8", 0xFFFFFF55, POP_I_I8, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Stores a value of type float32 at a supplied address.
- */
- final val Stind_R4 = new OpCode()
- opcode(Stind_R4, CEE_STIND_R4 , "stind.r4", 0xFFFFFF56, POP_I_R4, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Stores a value of type float64 at a supplied address.
- */
- final val Stind_R8 = new OpCode()
- opcode(Stind_R8, CEE_STIND_R8 , "stind.r8", 0xFFFFFF57, POP_I_R8, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Subtracts one value from another and pushes the result onto the evaluation stack.
- */
- final val Sub = new OpCode()
- opcode(Sub, CEE_SUB, "sub" , 0xFFFFFF59, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Multiplies two values and pushes the result on the evaluation stack.
- */
- final val Mul = new OpCode()
- opcode(Mul, CEE_MUL, "mul" , 0xFFFFFF5A, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Divides two values and pushes the result as a floating-point (type F) or
- * quotient (type int32) onto the evaluation stack.
- */
- final val Div = new OpCode()
- opcode(Div, CEE_DIV, "div" , 0xFFFFFF5B, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Divides two unsigned integer values and pushes the result (int32) onto the evaluation stack.
- */
- final val Div_Un = new OpCode()
- opcode(Div_Un, CEE_DIV_UN, "div.un" , 0xFFFFFF5C, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Divides two values and pushes the remainder onto the evaluation stack.
- */
- final val Rem = new OpCode()
- opcode(Rem, CEE_REM , "rem" , 0xFFFFFF5D, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Divides two unsigned values and pushes the remainder onto the evaluation stack.
- */
- final val Rem_Un = new OpCode()
- opcode(Rem_Un, CEE_REM_UN, "rem.un" , 0xFFFFFF5E, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Computes the bitwise AND of two values and pushes the result onto the evaluation stack.
- */
- final val And = new OpCode()
- opcode(And, CEE_AND, "and" , 0xFFFFFF5F, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Computes the bitwise OR of the two integer values on top of the stack and
- * pushes the result onto the evaluation stack.
- */
- final val Or = new OpCode()
- opcode(Or, CEE_OR , "or" , 0xFFFFFF60, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Computes the bitwise XOR of the top two values on the evaluation stack,
- * pushing the result onto the evaluation stack.
- */
- final val Xor = new OpCode()
- opcode(Xor, CEE_XOR, "xor" , 0xFFFFFF61, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Shifts an integer value to the left (in zeroes) by a specified number of bits,
- * pushing the result onto the evaluation stack.
- */
- final val Shl = new OpCode()
- opcode(Shl, CEE_SHL, "shl" , 0xFFFFFF62, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Shifts an integer value (in sign) to the right by a specified number of bits,
- * pushing the result onto the evaluation stack.
- */
- final val Shr = new OpCode()
- opcode(Shr, CEE_SHR, "shr" , 0xFFFFFF63, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Shifts an unsigned integer value (in zeroes) to the right by a specified number of bits,
- * pushing the result onto the evaluation stack.
- */
- final val Shr_Un = new OpCode()
- opcode(Shr_Un, CEE_SHR_UN, "shr.un" , 0xFFFFFF64, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Negates a value and pushes the result onto the evaluation stack.
- */
- final val Neg = new OpCode()
- opcode(Neg, CEE_NEG , "neg" , 0xFFFFFF65, POP_1 , PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Computes the bitwise complement of the integer value on top of the stack and pushes
- * the result onto the evaluation stack as the same type.
- */
- final val Not = new OpCode()
- opcode(Not, CEE_NOT , "not" , 0xFFFFFF66, POP_1 , PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to int8, then extends (pads) it to int32.
- */
- final val Conv_I1 = new OpCode()
- opcode(Conv_I1, CEE_CONV_I1, "conv.i1", 0xFFFFFF67, POP_1 , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to int16, then extends (pads) it to int32.
- */
- final val Conv_I2 = new OpCode()
- opcode(Conv_I2, CEE_CONV_I2, "conv.i2", 0xFFFFFF68, POP_1 , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to int32.
- */
- final val Conv_I4 = new OpCode()
- opcode(Conv_I4, CEE_CONV_I4, "conv.i4", 0xFFFFFF69, POP_1 , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to int64.
- */
- final val Conv_I8 = new OpCode()
- opcode(Conv_I8, CEE_CONV_I8, "conv.i8", 0xFFFFFF6A, POP_1 , PUSH_I8, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to float32.
- */
- final val Conv_R4 = new OpCode()
- opcode(Conv_R4, CEE_CONV_R4, "conv.r4", 0xFFFFFF6B, POP_1 , PUSH_R4, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to float64.
- */
- final val Conv_R8 = new OpCode()
- opcode(Conv_R8, CEE_CONV_R8, "conv.r8", 0xFFFFFF6C, POP_1 , PUSH_R8, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to unsigned int32, and extends it to int32.
- */
- final val Conv_U4 = new OpCode()
- opcode(Conv_U4, CEE_CONV_U4, "conv.u4", 0xFFFFFF6D, POP_1 , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to unsigned int64, and extends it to int64.
- */
- final val Conv_U8 = new OpCode()
- opcode(Conv_U8, CEE_CONV_U8, "conv.u8", 0xFFFFFF6E, POP_1 , PUSH_I8, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Calls a late-bound method on an object, pushing the return value onto the evaluation stack.
- */
- final val Callvirt = new OpCode()
- opcode(Callvirt, CEE_CALLVIRT, "callvirt", 0xFFFFFF6F,POP_SPECIAL,PUSH_SPECIAL,INLINE_METHOD,FLOW_CALL)
-
- /**
- * Copies the value type located at the address of an object (type &, * or natural int)
- * to the address of the destination object (type &, * or natural int).
- */
- final val Cpobj = new OpCode()
- opcode(Cpobj, CEE_CPOBJ , "cpobj" , 0xFFFFFF70, POP_I_I , PUSH_NONE, INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Copies the value type object pointed to by an address to the top of the evaluation stack.
- */
- final val Ldobj = new OpCode()
- opcode(Ldobj, CEE_LDOBJ , "ldobj" , 0xFFFFFF71, POP_I , PUSH_1 , INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Pushes a new object reference to a string literal stored in the metadata.
- */
- final val Ldstr = new OpCode()
- opcode(Ldstr, CEE_LDSTR , "ldstr" , 0xFFFFFF72, POP_NONE , PUSH_REF , INLINE_STRING, FLOW_NEXT)
-
- /**
- * Creates a new object or a new instance of a value type, pushing an object reference
- * (type O) onto the evaluation stack.
- */
- final val Newobj = new OpCode()
- opcode(Newobj, CEE_NEWOBJ, "newobj", 0xFFFFFF73, POP_SPECIAL , PUSH_REF , INLINE_METHOD, FLOW_CALL)
-
- /**
- * Attempts to cast an object passed by reference to the specified class.
- */
- final val Castclass = new OpCode()
- opcode(Castclass, CEE_CASTCLASS, "castclass", 0xFFFFFF74, POP_REF , PUSH_REF , INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Tests whether an object reference (type O) is an instance of a particular class.
- */
- final val Isinst = new OpCode()
- opcode(Isinst, CEE_ISINST , "isinst" , 0xFFFFFF75, POP_REF , PUSH_I , INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Converts the unsigned integer value on top of the evaluation stack to float32.
- */
- final val Conv_R_Un = new OpCode()
- opcode(Conv_R_Un, CEE_CONV_R_UN, "conv.r.un", 0xFFFFFF76, POP_1 , PUSH_R8 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the boxed representation of a value type to its unboxed form.
- */
- final val Unbox = new OpCode()
- opcode(Unbox, CEE_UNBOX , "unbox" , 0xFFFFFF79, POP_REF , PUSH_I , INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Throws the exception object currently on the evaluation stack.
- */
- final val Throw = new OpCode()
- opcode(Throw, CEE_THROW , "throw" , 0xFFFFFF7A, POP_REF , PUSH_NONE, INLINE_NONE , FLOW_THROW)
-
- /**
- * Finds the value of a field in the object whose reference is currently
- * on the evaluation stack.
- */
- final val Ldfld = new OpCode()
- opcode(Ldfld, CEE_LDFLD , "ldfld" , 0xFFFFFF7B, POP_REF , PUSH_1 , INLINE_FIELD , FLOW_NEXT)
-
- /**
- * Finds the address of a field in the object whose reference is currently
- * on the evaluation stack.
- */
- final val Ldflda = new OpCode()
- opcode(Ldflda, CEE_LDFLDA , "ldflda" , 0xFFFFFF7C, POP_REF , PUSH_I , INLINE_FIELD , FLOW_NEXT)
-
- /**
- * Pushes the value of a static field onto the evaluation stack.
- */
- final val Ldsfld = new OpCode()
- opcode(Ldsfld, CEE_LDSFLD , "ldsfld" , 0xFFFFFF7E, POP_NONE , PUSH_1 , INLINE_FIELD , FLOW_NEXT)
-
- /**
- * Pushes the address of a static field onto the evaluation stack.
- */
- final val Ldsflda = new OpCode()
- opcode(Ldsflda, CEE_LDSFLDA, "ldsflda", 0xFFFFFF7F, POP_NONE , PUSH_I , INLINE_FIELD , FLOW_NEXT)
-
- /**
- * Replaces the value stored in the field of an object reference or pointer with a new value.
- */
- final val Stfld = new OpCode()
- opcode(Stfld, CEE_STFLD , "stfld" , 0xFFFFFF7D, POP_REF_1, PUSH_NONE, INLINE_FIELD , FLOW_NEXT)
-
- /**
- * Replaces the value of a static field with a value from the evaluation stack.
- */
- final val Stsfld = new OpCode()
- opcode(Stsfld, CEE_STSFLD , "stsfld" , 0xFFFFFF80, POP_1 , PUSH_NONE, INLINE_FIELD , FLOW_NEXT)
-
- /**
- * Copies a value of a specified type from the evaluation stack into a supplied memory address.
- */
- final val Stobj = new OpCode()
- opcode(Stobj, CEE_STOBJ , "stobj" , 0xFFFFFF81, POP_I_1, PUSH_NONE, INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Converts the unsigned value on top of the evaluation stack to signed int8 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I1_Un = new OpCode()
- opcode(Conv_Ovf_I1_Un, CEE_CONV_OVF_I1_UN, "conv.ovf.i1.un", 0xFFFFFF82, POP_1,PUSH_I,INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the unsigned value on top of the evaluation stack to signed int16 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I2_Un = new OpCode()
- opcode(Conv_Ovf_I2_Un, CEE_CONV_OVF_I2_UN, "conv.ovf.i2.un", 0xFFFFFF83,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the unsigned value on top of the evaluation stack to signed int32,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I4_Un = new OpCode()
- opcode(Conv_Ovf_I4_Un, CEE_CONV_OVF_I4_UN, "conv.ovf.i4.un", 0xFFFFFF84,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the unsigned value on top of the evaluation stack to signed int64,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I8_Un = new OpCode()
- opcode(Conv_Ovf_I8_Un, CEE_CONV_OVF_I8_UN, "conv.ovf.i8.un", 0xFFFFFF85,POP_1,PUSH_I8, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the unsigned value on top of the evaluation stack to signed natural int,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I_Un = new OpCode()
- opcode(Conv_Ovf_I_Un, CEE_CONV_OVF_I_UN , "conv.ovf.i.un" , 0xFFFFFF8A,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the unsigned value on top of the evaluation stack to unsigned int8 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U1_Un = new OpCode()
- opcode(Conv_Ovf_U1_Un, CEE_CONV_OVF_U1_UN, "conv.ovf.u1.un", 0xFFFFFF86,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the unsigned value on top of the evaluation stack to unsigned int16 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U2_Un = new OpCode()
- opcode(Conv_Ovf_U2_Un, CEE_CONV_OVF_U2_UN, "conv.ovf.u2.un", 0xFFFFFF87,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the unsigned value on top of the evaluation stack to unsigned int32,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U4_Un = new OpCode()
- opcode(Conv_Ovf_U4_Un, CEE_CONV_OVF_U4_UN, "conv.ovf.u4.un", 0xFFFFFF88,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the unsigned value on top of the evaluation stack to unsigned int64,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U8_Un = new OpCode()
- opcode(Conv_Ovf_U8_Un, CEE_CONV_OVF_U8_UN, "conv.ovf.u8.un", 0xFFFFFF89,POP_1,PUSH_I8, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the unsigned value on top of the evaluation stack to unsigned natural int,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U_Un = new OpCode()
- opcode(Conv_Ovf_U_Un, CEE_CONV_OVF_U_UN , "conv.ovf.u.un" , 0xFFFFFF8B,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts a value type to an object reference (type O).
- */
- final val Box = new OpCode()
- opcode(Box, CEE_BOX , "box" , 0xFFFFFF8C, POP_1 , PUSH_REF , INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Pushes an object reference to a new zero-based, one-dimensional array whose elements
- * are of a specific type onto the evaluation stack.
- */
- final val Newarr = new OpCode()
- opcode(Newarr, CEE_NEWARR, "newarr" , 0xFFFFFF8D, POP_I , PUSH_REF , INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Pushes the number of elements of a zero-based, one-dimensional array
- * onto the evaluation stack.
- */
- final val Ldlen = new OpCode()
- opcode(Ldlen, CEE_LDLEN, "ldlen", 0xFFFFFF8E, POP_REF, PUSH_I,INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the address of the array element at a specified array index onto
- * the top of the evaluation stack as type & (managed pointer).
- */
- final val Ldelema = new OpCode()
- opcode(Ldelema, CEE_LDELEMA, "ldelema" , 0xFFFFFF8F, POP_REF_I, PUSH_I, INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Loads the element with type natural int at a specified array index onto the top
- * of the evaluation stack as a natural int.
- */
- final val Ldelem_I = new OpCode()
- opcode(Ldelem_I, CEE_LDELEM_I, "ldelem.i" , 0xFFFFFF97, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the element with type int8 at a specified array index onto the top of the
- * evaluation stack as an int32.
- */
- final val Ldelem_I1 = new OpCode()
- opcode(Ldelem_I1, CEE_LDELEM_I1, "ldelem.i1" , 0xFFFFFF90, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the element with type int16 at a specified array index onto the top of
- * the evaluation stack as an int32.
- */
- final val Ldelem_I2 = new OpCode()
- opcode(Ldelem_I2, CEE_LDELEM_I2, "ldelem.i2" , 0xFFFFFF92, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the element with type int32 at a specified array index onto the top of the
- * evaluation stack as an int32.
- */
- final val Ldelem_I4 = new OpCode()
- opcode(Ldelem_I4, CEE_LDELEM_I4, "ldelem.i4" , 0xFFFFFF94, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the element with type int64 at a specified array index onto the top of the
- * evaluation stack as an int64.
- */
- final val Ldelem_I8 = new OpCode()
- opcode(Ldelem_I8, CEE_LDELEM_I8, "ldelem.i8" , 0xFFFFFF96, POP_REF_I, PUSH_I8, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the element with type float32 at a specified array index onto the top of the
- * evaluation stack as type F (float).
- */
- final val Ldelem_R4 = new OpCode()
- opcode(Ldelem_R4, CEE_LDELEM_R4, "ldelem.r4" , 0xFFFFFF98, POP_REF_I, PUSH_R4, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the element with type float64 at a specified array index onto the top of the
- * evaluation stack as type F (float).
- */
- final val Ldelem_R8 = new OpCode()
- opcode(Ldelem_R8, CEE_LDELEM_R8, "ldelem.r8" , 0xFFFFFF99, POP_REF_I, PUSH_R8, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the element containing an object reference at a specified array index onto
- * the top of the evaluation stack as type O (object reference).
- */
- final val Ldelem_Ref = new OpCode()
- opcode(Ldelem_Ref, CEE_LDELEM_REF, "ldelem.ref", 0xFFFFFF9A, POP_REF_I, PUSH_REF, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the element with type unsigned int8 at a specified array index onto the top
- * of the evaluation stack as an int32.
- */
- final val Ldelem_U1 = new OpCode()
- opcode(Ldelem_U1, CEE_LDELEM_U1, "ldelem.u1" , 0xFFFFFF91, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the element with type unsigned int16 at a specified array index onto the top
- * of the evaluation stack as an int32.
- */
- final val Ldelem_U2 = new OpCode()
- opcode(Ldelem_U2, CEE_LDELEM_U2, "ldelem.u2" , 0xFFFFFF93, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the element with type unsigned int32 at a specified array index onto the top
- * of the evaluation stack as an int32.
- */
- final val Ldelem_U4 = new OpCode()
- opcode(Ldelem_U4, CEE_LDELEM_U4, "ldelem.u4" , 0xFFFFFF95, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Replaces the array element at a given index with the natural int value on
- * the evaluation stack.
- */
- final val Stelem_I = new OpCode()
- opcode(Stelem_I, CEE_STELEM_I, "stelem.i", 0xFFFFFF9B, POP_REF_I_I, PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Replaces the array element at a given index with the int8 value on the evaluation stack.
- */
- final val Stelem_I1 = new OpCode()
- opcode(Stelem_I1, CEE_STELEM_I1, "stelem.i1", 0xFFFFFF9C, POP_REF_I_I, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Replaces the array element at a given index with the int16 value on the evaluation stack.
- */
- final val Stelem_I2 = new OpCode()
- opcode(Stelem_I2, CEE_STELEM_I2, "stelem.i2", 0xFFFFFF9D, POP_REF_I_I, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Replaces the array element at a given index with the int32 value on the evaluation stack.
- */
- final val Stelem_I4 = new OpCode()
- opcode(Stelem_I4, CEE_STELEM_I4, "stelem.i4", 0xFFFFFF9E, POP_REF_I_I, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Replaces the array element at a given index with the int64 value on the evaluation stack.
- */
- final val Stelem_I8 = new OpCode()
- opcode(Stelem_I8, CEE_STELEM_I8,"stelem.i8", 0xFFFFFF9F, POP_REF_I_I8, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Replaces the array element at a given index with the float32 value on the evaluation stack.
- */
- final val Stelem_R4 = new OpCode()
- opcode(Stelem_R4, CEE_STELEM_R4,"stelem.r4", 0xFFFFFFA0, POP_REF_I_R4, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Replaces the array element at a given index with the float64 value on the evaluation stack.
- */
- final val Stelem_R8 = new OpCode()
- opcode(Stelem_R8, CEE_STELEM_R8,"stelem.r8", 0xFFFFFFA1, POP_REF_I_R8, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Replaces the array element at a given index with the object ref value (type O)
- * on the evaluation stack.
- */
- final val Stelem_Ref = new OpCode()
- opcode(Stelem_Ref, CEE_STELEM_REF,"stelem.ref",0xFFFFFFA2,POP_REF_I_REF,PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the signed value on top of the evaluation stack to signed int8 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I1 = new OpCode()
- opcode(Conv_Ovf_I1, CEE_CONV_OVF_I1, "conv.ovf.i1", 0xFFFFFFB3, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the signed value on top of the evaluation stack to signed int16 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I2 = new OpCode()
- opcode(Conv_Ovf_I2, CEE_CONV_OVF_I2, "conv.ovf.i2", 0xFFFFFFB5, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the signed value on top of the evaluation stack to signed int32,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I4 = new OpCode()
- opcode(Conv_Ovf_I4, CEE_CONV_OVF_I4, "conv.ovf.i4", 0xFFFFFFB7, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the signed value on top of the evaluation stack to signed int64,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I8 = new OpCode()
- opcode(Conv_Ovf_I8, CEE_CONV_OVF_I8, "conv.ovf.i8", 0xFFFFFFB9, POP_1, PUSH_I8, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the signed value on top of the evaluation stack to unsigned int8 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U1 = new OpCode()
- opcode(Conv_Ovf_U1, CEE_CONV_OVF_U1, "conv.ovf.u1", 0xFFFFFFB4, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the signed value on top of the evaluation stack to unsigned int16 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U2 = new OpCode()
- opcode(Conv_Ovf_U2, CEE_CONV_OVF_U2, "conv.ovf.u2", 0xFFFFFFB6, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the signed value on top of the evaluation stack to unsigned int32,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U4 = new OpCode()
- opcode(Conv_Ovf_U4, CEE_CONV_OVF_U4, "conv.ovf.u4", 0xFFFFFFB8, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the signed value on top of the evaluation stack to unsigned int64,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U8 = new OpCode()
- opcode(Conv_Ovf_U8, CEE_CONV_OVF_U8, "conv.ovf.u8", 0xFFFFFFBA, POP_1, PUSH_I8, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Retrieves the address (type &) embedded in a typed reference.
- */
- final val Refanyval = new OpCode()
- opcode(Refanyval, CEE_REFANYVAL, "refanyval", 0xFFFFFFC2, POP_1, PUSH_I , INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Retrieves the type token embedded in a typed reference.
- */
- final val Refanytype = new OpCode()
- opcode(Refanytype, CEE_REFANYTYPE, "refanytype", 0xFFFFFE1D, POP_1 , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Throws ArithmeticException if value is not a finite number.
- */
- final val Ckfinite = new OpCode()
- opcode(Ckfinite, CEE_CKFINITE, "ckfinite" , 0xFFFFFFC3, POP_1, PUSH_R8 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Pushes a typed reference to an instance of a specific type onto the evaluation stack.
- */
- final val Mkrefany = new OpCode()
- opcode(Mkrefany, CEE_MKREFANY, "mkrefany" , 0xFFFFFFC6, POP_I, PUSH_1 , INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Converts a metadata token to its runtime representation, pushing it onto the evaluation stack.
- */
- final val Ldtoken = new OpCode()
- opcode(Ldtoken, CEE_LDTOKEN , "ldtoken" , 0xFFFFFFD0, POP_NONE, PUSH_I, INLINE_TOKEN , FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to unsigned int8, and extends it to int32.
- */
- final val Conv_U1 = new OpCode()
- opcode(Conv_U1, CEE_CONV_U1 , "conv.u1" , 0xFFFFFFD2, POP_1, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to unsigned int16, and extends it to int32.
- */
- final val Conv_U2 = new OpCode()
- opcode(Conv_U2, CEE_CONV_U2 , "conv.u2" , 0xFFFFFFD1, POP_1, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to natural int.
- */
- final val Conv_I = new OpCode()
- opcode(Conv_I, CEE_CONV_I , "conv.i" , 0xFFFFFFD3, POP_1, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the signed value on top of the evaluation stack to signed natural int,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I = new OpCode()
- opcode(Conv_Ovf_I, CEE_CONV_OVF_I , "conv.ovf.i", 0xFFFFFFD4, POP_1, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the signed value on top of the evaluation stack to unsigned natural int,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U = new OpCode()
- opcode(Conv_Ovf_U, CEE_CONV_OVF_U , "conv.ovf.u", 0xFFFFFFD5, POP_1, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Adds two integers, performs an overflow check, and pushes the result
- * onto the evaluation stack.
- */
- final val Add_Ovf = new OpCode()
- opcode(Add_Ovf, CEE_ADD_OVF , "add.ovf" , 0xFFFFFFD6, POP_1_1, PUSH_1, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Adds two unsigned integer values, performs an overflow check, and pushes the result
- * onto the evaluation stack.
- */
- final val Add_Ovf_Un = new OpCode()
- opcode(Add_Ovf_Un, CEE_ADD_OVF_UN , "add.ovf.un", 0xFFFFFFD7, POP_1_1, PUSH_1, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Multiplies two integer values, performs an overflow check, and pushes the result
- * onto the evaluation stack.
- */
- final val Mul_Ovf = new OpCode()
- opcode(Mul_Ovf, CEE_MUL_OVF , "mul.ovf" , 0xFFFFFFD8, POP_1_1, PUSH_1, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Multiplies two unsigned integer values, performs an overflow check,
- * and pushes the result onto the evaluation stack.
- */
- final val Mul_Ovf_Un = new OpCode()
- opcode(Mul_Ovf_Un, CEE_MUL_OVF_UN , "mul.ovf.un", 0xFFFFFFD9, POP_1_1, PUSH_1, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Subtracts one integer value from another, performs an overflow check,
- * and pushes the result onto the evaluation stack.
- */
- final val Sub_Ovf = new OpCode()
- opcode(Sub_Ovf, CEE_SUB_OVF , "sub.ovf" , 0xFFFFFFDA, POP_1_1, PUSH_1, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Subtracts one unsigned integer value from another, performs an overflow check,
- * and pushes the result onto the evaluation stack.
- */
- final val Sub_Ovf_Un = new OpCode()
- opcode(Sub_Ovf_Un, CEE_SUB_OVF_UN, "sub.ovf.un", 0xFFFFFFDB, POP_1_1, PUSH_1, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Transfers control from the fault or finally clause of an exception block back to
- * the Common Language Infrastructure (CLI) exception handler.
- */
- final val Endfinally = new OpCode()
- opcode(Endfinally, CEE_ENDFINALLY, "endfinally", 0xFFFFFFDC, POP_NONE, PUSH_NONE, INLINE_NONE, FLOW_RETURN)
-
- /**
- * Exits a protected region of code, unconditionally transferring control
- * to a specific target instruction.
- */
- final val Leave = new OpCode()
- opcode(Leave, CEE_LEAVE, "leave", 0xFFFFFFDD, POP_NONE, PUSH_NONE, INLINE_TARGET, FLOW_BRANCH)
-
- /**
- * Exits a protected region of code, unconditionally transferring control
- * to a target instruction (short form).
- */
- final val Leave_S = new OpCode()
- opcode(Leave_S, CEE_LEAVE_S, "leave.s", 0xFFFFFFDE, POP_NONE, PUSH_NONE, INLINE_TARGET_S, FLOW_BRANCH)
-
- /**
- * Stores a value of type natural int at a supplied address.
- */
- final val Stind_I = new OpCode()
- opcode(Stind_I, CEE_STIND_I, "stind.i", 0xFFFFFFDF, POP_I_I , PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to unsigned natural int,
- * and extends it to natural int.
- */
- final val Conv_U = new OpCode()
- opcode(Conv_U, CEE_CONV_U, "conv.u", 0xFFFFFFE0, POP_1 , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Returns an unmanaged pointer to the argument list of the current method.
- */
- final val Arglist = new OpCode()
- opcode(Arglist, CEE_ARGLIST, "arglist" , 0xFFFFFE00, POP_NONE, PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Compares two values. If they are equal, the integer value 1 (int32) is pushed
- * onto the evaluation stack; otherwise 0 (int32) is pushed onto the evaluation stack.
- */
- final val Ceq = new OpCode()
- opcode(Ceq, CEE_CEQ, "ceq", 0xFFFFFE01, POP_1_1 , PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Compares two values. If the first value is greater than the second,
- * the integer value 1 (int32) is pushed onto the evaluation stack;
- * otherwise 0 (int32) is pushed onto the evaluation stack.
- */
- final val Cgt = new OpCode()
- opcode(Cgt, CEE_CGT, "cgt", 0xFFFFFE02, POP_1_1 , PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Compares two unsigned or unordered values. If the first value is greater than
- * the second, the integer value 1 (int32) is pushed onto the evaluation stack;
- * otherwise 0 (int32) is pushed onto the evaluation stack.
- */
- final val Cgt_Un = new OpCode()
- opcode(Cgt_Un, CEE_CGT_UN, "cgt.un", 0xFFFFFE03, POP_1_1 , PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Compares two values. If the first value is less than the second,
- * the integer value 1 (int32) is pushed onto the evaluation stack;
- * otherwise 0 (int32) is pushed onto the evaluation stack.
- */
- final val Clt = new OpCode()
- opcode(Clt, CEE_CLT, "clt" , 0xFFFFFE04, POP_1_1 , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Compares the unsigned or unordered values value1 and value2. If value1 is
- * less than value2, then the integer value 1 (int32) is pushed onto the
- * evaluation stack; otherwise 0 (int32) is pushed onto the evaluation stack.
- */
- final val Clt_Un = new OpCode()
- opcode(Clt_Un, CEE_CLT_UN , "clt.un" , 0xFFFFFE05, POP_1_1 , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes an unmanaged pointer (type natural int) to the native code implementing
- * a specific method onto the evaluation stack.
- */
- final val Ldftn = new OpCode()
- opcode(Ldftn, CEE_LDFTN , "ldftn" , 0xFFFFFE06, POP_NONE, PUSH_I , INLINE_METHOD, FLOW_NEXT)
-
- /**
- * Pushes an unmanaged pointer (type natural int) to the native code implementing
- * a particular virtual method associated with a specified object onto the evaluation stack.
- */
- final val Ldvirtftn = new OpCode()
- opcode(Ldvirtftn, CEE_LDVIRTFTN, "ldvirtftn", 0xFFFFFE07, POP_REF , PUSH_I , INLINE_METHOD, FLOW_NEXT)
-
- /**
- * Loads an argument (referenced by a specified index value) onto the stack.
- */
- final val Ldarg = new OpCode()
- opcode(Ldarg, CEE_LDARG , "ldarg" , 0xFFFFFE09, POP_NONE, PUSH_1 , INLINE_VARIABLE , FLOW_NEXT)
-
- /**
- * Load an argument address onto the evaluation stack.
- */
- final val Ldarga = new OpCode()
- opcode(Ldarga, CEE_LDARGA , "ldarga", 0xFFFFFE0A, POP_NONE, PUSH_I, INLINE_VARIABLE , FLOW_NEXT)
-
- /**
- * Loads the local variable at a specific index onto the evaluation stack.
- */
- final val Ldloc = new OpCode()
- opcode(Ldloc, CEE_LDLOC, "ldloc", 0xFFFFFE0C, POP_NONE, PUSH_1 , INLINE_VARIABLE , FLOW_NEXT)
-
- /**
- * Loads the address of the local variable at a specific index onto the evaluation stack.
- */
- final val Ldloca = new OpCode()
- opcode(Ldloca, CEE_LDLOCA, "ldloca", 0xFFFFFE0D, POP_NONE, PUSH_I, INLINE_VARIABLE , FLOW_NEXT)
-
- /**
- * Stores the value on top of the evaluation stack in the argument slot at a specified index.
- */
- final val Starg = new OpCode()
- opcode(Starg, CEE_STARG, "starg", 0xFFFFFE0B, POP_1 , PUSH_NONE, INLINE_VARIABLE , FLOW_NEXT)
-
- /**
- * Pops the current value from the top of the evaluation stack and stores it in
- * the local variable list at a specified index.
- */
- final val Stloc = new OpCode()
- opcode(Stloc, CEE_STLOC, "stloc", 0xFFFFFE0E, POP_1 , PUSH_NONE, INLINE_VARIABLE , FLOW_NEXT)
-
- /**
- * Allocates a certain number of bytes from the local dynamic memory pool and pushes the
- * address (a transient pointer, type *) of the first allocated Byte onto the evaluation stack.
- */
- final val Localloc = new OpCode()
- opcode(Localloc, CEE_LOCALLOC, "localloc" , 0xFFFFFE0F, POP_I, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Transfers control from the filter clause of an exception back to the
- * Common Language Infrastructure (CLI) exception handler.
- */
- final val Endfilter = new OpCode()
- opcode(Endfilter, CEE_ENDFILTER, "endfilter" , 0xFFFFFE11, POP_I , PUSH_NONE, INLINE_NONE, FLOW_RETURN)
-
- /**
- * Indicates that an address currently atop the evaluation stack might not be aligned
- * to the natural size of the immediately following ldind, stind, ldfld, stfld, ldobj,
- * stobj, initblk, or cpblk instruction.
- */
- final val Unaligned = new OpCode()
- opcode(Unaligned, CEE_UNALIGNED, "unaligned.", 0xFFFFFE12, POP_NONE, PUSH_NONE, INLINE_I_S , FLOW_META)
-
- /**
- * Specifies that an address currently atop the evaluation stack might be volatile,
- * and the results of reading that location cannot be cached or that multiple stores
- * to that location cannot be suppressed.
- */
- final val Volatile = new OpCode()
- opcode(Volatile, CEE_VOLATILE, "volatile." , 0xFFFFFE13, POP_NONE, PUSH_NONE, INLINE_NONE, FLOW_META)
-
- /**
- * Performs a postfixed method call instruction such that the current method's stack
- * frame is removed before the actual call instruction is executed.
- */
- final val Tailcall = new OpCode()
- opcode(Tailcall, CEE_TAILCALL, "tail." , 0xFFFFFE14, POP_NONE, PUSH_NONE, INLINE_NONE, FLOW_META)
-
- /**
- * Initializes all the fields of the object at a specific address to a null reference
- * or a 0 of the appropriate primitive type.
- */
- final val Initobj = new OpCode()
- opcode(Initobj, CEE_INITOBJ , "initobj" , 0xFFFFFE15, POP_I , PUSH_NONE, INLINE_TYPE, FLOW_NEXT)
-
- /**
- * Copies a specified number of bytes from a source address to a destination address.
- */
- final val Cpblk = new OpCode()
- opcode(Cpblk, CEE_CPBLK , "cpblk" , 0xFFFFFE17, POP_I_I_I, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Initializes a specified block of memory at a specific address to a given size
- * and initial value.
- */
- final val Initblk = new OpCode()
- opcode(Initblk, CEE_INITBLK , "initblk" , 0xFFFFFE18, POP_I_I_I, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Rethrows the current exception.
- */
- final val Rethrow = new OpCode()
- opcode(Rethrow, CEE_RETHROW , "rethrow", 0xFFFFFE1A, POP_NONE , PUSH_NONE, INLINE_NONE, FLOW_THROW)
-
- /**
- * Pushes the size, in bytes, of a supplied value type onto the evaluation stack.
- */
- final val Sizeof = new OpCode()
- opcode(Sizeof, CEE_SIZEOF, "sizeof", 0xFFFFFE1C, POP_NONE , PUSH_I , INLINE_TYPE, FLOW_NEXT)
-
-
-
- //##########################################################################
-}
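
The deleted OpCode.scala above pairs each "final val X = new OpCode()" with an opcode(X, CEE_X, "name", value, pop, push, inline, flow) call that records the instruction's mnemonic, encoding, stack behaviour and control-flow kind; the OpCodes.scala file removed below simply re-exports those constants for emission through ILGenerator, as its scaladoc notes. As a rough illustration of how these constants were consumed, here is a minimal Scala sketch; it assumes an ILGenerator.Emit(op: OpCode) member of the kind hinted at by the OpCodes scaladoc, and the method name emitAddOne is purely illustrative, not taken from the deleted sources.

    import ch.epfl.lamp.compiler.msil.emit.{ILGenerator, OpCodes}

    // Sketch only: emits an int32 -> int32 method body computing "argument 0 plus one",
    // assuming ILGenerator.Emit appends one instruction to the method being built.
    def emitAddOne(il: ILGenerator): Unit = {
      il.Emit(OpCodes.Ldarg_0)   // push argument 0 onto the evaluation stack
      il.Emit(OpCodes.Ldc_I4_1)  // push the int32 constant 1
      il.Emit(OpCodes.Add)       // pop both values, push their sum
      il.Emit(OpCodes.Ret)       // return the value on top of the evaluation stack
    }
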
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala
deleted file mode 100644
index 80e4267436..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala
+++ /dev/null
@@ -1,1205 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-
-/**
- * Provides field representations of the Microsoft Intermediate Language (MSIL)
- * instructions for emission by the ILGenerator class members (such as Emit).
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-object OpCodes {
-
- //##########################################################################
-
- /**
- * Adds two values and pushes the result onto the evaluation stack.
- */
- final val Add = OpCode.Add
-
- /**
- * Fills space if bytecodes are patched. No meaningful operation is performed
- * although a processing cycle can be consumed.
- */
- final val Nop = OpCode.Nop
-
- /**
- * Signals the Common Language Infrastructure (CLI) to inform the debugger that
- * a break point has been tripped.
- */
- final val Break = OpCode.Break
-
- /**
- * Loads the argument at index 0 onto the evaluation stack.
- */
- final val Ldarg_0 = OpCode.Ldarg_0
-
- /**
- * Loads the argument at index 1 onto the evaluation stack.
- */
- final val Ldarg_1 = OpCode.Ldarg_1
-
- /**
- * Loads the argument at index 2 onto the evaluation stack.
- */
- final val Ldarg_2 = OpCode.Ldarg_2
-
- /**
- * Loads the argument at index 3 onto the evaluation stack.
- */
- final val Ldarg_3 = OpCode.Ldarg_3
-
- /**
- * Loads the local variable at index 0 onto the evaluation stack.
- */
- final val Ldloc_0 = OpCode.Ldloc_0
-
- /**
- * Loads the local variable at index 1 onto the evaluation stack.
- */
- final val Ldloc_1 = OpCode.Ldloc_1
-
- /**
- * Loads the local variable at index 2 onto the evaluation stack.
- */
- final val Ldloc_2 = OpCode.Ldloc_2
-
- /**
- * Loads the local variable at index 3 onto the evaluation stack.
- */
- final val Ldloc_3 = OpCode.Ldloc_3
-
- /**
- * Pops the current value from the top of the evaluation stack and
- * stores it in the local variable list at index 0.
- */
- final val Stloc_0 = OpCode.Stloc_0
-
- /**
- * Pops the current value from the top of the evaluation stack and
- * stores it in the local variable list at index 1.
- */
- final val Stloc_1 = OpCode.Stloc_1
-
- /**
- * Pops the current value from the top of the evaluation stack and
- * stores it in the local variable list at index 2.
- */
- final val Stloc_2 = OpCode.Stloc_2
-
- /**
- * Pops the current value from the top of the evaluation stack and
- * stores it in the local variable list at index 3.
- */
- final val Stloc_3 = OpCode.Stloc_3
-
- /**
- * Loads the argument (referenced by a specified short form index)
- * onto the evaluation stack.
- */
- final val Ldarg_S = OpCode.Ldarg_S
-
- /**
- * Load an argument address, in short form, onto the evaluation stack.
- */
- final val Ldarga_S = OpCode.Ldarga_S
-
- /**
- * Loads the local variable at a specific index onto the evaluation stack,
- * short form.
- */
- final val Ldloc_S = OpCode.Ldloc_S
-
- /**
- * Loads the address of the local variable at a specific index onto
- * the evaluation stack, short form.
- */
- final val Ldloca_S = OpCode.Ldloca_S
-
- /**
- * Stores the value on top of the evaluation stack in the argument slot
- * at a specified index, short form.
- */
- final val Starg_S = OpCode.Starg_S
-
- /**
- * Pops the current value from the top of the evaluation stack and stores it
- * in the local variable list at a specified index (short form).
- */
- final val Stloc_S = OpCode.Stloc_S
-
- /**
- * Pushes a null reference (type O) onto the evaluation stack.
- */
- final val Ldnull = OpCode.Ldnull
-
- /**
- * Pushes the integer value of -1 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_M1 = OpCode.Ldc_I4_M1
-
- /**
- * Pushes the integer value of 0 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_0 = OpCode.Ldc_I4_0
-
- /**
- * Pushes the integer value of 1 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_1 = OpCode.Ldc_I4_1
-
- /**
- * Pushes the integer value of 2 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_2 = OpCode.Ldc_I4_2
-
- /**
- * Pushes the integer value of 3 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_3 = OpCode.Ldc_I4_3
-
- /**
- * Pushes the integer value of 4 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_4 = OpCode.Ldc_I4_4
-
- /**
- * Pushes the integer value of 5 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_5 = OpCode.Ldc_I4_5
-
- /**
- * Pushes the integer value of 6 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_6 = OpCode.Ldc_I4_6
-
- /**
- * Pushes the integer value of 7 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_7 = OpCode.Ldc_I4_7
-
- /**
- * Pushes the integer value of 8 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_8 = OpCode.Ldc_I4_8
-
- /**
- * Pushes the supplied int8 value onto the evaluation stack as an int32, short form.
- */
- final val Ldc_I4_S = OpCode.Ldc_I4_S
-
- /**
- * Pushes a supplied value of type int32 onto the evaluation stack as an int32.
- */
- final val Ldc_I4 = OpCode.Ldc_I4
-
- /**
- * Pushes a supplied value of type int64 onto the evaluation stack as an int64.
- */
- final val Ldc_I8 = OpCode.Ldc_I8
-
- /**
- * Pushes a supplied value of type float32 onto the evaluation stack as type F (float).
- */
- final val Ldc_R4 = OpCode.Ldc_R4
-
- /**
- * Pushes a supplied value of type float64 onto the evaluation stack as type F (float).
- */
- final val Ldc_R8 = OpCode.Ldc_R8
-
- /**
- * Copies the current topmost value on the evaluation stack, and then pushes the copy
- * onto the evaluation stack.
- */
- final val Dup = OpCode.Dup
-
- /**
- * Removes the value currently on top of the evaluation stack.
- */
- final val Pop = OpCode.Pop
-
- /**
- * Exits current method and jumps to specified method.
- */
- final val Jmp = OpCode.Jmp
-
- /**
- * Calls the method indicated by the passed method descriptor.
- */
- final val Call = OpCode.Call
-
- /**
- * constrained. prefix
- */
- final val Constrained = OpCode.Constrained
-
- /**
- * readonly. prefix
- */
- final val Readonly = OpCode.Readonly
-
- /**
- * Calls the method indicated on the evaluation stack (as a pointer to an entry point)
- * with arguments described by a calling convention.
- */
- final val Calli = OpCode.Calli
-
- /**
- * Returns from the current method, pushing a return value (if present) from the callee's
- * evaluation stack onto the caller's evaluation stack.
- */
- final val Ret = OpCode.Ret
-
- /**
- * Unconditionally transfers control to a target instruction (short form).
- */
- final val Br_S = OpCode.Br_S
-
- /**
- * Transfers control to a target instruction (short form) if value is false, a null reference, or zero.
- */
- final val Brfalse_S = OpCode.Brfalse_S
-
- /**
- * Transfers control to a target instruction (short form) if value is true, not null, or non-zero.
- */
- final val Brtrue_S = OpCode.Brtrue_S
-
- /**
- * Transfers control to a target instruction (short form) if two values are equal.
- */
- final val Beq_S = OpCode.Beq_S
-
- /**
- * Transfers control to a target instruction (short form) if the first value is greater than
- * or equal to the second value.
- */
- final val Bge_S = OpCode.Bge_S
-
- /**
- * Transfers control to a target instruction (short form) if the first value is greater than
- * the second value.
- */
- final val Bgt_S = OpCode.Bgt_S
-
- /**
- * Transfers control to a target instruction (short form) if the first value is less than
- * or equal to the second value.
- */
- final val Ble_S = OpCode.Ble_S
-
- /**
- * Transfers control to a target instruction (short form) if the first value is less than
- * the second value.
- */
- final val Blt_S = OpCode.Blt_S
-
- /**
- * Transfers control to a target instruction (short form) when two unsigned integer values
- * or unordered float values are not equal.
- */
- final val Bne_Un_S = OpCode.Bne_Un_S
-
- /**
- * Transfers control to a target instruction (short form) if the first value is greater than
- * or equal to the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Bge_Un_S = OpCode.Bge_Un_S
-
- /**
- * Transfers control to a target instruction (short form) if the first value is greater than
- * the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Bgt_Un_S = OpCode.Bgt_Un_S
-
- /**
- * Transfers control to a target instruction (short form) if the first value is less than
- * or equal to the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Ble_Un_S = OpCode.Ble_Un_S
-
- /**
- * Transfers control to a target instruction (short form) if the first value is less than
- * the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Blt_Un_S = OpCode.Blt_Un_S
-
- /**
- * Unconditionally transfers control to a target instruction.
- */
- final val Br = OpCode.Br
-
- /**
- * Transfers control to a target instruction if value is false, a null reference
- * (Nothing in Visual Basic), or zero.
- */
- final val Brfalse = OpCode.Brfalse
-
- /**
- * Transfers control to a target instruction if value is true, not null, or non-zero.
- */
- final val Brtrue = OpCode.Brtrue
-
- /**
- * Transfers control to a target instruction if two values are equal.
- */
- final val Beq = OpCode.Beq
-
- /**
- * Transfers control to a target instruction if the first value is greater than or
- * equal to the second value.
- */
- final val Bge = OpCode.Bge
-
- /**
- * Transfers control to a target instruction if the first value is greater than the second value.
- */
- final val Bgt = OpCode.Bgt
-
- /**
- * Transfers control to a target instruction if the first value is less than or equal
- * to the second value.
- */
- final val Ble = OpCode.Ble
-
- /**
- * Transfers control to a target instruction if the first value is less than the second value.
- */
- final val Blt = OpCode.Blt
-
- /**
- * Transfers control to a target instruction when two unsigned integer values or
- * unordered float values are not equal.
- */
- final val Bne_Un = OpCode.Bne_Un
-
- /**
- * Transfers control to a target instruction if the first value is greater than or equal to
- * the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Bge_Un = OpCode.Bge_Un
-
- /**
- * Transfers control to a target instruction if the first value is greater than the
- * second value, when comparing unsigned integer values or unordered float values.
- */
- final val Bgt_Un = OpCode.Bgt_Un
-
- /**
- * Transfers control to a target instruction if the first value is less than or equal to
- * the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Ble_Un = OpCode.Ble_Un
-
- /**
- * Transfers control to a target instruction if the first value is less than the second value,
- * when comparing unsigned integer values or unordered float values.
- */
- final val Blt_Un = OpCode.Blt_Un
-
- /**
- * Implements a jump table.
- */
- final val Switch = OpCode.Switch
-
- /**
- * Loads a value of type int8 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_I1 = OpCode.Ldind_I1
-
- /**
- * Loads a value of type int16 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_I2 = OpCode.Ldind_I2
-
- /**
- * Loads a value of type int32 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_I4 = OpCode.Ldind_I4
-
- /**
- * Loads a value of type int64 as an int64 onto the evaluation stack indirectly.
- */
- final val Ldind_I8 = OpCode.Ldind_I8
-
- /**
- * Loads a value of type natural int as a natural int onto the evaluation stack indirectly.
- */
- final val Ldind_I = OpCode.Ldind_I
-
- /**
- * Loads a value of type float32 as a type F (float) onto the evaluation stack indirectly.
- */
- final val Ldind_R4 = OpCode.Ldind_R4
-
- /**
- * Loads a value of type float64 as a type F (float) onto the evaluation stack indirectly.
- */
- final val Ldind_R8 = OpCode.Ldind_R8
-
- /**
- * Loads an object reference as a type O (object reference) onto the evaluation stack indirectly.
- */
- final val Ldind_Ref = OpCode.Ldind_Ref
-
- /**
- * Loads a value of type unsigned int8 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_U1 = OpCode.Ldind_U1
-
- /**
- * Loads a value of type unsigned int16 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_U2 = OpCode.Ldind_U2
-
- /**
- * Loads a value of type unsigned int32 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_U4 = OpCode.Ldind_U4
-
- /**
- * Stores an object reference value at a supplied address.
- */
- final val Stind_Ref = OpCode.Stind_Ref
-
- /**
- * Stores a value of type int8 at a supplied address.
- */
- final val Stind_I1 = OpCode.Stind_I1
-
- /**
- * Stores a value of type int16 at a supplied address.
- */
- final val Stind_I2 = OpCode.Stind_I2
-
- /**
- * Stores a value of type int32 at a supplied address.
- */
- final val Stind_I4 = OpCode.Stind_I4
-
- /**
- * Stores a value of type int64 at a supplied address.
- */
- final val Stind_I8 = OpCode.Stind_I8
-
- /**
- * Stores a value of type float32 at a supplied address.
- */
- final val Stind_R4 = OpCode.Stind_R4
-
- /**
- * Stores a value of type float64 at a supplied address.
- */
- final val Stind_R8 = OpCode.Stind_R8
-
- /**
- * Subtracts one value from another and pushes the result onto the evaluation stack.
- */
- final val Sub = OpCode.Sub
-
- /**
- * Multiplies two values and pushes the result on the evaluation stack.
- */
- final val Mul = OpCode.Mul
-
- /**
- * Divides two values and pushes the result as a floating-point (type F) or
- * quotient (type int32) onto the evaluation stack.
- */
- final val Div = OpCode.Div
-
- /**
- * Divides two unsigned integer values and pushes the result (int32) onto the evaluation stack.
- */
- final val Div_Un = OpCode.Div_Un
-
- /**
- * Divides two values and pushes the remainder onto the evaluation stack.
- */
- final val Rem = OpCode.Rem
-
- /**
- * Divides two unsigned values and pushes the remainder onto the evaluation stack.
- */
- final val Rem_Un = OpCode.Rem_Un
-
- /**
- * Computes the bitwise AND of two values and pushes the result onto the evaluation stack.
- */
- final val And = OpCode.And
-
- /**
- * Computes the bitwise OR of the two integer values on top of the stack and
- * pushes the result onto the evaluation stack.
- */
- final val Or = OpCode.Or
-
- /**
- * Computes the bitwise XOR of the top two values on the evaluation stack,
- * pushing the result onto the evaluation stack.
- */
- final val Xor = OpCode.Xor
-
- /**
- * Shifts an integer value to the left (in zeroes) by a specified number of bits,
- * pushing the result onto the evaluation stack.
- */
- final val Shl = OpCode.Shl
-
- /**
- * Shifts an integer value (in sign) to the right by a specified number of bits,
- * pushing the result onto the evaluation stack.
- */
- final val Shr = OpCode.Shr
-
- /**
- * Shifts an unsigned integer value (in zeroes) to the right by a specified number of bits,
- * pushing the result onto the evaluation stack.
- */
- final val Shr_Un = OpCode.Shr_Un
-
- /**
- * Negates a value and pushes the result onto the evaluation stack.
- */
- final val Neg = OpCode.Neg
-
- /**
- * Computes the bitwise complement of the integer value on top of the stack and pushes
- * the result onto the evaluation stack as the same type.
- */
- final val Not = OpCode.Not
-
- /**
- * Converts the value on top of the evaluation stack to int8, then extends (pads) it to int32.
- */
- final val Conv_I1 = OpCode.Conv_I1
-
- /**
- * Converts the value on top of the evaluation stack to int16, then extends (pads) it to int32.
- */
- final val Conv_I2 = OpCode.Conv_I2
-
- /**
- * Converts the value on top of the evaluation stack to int32.
- */
- final val Conv_I4 = OpCode.Conv_I4
-
- /**
- * Converts the value on top of the evaluation stack to int64.
- */
- final val Conv_I8 = OpCode.Conv_I8
-
- /**
- * Converts the value on top of the evaluation stack to float32.
- */
- final val Conv_R4 = OpCode.Conv_R4
-
- /**
- * Converts the value on top of the evaluation stack to float64.
- */
- final val Conv_R8 = OpCode.Conv_R8
-
- /**
- * Converts the value on top of the evaluation stack to unsigned int32, and extends it to int32.
- */
- final val Conv_U4 = OpCode.Conv_U4
-
- /**
- * Converts the value on top of the evaluation stack to unsigned int64, and extends it to int64.
- */
- final val Conv_U8 = OpCode.Conv_U8
-
- /**
- * Calls a late-bound method on an object, pushing the return value onto the evaluation stack.
- */
- final val Callvirt = OpCode.Callvirt
-
- /**
- * Copies the value type located at the address of an object (type &, * or natural int)
- * to the address of the destination object (type &, * or natural int).
- */
- final val Cpobj = OpCode.Cpobj
-
- /**
- * Copies the value type object pointed to by an address to the top of the evaluation stack.
- */
- final val Ldobj = OpCode.Ldobj
-
- /**
- * Pushes a new object reference to a string literal stored in the metadata.
- */
- final val Ldstr = OpCode.Ldstr
-
- /**
- * Creates a new object or a new instance of a value type, pushing an object reference
- * (type O) onto the evaluation stack.
- */
- final val Newobj = OpCode.Newobj
-
- /**
- * Attempts to cast an object passed by reference to the specified class.
- */
- final val Castclass = OpCode.Castclass
-
- /**
- * Tests whether an object reference (type O) is an instance of a particular class.
- */
- final val Isinst = OpCode.Isinst
-
- /**
- * Converts the unsigned integer value on top of the evaluation stack to float32.
- */
- final val Conv_R_Un = OpCode.Conv_R_Un
-
- /**
- * Converts the boxed representation of a value type to its unboxed form.
- */
- final val Unbox = OpCode.Unbox
-
- /**
- * Throws the exception object currently on the evaluation stack.
- */
- final val Throw = OpCode.Throw
-
- /**
- * Finds the value of a field in the object whose reference is currently
- * on the evaluation stack.
- */
- final val Ldfld = OpCode.Ldfld
-
- /**
- * Finds the address of a field in the object whose reference is currently
- * on the evaluation stack.
- */
- final val Ldflda = OpCode.Ldflda
-
- /**
- * Pushes the value of a static field onto the evaluation stack.
- */
- final val Ldsfld = OpCode.Ldsfld
-
- /**
- * Pushes the address of a static field onto the evaluation stack.
- */
- final val Ldsflda = OpCode.Ldsflda
-
- /**
- * Replaces the value stored in the field of an object reference or pointer with a new value.
- */
- final val Stfld = OpCode.Stfld
-
- /**
- * Replaces the value of a static field with a value from the evaluation stack.
- */
- final val Stsfld = OpCode.Stsfld
-
- /**
- * Copies a value of a specified type from the evaluation stack into a supplied memory address.
- */
- final val Stobj = OpCode.Stobj
-
- /**
- * Converts the unsigned value on top of the evaluation stack to signed int8 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I1_Un = OpCode.Conv_Ovf_I1_Un
-
- /**
- * Converts the unsigned value on top of the evaluation stack to signed int16 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I2_Un = OpCode.Conv_Ovf_I2_Un
-
- /**
- * Converts the unsigned value on top of the evaluation stack to signed int32,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I4_Un = OpCode.Conv_Ovf_I4_Un
-
- /**
- * Converts the unsigned value on top of the evaluation stack to signed int64,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I8_Un = OpCode.Conv_Ovf_I8_Un
-
- /**
- * Converts the unsigned value on top of the evaluation stack to signed natural int,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I_Un = OpCode.Conv_Ovf_I_Un
-
- /**
- * Converts the unsigned value on top of the evaluation stack to unsigned int8 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U1_Un = OpCode.Conv_Ovf_U1_Un
-
- /**
- * Converts the unsigned value on top of the evaluation stack to unsigned int16 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U2_Un = OpCode.Conv_Ovf_U2_Un
-
- /**
- * Converts the unsigned value on top of the evaluation stack to unsigned int32,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U4_Un = OpCode.Conv_Ovf_U4_Un
-
- /**
- * Converts the unsigned value on top of the evaluation stack to unsigned int64,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U8_Un = OpCode.Conv_Ovf_U8_Un
-
- /**
- * Converts the unsigned value on top of the evaluation stack to unsigned natural int,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U_Un = OpCode.Conv_Ovf_U_Un
-
- /**
- * Converts a value type to an object reference (type O).
- */
- final val Box = OpCode.Box
-
- /**
- * Pushes an object reference to a new zero-based, one-dimensional array whose elements
- * are of a specific type onto the evaluation stack.
- */
- final val Newarr = OpCode.Newarr
-
- /**
- * Pushes the number of elements of a zero-based, one-dimensional array
- * onto the evaluation stack.
- */
- final val Ldlen = OpCode.Ldlen
-
- /**
- * Loads the address of the array element at a specified array index onto
- * the top of the evaluation stack as type & (managed pointer).
- */
- final val Ldelema = OpCode.Ldelema
-
- /**
- * Loads the element with type natural int at a specified array index onto the top
- * of the evaluation stack as a natural int.
- */
- final val Ldelem_I = OpCode.Ldelem_I
-
- /**
- * Loads the element with type int8 at a specified array index onto the top of the
- * evaluation stack as an int32.
- */
- final val Ldelem_I1 = OpCode.Ldelem_I1
-
- /**
- * Loads the element with type int16 at a specified array index onto the top of
- * the evaluation stack as an int32.
- */
- final val Ldelem_I2 = OpCode.Ldelem_I2
-
- /**
- * Loads the element with type int32 at a specified array index onto the top of the
- * evaluation stack as an int32.
- */
- final val Ldelem_I4 = OpCode.Ldelem_I4
-
- /**
- * Loads the element with type int64 at a specified array index onto the top of the
- * evaluation stack as an int64.
- */
- final val Ldelem_I8 = OpCode.Ldelem_I8
-
- /**
- * Loads the element with type float32 at a specified array index onto the top of the
- * evaluation stack as type F (float).
- */
- final val Ldelem_R4 = OpCode.Ldelem_R4
-
- /**
- * Loads the element with type float64 at a specified array index onto the top of the
- * evaluation stack as type F (float).
- */
- final val Ldelem_R8 = OpCode.Ldelem_R8
-
- /**
- * Loads the element containing an object reference at a specified array index onto
- * the top of the evaluation stack as type O (object reference).
- */
- final val Ldelem_Ref = OpCode.Ldelem_Ref
-
- /**
- * Loads the element with type unsigned int8 at a specified array index onto the top
- * of the evaluation stack as an int32.
- */
- final val Ldelem_U1 = OpCode.Ldelem_U1
-
- /**
- * Loads the element with type unsigned int16 at a specified array index onto the top
- * of the evaluation stack as an int32.
- */
- final val Ldelem_U2 = OpCode.Ldelem_U2
-
- /**
- * Loads the element with type unsigned int32 at a specified array index onto the top
- * of the evaluation stack as an int32.
- */
- final val Ldelem_U4 = OpCode.Ldelem_U4
-
- /**
- * Replaces the array element at a given index with the natural int value on
- * the evaluation stack.
- */
- final val Stelem_I = OpCode.Stelem_I
-
- /**
- * Replaces the array element at a given index with the int8 value on the evaluation stack.
- */
- final val Stelem_I1 = OpCode.Stelem_I1
-
- /**
- * Replaces the array element at a given index with the int16 value on the evaluation stack.
- */
- final val Stelem_I2 = OpCode.Stelem_I2
-
- /**
- * Replaces the array element at a given index with the int32 value on the evaluation stack.
- */
- final val Stelem_I4 = OpCode.Stelem_I4
-
- /**
- * Replaces the array element at a given index with the int64 value on the evaluation stack.
- */
- final val Stelem_I8 = OpCode.Stelem_I8
-
- /**
- * Replaces the array element at a given index with the float32 value on the evaluation stack.
- */
- final val Stelem_R4 = OpCode.Stelem_R4
-
- /**
- * Replaces the array element at a given index with the float64 value on the evaluation stack.
- */
- final val Stelem_R8 = OpCode.Stelem_R8
-
- /**
- * Replaces the array element at a given index with the object ref value (type O)
- * on the evaluation stack.
- */
- final val Stelem_Ref = OpCode.Stelem_Ref
-
- /**
- * Converts the signed value on top of the evaluation stack to signed int8 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I1 = OpCode.Conv_Ovf_I1
-
- /**
- * Converts the signed value on top of the evaluation stack to signed int16 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I2 = OpCode.Conv_Ovf_I2
-
- /**
- * Converts the signed value on top of the evaluation stack to signed int32,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I4 = OpCode.Conv_Ovf_I4
-
- /**
- * Converts the signed value on top of the evaluation stack to signed int64,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I8 = OpCode.Conv_Ovf_I8
-
- /**
- * Converts the signed value on top of the evaluation stack to unsigned int8 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U1 = OpCode.Conv_Ovf_U1
-
- /**
- * Converts the signed value on top of the evaluation stack to unsigned int16 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U2 = OpCode.Conv_Ovf_U2
-
- /**
- * Converts the signed value on top of the evaluation stack to unsigned int32,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U4 = OpCode.Conv_Ovf_U4
-
- /**
- * Converts the signed value on top of the evaluation stack to unsigned int64,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U8 = OpCode.Conv_Ovf_U8
-
- /**
- * Retrieves the address (type &) embedded in a typed reference.
- */
- final val Refanyval = OpCode.Refanyval
-
- /**
- * Retrieves the type token embedded in a typed reference.
- */
- final val Refanytype = OpCode.Refanytype
-
- /**
- * Throws ArithmeticException if value is not a finite number.
- */
- final val Ckfinite = OpCode.Ckfinite
-
- /**
- * Pushes a typed reference to an instance of a specific type onto the evaluation stack.
- */
- final val Mkrefany = OpCode.Mkrefany
-
- /**
- * Converts a metadata token to its runtime representation, pushing it onto the evaluation stack.
- */
- final val Ldtoken = OpCode.Ldtoken
-
- /**
- * Converts the value on top of the evaluation stack to unsigned int8, and extends it to int32.
- */
- final val Conv_U1 = OpCode.Conv_U1
-
- /**
- * Converts the value on top of the evaluation stack to unsigned int16, and extends it to int32.
- */
- final val Conv_U2 = OpCode.Conv_U2
-
- /**
- * Converts the value on top of the evaluation stack to natural int.
- */
- final val Conv_I = OpCode.Conv_I
-
- /**
- * Converts the signed value on top of the evaluation stack to signed natural int,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I = OpCode.Conv_Ovf_I
-
- /**
- * Converts the signed value on top of the evaluation stack to unsigned natural int,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U = OpCode.Conv_Ovf_U
-
- /**
- * Adds two integers, performs an overflow check, and pushes the result
- * onto the evaluation stack.
- */
- final val Add_Ovf = OpCode.Add_Ovf
-
- /**
- * Adds two unsigned integer values, performs an overflow check, and pushes the result
- * onto the evaluation stack.
- */
- final val Add_Ovf_Un = OpCode.Add_Ovf_Un
-
- /**
- * Multiplies two integer values, performs an overflow check, and pushes the result
- * onto the evaluation stack.
- */
- final val Mul_Ovf = OpCode.Mul_Ovf
-
- /**
- * Multiplies two unsigned integer values, performs an overflow check,
- * and pushes the result onto the evaluation stack.
- */
- final val Mul_Ovf_Un = OpCode.Mul_Ovf_Un
-
- /**
- * Subtracts one integer value from another, performs an overflow check,
- * and pushes the result onto the evaluation stack.
- */
- final val Sub_Ovf = OpCode.Sub_Ovf
-
- /**
- * Subtracts one unsigned integer value from another, performs an overflow check,
- * and pushes the result onto the evaluation stack.
- */
- final val Sub_Ovf_Un = OpCode.Sub_Ovf_Un
-
- /**
- * Transfers control from the fault or finally clause of an exception block back to
- * the Common Language Infrastructure (CLI) exception handler.
- */
- final val Endfinally = OpCode.Endfinally
-
- /**
- * Exits a protected region of code, unconditionally transferring control
- * to a specific target instruction.
- */
- final val Leave = OpCode.Leave
-
- /**
- * Exits a protected region of code, unconditionally transferring control
- * to a target instruction (short form).
- */
- final val Leave_S = OpCode.Leave_S
-
- /**
- * Stores a value of type natural int at a supplied address.
- */
- final val Stind_I = OpCode.Stind_I
-
- /**
- * Converts the value on top of the evaluation stack to unsigned natural int,
- * and extends it to natural int.
- */
- final val Conv_U = OpCode.Conv_U
-
- /**
- * Returns an unmanaged pointer to the argument list of the current method.
- */
- final val Arglist = OpCode.Arglist
-
- /**
- * Compares two values. If they are equal, the integer value 1 (int32) is pushed
- * onto the evaluation stack; otherwise 0 (int32) is pushed onto the evaluation stack.
- */
- final val Ceq = OpCode.Ceq
-
- /**
- * Compares two values. If the first value is greater than the second,
- * the integer value 1 (int32) is pushed onto the evaluation stack;
- * otherwise 0 (int32) is pushed onto the evaluation stack.
- */
- final val Cgt = OpCode.Cgt
-
- /**
- * Compares two unsigned or unordered values. If the first value is greater than
- * the second, the integer value 1 (int32) is pushed onto the evaluation stack;
- * otherwise 0 (int32) is pushed onto the evaluation stack.
- */
- final val Cgt_Un = OpCode.Cgt_Un
-
- /**
- * Compares two values. If the first value is less than the second,
- * the integer value 1 (int32) is pushed onto the evaluation stack;
- * otherwise 0 (int32) is pushed onto the evaluation stack.
- */
- final val Clt = OpCode.Clt
-
- /**
- * Compares the unsigned or unordered values value1 and value2. If value1 is
- * less than value2, then the integer value 1 (int32) is pushed onto the
- * evaluation stack; otherwise 0 (int32) is pushed onto the evaluation stack.
- */
- final val Clt_Un = OpCode.Clt_Un
-
- /**
- * Pushes an unmanaged pointer (type natural int) to the native code implementing
- * a specific method onto the evaluation stack.
- */
- final val Ldftn = OpCode.Ldftn
-
- /**
- * Pushes an unmanaged pointer (type natural int) to the native code implementing
- * a particular virtual method associated with a specified object onto the evaluation stack.
- */
- final val Ldvirtftn = OpCode.Ldvirtftn
-
- /**
- * Loads an argument (referenced by a specified index value) onto the stack.
- */
- final val Ldarg = OpCode.Ldarg
-
- /**
- * Load an argument address onto the evaluation stack.
- */
- final val Ldarga = OpCode.Ldarga
-
- /**
- * Loads the local variable at a specific index onto the evaluation stack.
- */
- final val Ldloc = OpCode.Ldloc
-
- /**
- * Loads the address of the local variable at a specific index onto the evaluation stack.
- */
- final val Ldloca = OpCode.Ldloca
-
- /**
- * Stores the value on top of the evaluation stack in the argument slot at a specified index.
- */
- final val Starg = OpCode.Starg
-
- /**
- * Pops the current value from the top of the evaluation stack and stores it in
- * the local variable list at a specified index.
- */
- final val Stloc = OpCode.Stloc
-
- /**
- * Allocates a certain number of bytes from the local dynamic memory pool and pushes the
- * address (a transient pointer, type *) of the first allocated Byte onto the evaluation stack.
- */
- final val Localloc = OpCode.Localloc
-
- /**
- * Transfers control from the filter clause of an exception back to the
- * Common Language Infrastructure (CLI) exception handler.
- */
- final val Endfilter = OpCode.Endfilter
-
- /**
- * Indicates that an address currently atop the evaluation stack might not be aligned
- * to the natural size of the immediately following ldind, stind, ldfld, stfld, ldobj,
- * stobj, initblk, or cpblk instruction.
- */
- final val Unaligned = OpCode.Unaligned
-
- /**
- * Specifies that an address currently atop the evaluation stack might be volatile,
- * and the results of reading that location cannot be cached or that multiple stores
- * to that location cannot be suppressed.
- */
- final val Volatile = OpCode.Volatile
-
- /**
- * Performs a postfixed method call instruction such that the current method's stack
- * frame is removed before the actual call instruction is executed.
- */
- final val Tailcall = OpCode.Tailcall
-
- /**
- * Initializes all the fields of the object at a specific address to a null reference
- * or a 0 of the appropriate primitive type.
- */
- final val Initobj = OpCode.Initobj
-
- /**
- * Copies a specified number of bytes from a source address to a destination address.
- */
- final val Cpblk = OpCode.Cpblk
-
- /**
- * Initializes a specified block of memory at a specific address to a given size
- * and initial value.
- */
- final val Initblk = OpCode.Initblk
-
- /**
- * Rethrows the current exception.
- */
- final val Rethrow = OpCode.Rethrow
-
- /**
- * Pushes the size, in bytes, of a supplied value type onto the evaluation stack.
- */
- final val Sizeof = OpCode.Sizeof
-
- //##########################################################################
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala
deleted file mode 100644
index 8f9d81a8b0..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.Type
-import ch.epfl.lamp.compiler.msil.ConstructorInfo
-import ch.epfl.lamp.compiler.msil.ParameterInfo
-import java.io.IOException
-
-/**
- * Creates or associates parameter information.
- * Parameter attributes need to be consistent with the method signature.
- * If you specify Out attributes for a parameter, you should ensure that
- * the type of that method parameter is a ByRef type.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class ParameterBuilder(name: String, tpe: Type, attr: Int, pos: Int)
- extends ParameterInfo(name, tpe, attr, pos)
- with ICustomAttributeSetter
- with Visitable
-{
-
- //##########################################################################
-
- /** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
- addCustomAttribute(constr, value)
- }
-
- //##########################################################################
-
- /** The apply method for a visitor */
- @throws(classOf[IOException])
- def apply(v: Visitor) {
- v.caseParameterBuilder(this)
- }
-
- //##########################################################################
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala
deleted file mode 100644
index 5d59d4d25a..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies in MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import java.io.FileWriter
-import java.io.BufferedWriter
-import java.io.PrintWriter
-import java.io.IOException
-import java.util.Iterator
-import java.util.HashMap
-import java.util.Arrays
-
-import ch.epfl.lamp.compiler.msil._
-import ch.epfl.lamp.compiler.msil.emit
-import ch.epfl.lamp.compiler.msil.util.Table
-
-/**
- * The MSIL printer Visitor. It prints a complete
- * assembly in a single file that can be compiled by ilasm.
- *
- * @author Nikolay Mihaylov
- * @author Daniel Lorch
- * @version 1.0
- */
-final class SingleFileILPrinterVisitor(_fileName: String) extends ILPrinterVisitor {
- var fileName: String = _fileName
-
- out = new PrintWriter(new BufferedWriter(new FileWriter(fileName)))
-
- /**
- * Visit an AssemblyBuilder
- */
- @throws(classOf[IOException])
- def caseAssemblyBuilder(assemblyBuilder: AssemblyBuilder) {
- ILPrinterVisitor.currAssembly = assemblyBuilder
-
- // first get the entryPoint
- this.entryPoint = assemblyBuilder.EntryPoint
-
- // all external assemblies
- as = assemblyBuilder.getExternAssemblies()
- scala.util.Sorting.quickSort(as)(assemblyNameComparator) // Arrays.sort(as, assemblyNameComparator)
-
- assemblyBuilder.generatedFiles += fileName
- printAssemblyBoilerplate()
-
- // print each module
- var m: Array[Module] = assemblyBuilder.GetModules()
- nomembers = true
- for(i <- 0 until m.length) {
- print(m(i).asInstanceOf[ModuleBuilder])
- }
-
- nomembers = false
- for(i <- 0 until m.length) {
- print(m(i).asInstanceOf[ModuleBuilder])
- }
- // close out file
- out.close()
- ILPrinterVisitor.currAssembly = null
- }
-
- /**
- * Visit a ModuleBuilder
- */
- @throws(classOf[IOException])
- def caseModuleBuilder(module: ModuleBuilder) {
- // print module declaration
- currentModule = module
- if (nomembers) {
- print(".module \'"); print(module.Name); println("\'")
- printAttributes(module)
- }
-
- if (!module.globalsCreated)
- module.CreateGlobalFunctions()
-
- var m: Array[MethodInfo] = module.GetMethods()
- for(i <- 0 until m.length) {
- print(m(i).asInstanceOf[MethodBuilder])
- }
-
- var t: Array[Type] = module.GetTypes()
- for(i <- 0 until t.length) {
- print(t(i).asInstanceOf[TypeBuilder])
- }
- currentModule = null
- }
-
-} // class SingleFileILPrinterVisitor
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala
deleted file mode 100644
index 57dc883898..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala
+++ /dev/null
@@ -1,261 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil._
-
-import ch.epfl.lamp.compiler.msil.util.PECustomMod
-
-import java.io.IOException
-
-/**
- * Defines and creates new instances of classes during runtime.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class TypeBuilder (module: Module, attributes: Int, fullName: String, baseType: Type, interfaces: Array[Type], declType: Type)
- extends Type(module, attributes, fullName, baseType, interfaces, declType, 0)
- with ICustomAttributeSetter
- with Visitable
-{
- import TypeBuilder._
-
- //##########################################################################
- // public members
-
- /** 'Bakes' the type. */
- def CreateType(): Type = {
- fields = fieldBuilders.toArray // (new Array[FieldInfo](fieldBuilders.size())).asInstanceOf[Array[FieldInfo]]
- methods = methodBuilders.toArray // (new Array[MethodInfo](methodBuilders.size())).asInstanceOf[Array[MethodInfo]]
- constructors = constructorBuilders.toArray // (new Array[ConstructorInfo](constructorBuilders.size())).asInstanceOf[Array[ConstructorInfo]]
- nestedTypes = nestedTypeBuilders.toArray // (new Array[Type](nestedTypeBuilders.size())).asInstanceOf[Array[Type]]
-
- raw = false
- if (DeclaringType == null)
- Module.asInstanceOf[ModuleBuilder].addType(this)
- return this
- }
-
- /**
- * Adds a new field to the class, with the given name, attributes and field type. The location has no custom mods.
- */
- def DefineField(name: String, fieldType: Type, attrs: Short): FieldBuilder = {
- val fieldTypeWithCustomMods = new PECustomMod(fieldType, null)
- DefineField(name, fieldTypeWithCustomMods, attrs)
- }
-
- /**
- * Adds a new field to the class, with the given name, attributes and (field type, custom mods) combination.
- */
- def DefineField(name: String, fieldTypeWithMods: PECustomMod, attrs: Short): FieldBuilder = {
- val field: FieldBuilder = new FieldBuilder(name, this, attrs, fieldTypeWithMods)
- fieldBuilders += field
- return field
- }
-
- /**
- * Adds a new method to the class, with the given name and
- * method signature.
- */
- def DefineMethod(name: String, attrs: Short, returnType: Type, paramTypes: Array[Type]): MethodBuilder = {
- val method = new MethodBuilder(name, this, attrs, returnType, paramTypes)
- val methods = methodBuilders.iterator
- while(methods.hasNext) {
- val m = methods.next().asInstanceOf[MethodInfo]
- if (methodsEqual(m, method)) {
- throw new RuntimeException("["+ Assembly() + "] Method has already been defined: " + m)
- }
- }
- methodBuilders += method
- return method
- }
-
- /**
- * Adds a new constructor to the class, with the given attributes
- * and signature.
- */
- def DefineConstructor(attrs: Short, callingConvention: Short, paramTypes: Array[Type]): ConstructorBuilder = {
- val constr = new ConstructorBuilder(this, attrs, paramTypes)
- val iter = constructorBuilders.iterator
- while(iter.hasNext) {
- val c = iter.next().asInstanceOf[ConstructorInfo]
- if (constructorsEqual(c, constr)) {
- throw new RuntimeException("["+ Assembly() + "] Constructor has already been defined: " + c)
- }
- }
- constructorBuilders += constr
- return constr
- }
-
- /**
- * Defines a nested type given its name.
- */
- def DefineNestedType(name: String, attributes: Int, baseType: Type, interfaces: Array[Type]): TypeBuilder = {
- val nested = nestedTypeBuilders.iterator
- while(nested.hasNext) {
- val nt = nested.next
- if (nt.Name.equals(name)) {
- val message = "Nested type " + name + " has already been defined: " + nt
- throw new RuntimeException(message)
- }
- }
- val t = new TypeBuilder(Module, attributes, name, baseType, interfaces, this)
- nestedTypeBuilders += t
- return t
- }
-
- /** Get the field with the corresponding name. */
- override def GetField(name: String): FieldInfo = {
- testRaw(name)
- return super.GetField(name)
- }
-
- /** Get all fields of the current Type. */
- override def GetFields(): Array[FieldInfo] = {
- testRaw("<GetFields>")
- return super.GetFields()
- }
-
- /**
- * Searches for a public instance constructor whose parameters
- * match the types in the specified array.
- */
- override def GetConstructor(params: Array[Type]): ConstructorInfo = {
- testRaw(".ctor" + types2String(params))
- return super.GetConstructor(params)
- }
-
- /**
- * Returns all the public constructors defined for the current Type.
- */
- override def GetConstructors(): Array[ConstructorInfo] = {
- testRaw("<GetConstructors>")
- return super.GetConstructors()
- }
-
- /**
- * Searches for the specified public method whose parameters
- * match the specified argument types.
- */
- override def GetMethod(name: String, params: Array[Type]): MethodInfo = {
- testRaw(name + types2String(params))
- return super.GetMethod(name, params)
- }
-
- /** Returns all the public methods of the current Type. */
- override def GetMethods(): Array[MethodInfo] = {
- testRaw("<GetMethods>")
- return super.GetMethods()
- }
-
- /** Searches for the nested type with the specified name. */
- override def GetNestedType(name: String): Type = {
- testRaw(name)
- super.GetNestedType(name)
- }
-
- /** Returns all the types nested within the current Type. */
- override def GetNestedTypes(): Array[Type] = {
- testRaw("<GetNestedTypes>")
- super.GetNestedTypes()
- }
-
- /** Returns a Type object that represents a one-dimensional array of the current type */
- def MakeArrayType(): Type = {
- Type.mkArray(this, 1)
- }
-
- /** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
- addCustomAttribute(constr, value)
- }
-
- def setPosition(sourceLine: Int, sourceFilename: String) {
- this.sourceLine = sourceLine
- this.sourceFilename = sourceFilename
- }
-
- def setSourceFilepath(sourceFilepath: String) {
- this.sourceFilepath = sourceFilepath
- }
-
- //##########################################################################
- // protected members
-
- var sourceLine: Int = _
- var sourceFilename: String = _
- var sourceFilepath: String = _
-
- var fieldBuilders = scala.collection.mutable.ArrayBuffer.empty[FieldBuilder]
- var methodBuilders = scala.collection.mutable.ArrayBuffer.empty[MethodBuilder]
- var constructorBuilders = scala.collection.mutable.ArrayBuffer.empty[ConstructorBuilder]
- var nestedTypeBuilders = scala.collection.mutable.ArrayBuffer.empty[TypeBuilder]
-
- // shows if the type is 'raw', i.e. still subject to changes
- private var raw = true
-
- // throws an exception if the type is 'raw',
- // i.e. not finalized by call to CreateType
- protected def testRaw(member: String) {
- if (raw)
- throw new RuntimeException("Not supported for TypeBuilder before CreateType(): " +
- FullName + "::" + member)
- }
-
- //##########################################################################
- // public members not part of the Reflection.Emit.TypeBuilder interface.
-
- /** The apply method for a visitor. */
- @throws(classOf[IOException])
- def apply(v: Visitor) {
- v.caseTypeBuilder(this)
- }
-
- //##########################################################################
-
-} // class TypeBuilder
-
-object TypeBuilder {
- def types2String(types: Array[Type]): String = {
- var s = new StringBuffer("(")
- for(i <- 0 until types.length) {
- if (i > 0) s.append(", ")
- s.append(types(i))
- }
- s.append(")")
- return s.toString()
- }
-
- def methodsEqual(m1: MethodInfo, m2: MethodInfo): Boolean = {
- if (!m1.Name.equals(m2.Name))
- return false
- if (m1.ReturnType != m2.ReturnType)
- return false
- val p1 = m1.GetParameters()
- val p2 = m2.GetParameters()
- if (p1.length != p2.length)
- return false
- for(i <- 0 until p1.length)
- if (p1(i).ParameterType != p2(i).ParameterType)
- return false
- return true
- }
-
- def constructorsEqual(c1: ConstructorInfo, c2: ConstructorInfo): Boolean = {
- if (c1.IsStatic != c2.IsStatic)
- return false
- val p1 = c1.GetParameters()
- val p2 = c2.GetParameters()
- if (p1.length != p2.length)
- return false
- for(i <- 0 until p1.length)
- if (p1(i).ParameterType != p2(i).ParameterType)
- return false
- return true
-}
-
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitable.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitable.scala
deleted file mode 100644
index 28ec801dd4..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitable.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import java.io.IOException
-
-/**
- * The Visitable interface
- */
-trait Visitable {
-
- //##########################################################################
-
- /**
- * the visitable method to apply a visitor
- */
- @throws(classOf[IOException])
- def apply(v: Visitor): Unit
-
- //##########################################################################
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitor.scala
deleted file mode 100644
index d4b84cdd4e..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitor.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import java.io.IOException
-
-/**
- * The Visitor interface to walk through the MSIL code Builder hierarchy.
- */
-trait Visitor {
-
- //##########################################################################
-
- /** Visit an AssemblyBuilder */
- @throws(classOf[IOException])
- def caseAssemblyBuilder(assemblyBuilder: AssemblyBuilder): Unit
-
- /** Visit a ModuleBuilder */
- @throws(classOf[IOException])
- def caseModuleBuilder(moduleBuilder: ModuleBuilder): Unit
-
- /** Visit a TypeBuilder */
- @throws(classOf[IOException])
- def caseTypeBuilder(typeBuilder: TypeBuilder): Unit
-
- /** Visit a FieldBuilder */
- @throws(classOf[IOException])
- def caseFieldBuilder(fieldBuilder: FieldBuilder): Unit
-
- /** Visit a ConstructorBuilder */
- @throws(classOf[IOException])
- def caseConstructorBuilder(constructorBuilder: ConstructorBuilder): Unit
-
- /** Visit a MethodBuilder */
- @throws(classOf[IOException])
- def caseMethodBuilder(methodBuilder: MethodBuilder): Unit
-
- /** Visit a ParameterBuilder */
- @throws(classOf[IOException])
- def caseParameterBuilder(parameterBuilder: ParameterBuilder): Unit
-
- /** Visit an ILGenerator */
- @throws(classOf[IOException])
- def caseILGenerator(iLGenerator: ILGenerator): Unit
-
- /** Visit an OpCode */
- @throws(classOf[IOException])
- def caseOpCode(opCode: OpCode): Unit
-
- /** Visit a LocalBuilder */
- @throws(classOf[IOException])
- def caseLocalBuilder(localBuilder: LocalBuilder): Unit
-
- //##########################################################################
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/CustomAttributesTest.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/CustomAttributesTest.java
deleted file mode 100644
index 9a6e28a545..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/tests/CustomAttributesTest.java
+++ /dev/null
@@ -1,31 +0,0 @@
-
-package ch.epfl.lamp.compiler.msil.tests;
-
-import ch.epfl.lamp.compiler.msil.*;
-import ch.epfl.lamp.compiler.msil.util.Table;
-
-import java.io.PrintStream;
-
-public class CustomAttributesTest {
- public static void main(String[] args) {
- if (args.length < 1) {
- System.err.println("You must supply a filename!");
- System.exit(1);
- }
-
- Assembly assem = Assembly.LoadFrom(args[0]);
- Type.initMSCORLIB(assem);
-
- testCustomAttributes();
- }
-
- public static void testCustomAttributes() {
- Object[] attrs = Type.GetType("System.ObsoleteAttribute")
- .GetCustomAttributes(false);
- assert attrs != null;
- for (int i = 0; i < attrs.length; i++) {
- System.out.println("\t" + attrs[i]);
- }
- }
-
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/JavaTypeTest.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/JavaTypeTest.java
deleted file mode 100644
index 96ec1bfeea..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/tests/JavaTypeTest.java
+++ /dev/null
@@ -1,18 +0,0 @@
-
-package ch.epfl.lamp.compiler.msil.tests;
-
-import ch.epfl.lamp.compiler.msil.*;
-import ch.epfl.lamp.compiler.msil.util.VJSAssembly;
-
-public class JavaTypeTest {
-
- public static void main(String[] args) {
- if (args.length < 1) {
- System.err.println("usage: java test.JavaTypeTest classname");
- System.exit(1);
- }
-
- Type type = VJSAssembly.VJSLIB.GetType(args[0]);
- MembersTest.dumpType(System.out, type);
- }
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/MembersTest.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/MembersTest.java
deleted file mode 100644
index 37a5c6ea90..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/tests/MembersTest.java
+++ /dev/null
@@ -1,100 +0,0 @@
-
-package ch.epfl.lamp.compiler.msil.tests;
-
-import ch.epfl.lamp.compiler.msil.*;
-import ch.epfl.lamp.compiler.msil.util.Table;
-
-import java.io.PrintStream;
-
-public class MembersTest {
-
- public static void main(String[] args) {
- if (args.length < 1) {
- System.err.println
- ("usage: java test.MembersTest assembly [classname]");
- System.exit(1);
- }
-
- Assembly mscorlib = Assembly.LoadFrom("mscorlib.dll");
- Type.initMSCORLIB(mscorlib);
- Assembly assem = Assembly.LoadFrom(args[0]);
- if (args.length > 1) {
- Type type = assem.GetType(args[1]);
- if (type != null)
- dumpMember(System.out, type);
- else System.err.println("Cannot find type " + args[1]
- + " in " + assem);
- } else {
- Type[] types = assem.GetTypes();
- System.out.println("Number of types in assembly " + assem
- + " -> " + types.length);
- dumpCustomAttributes(System.out, "assembly: ", assem);
- Module[] modules = assem.GetModules();
- for (int i = 0; i < modules.length; i++) {
- dumpCustomAttributes(System.out, "module " + modules[i] + ": ",
- modules[i]);
- }
- dumpMembers(System.out, types);
- }
- }
-
- public static final void dumpMember(PrintStream out, MemberInfo member) {
- try {
- if (member.MemberType() == MemberTypes.TypeInfo
- || member.MemberType() == MemberTypes.NestedType) {
- Type type = (Type)member;
- dumpCustomAttributes(out, "", type);
- out.print(TypeAttributes.accessModsToString(type.Attributes));
- out.print(type.IsInterface() ? " interface " : " class ");
- out.print(type);
- if (type.BaseType() != null)
- out.println(" extends " + type.BaseType());
- Type[] ifaces = type.GetInterfaces();
- if (ifaces.length > 0) {
- out.print("\timplements ");
- for (int i = 0; i < ifaces.length; i++) {
- out.print(ifaces[i]);
- if (i < (ifaces.length - 1))
- out.print(", ");
- }
- out.println();
- }
- out.println("{");
- int all = BindingFlags.Public | BindingFlags.DeclaredOnly// | BindingFlags.NonPublic
- | BindingFlags.Instance | BindingFlags.Static;
- dumpMembers(out, type.GetNestedTypes());
- dumpMembers(out, type.GetFields(all));
- dumpMembers(out, type.GetConstructors(all));
- dumpMembers(out, type.GetMethods(all));
- dumpMembers(out, type.GetProperties(all));
- dumpMembers(out, type.GetEvents());
- out.println("}");
- } else {
- dumpCustomAttributes(out, "", member);
- out.print(MemberTypes.toString(member.MemberType()));
- out.print(": "); out.print(member);
- out.println();
- }
- } catch (Throwable e) {
- String message = MemberTypes.toString(member.MemberType())
- + ": " + member;
- throw new RuntimeException(message, e);
- }
- }
-
- public static void dumpCustomAttributes(PrintStream out,
- String prefix,
- ICustomAttributeProvider att)
- {
- Object[] attrs = att.GetCustomAttributes(false);
- for (int j = 0; j < attrs.length; j++)
- out.println(prefix + attrs[j]);
- }
-
- public static void dumpMembers(PrintStream out, MemberInfo[] members) {
- for (int i = 0; i < members.length; i++) {
- dumpMember(out, members[i]);
- }
- }
-
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/TableDump.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/TableDump.java
deleted file mode 100644
index 1df389b011..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/tests/TableDump.java
+++ /dev/null
@@ -1,311 +0,0 @@
-
-package ch.epfl.lamp.compiler.msil.tests;
-
-import ch.epfl.lamp.compiler.msil.PEFile;
-import ch.epfl.lamp.compiler.msil.util.Table;
-import ch.epfl.lamp.compiler.msil.util.Table.*;
-
-import java.io.PrintStream;
-import java.io.FileNotFoundException;
-
-public class TableDump extends PEFile {
-
- //##########################################################################
-
- public TableDump(String filename) throws FileNotFoundException {
- super(filename);
- }
-
- /***/
- public void dump(PrintStream out) {
- out.println("CLI RVA: " + CLI_RVA);
- out.println("Optional header size: " + optHeaderSize);
- out.println("Number of sections: " + numOfSections);
- out.println();
-
- for (int i = 0; i < sections.length; i++) {
- sections[i].dump(out);
- out.println();
- }
-
- out.println("MetaData Offset: 0x" + Integer.toHexString(posMetadata));
- out.println("Number of streams: " + numOfStreams);
-
- out.println("#~ stream"); Meta.dump(out); out.println();
- out.println("#Strings stream"); Strings.dump(out); out.println();
- if (US != null) {
- out.println("#US stream"); US.dump(out); out.println();
- }
- out.println("#GUID stream"); GUID.dump(out); out.println();
- out.println("#Blob stream"); Blob.dump(out); out.println();
-
- out.println("Heap Sizes IndexedSeq = 0x0" + Integer.toHexString(heapSizes));
- out.println();
-
- for(int i = 0; i < Table.MAX_NUMBER; i++)
- if(getTable(i).rows > 0) {
- dump(out, getTable(i));
- out.println();
- }
-
- }
-
- /** Dumps the contents of this table. */
- public void dump(PrintStream out, Table table) {
- out.println("Table:" + " ID = 0x" + byte2hex(table.id));
- out.println("\tname = " + table.getTableName());
- out.println("\trows = " + table.rows);
- //out.println("\tStart pos in file = 0x" + Long.toHexString(table.start));
- for (int i = 1; i <= table.rows; i++)
- dumpRow(out, table, i);
- }
-
- public void dumpIndex(PrintStream out, int tableSetId, int index) {
- int tableId = Table.getTableId(tableSetId, index);
- int row = Table.getTableIndex(tableSetId, index);
- out.print(getTable(tableId).getTableName());
- out.print('[');
- out.print(getTable(tableId).isShort ? short2hex(row) : int2hex(row));
- out.print(']');
- }
-
- public void dumpRow(PrintStream out, Table table, int row) {
- table.readRow(row);
- out.print(table.getTableName());
- out.print("[" + short2hex(row) + "]: ");
- dumpRow(out, table);
- out.println();
- }
-
- /** Prints the current content of the fields of the class. */
- public void dumpRow(PrintStream out, Table table) {
- if (table instanceof ModuleDef) {
- ModuleDef t = (ModuleDef)table;
- out.print("Generation = 0x" + short2hex(t.Generation));
- out.print("; Name = " + getString(t.Name));
- //out.print("; Mvid = (" + bytes2hex(getGUID(Mvid)) + ")");
- } else if (table instanceof TypeRef) {
- TypeRef t = (TypeRef)table;
- out.print("FullName = " + t.getFullName());
- out.print("; ResolutionScope = 0x" + int2hex(t.ResolutionScope));
- } else if (table instanceof TypeDef) {
- TypeDef t = (TypeDef)table;
- out.print("Flags = 0x"); out.print(int2hex(t.Flags));
- out.print("; FullName = "); out.print(t.getFullName());
- out.print("; Extends = ");
- dumpIndex(out, Table._TypeDefOrRef, t.Extends);
- out.print("; FieldList = "); out.print(t.FieldList);
- out.print("; MethodList = "); out.print(t.MethodList);
- } else if (table instanceof FieldTrans) {
- FieldTrans t = (FieldTrans)table;
- out.print("Field = "); out.print(t.Field);
- } else if (table instanceof FieldDef) {
- FieldDef t = (FieldDef)table;
- out.print("Flags = 0x" + short2hex(t.Flags));
- out.print("; Name = " + t.getName());
- out.print("; Signature = (" +
- bytes2hex(getBlob(t.Signature)) + ")");
- } else if (table instanceof MethodTrans) {
- MethodTrans t = (MethodTrans)table;
- out.print("Method = "); out.print(t.Method);
- } else if (table instanceof MethodDef) {
- MethodDef t = (MethodDef)table;
- out.print("Flags = 0x" + short2hex(t.Flags));
- out.print("; Name = " + t.getName());
- out.print("; ParamList = " + t.ParamList);
- out.print("; Signature = (" +
- bytes2hex(getBlob(t.Signature)) + ")");
- } else if (table instanceof ParamDef) {
- ParamDef t = (ParamDef)table;
- out.print("Flags = 0x" + short2hex(t.Flags));
- out.print("; Name = " + t.getName());
- out.print("; Sequence = " + t.Sequence);
- } else if (table instanceof InterfaceImpl) {
- InterfaceImpl t = (InterfaceImpl)table;
- out.print("Class = 0x" + short2hex(t.Class));// + " (ref to: ");
- //TypeDef td = (TypeDef) getTable(TypeDef.ID);
- //td.readRow(Class);
- //td.dumpRow(out);
- out.print("; Interface = 0x" + short2hex(t.Interface));
- } else if (table instanceof MemberRef) {
- MemberRef t = (MemberRef)table;
- out.print("Name = " + t.getName());
- out.print("; Signature = (" +
- bytes2hex(getBlob(t.Signature)) + ")");
- out.print("; Class = " + t.Class);
- } else if (table instanceof Constant) {
- Constant t = (Constant)table;
- out.print("Parent = "); dumpIndex(out, Table._HasConstant, t.Parent);
- out.print("; Type = 0x" + byte2hex(t.Type));
- out.print("; Value = (" + bytes2hex(getBlob(t.Value)));
- out.print("); Value = " + t.getValue());
- } else if (table instanceof CustomAttribute) {
- CustomAttribute t = (CustomAttribute)table;
- //out.print("Parent = 0x" + int2hex(t.Parent));
- out.print("Parent = ");
- dumpIndex(out, Table._HasCustomAttribute, t.Parent);
- //out.print("; Type = 0x" + short2hex(t.Type));
- out.print("; Type = ");
- dumpIndex(out, Table._CustomAttributeType, t.Type);
- out.print("; Value = (" + bytes2hex(t.getValue()) + ")");
- } else if (table instanceof FieldMarshal) {
- FieldMarshal t = (FieldMarshal)table;
- out.print("NativeType = (");
- out.print(bytes2hex(getBlob(t.NativeType)) + ")");
- } else if (table instanceof DeclSecurity) {
- DeclSecurity t = (DeclSecurity)table;
- out.print("Action = 0x" + short2hex(t.Action));
- out.print("; PermissionSet = (" +
- bytes2hex(getBlob(t.PermissionSet)) + ")");
- } else if (table instanceof ClassLayout) {
- ClassLayout t = (ClassLayout)table;
- out.print("PackingSize = 0x" + short2hex(t.PackingSize));
- out.print("; ClassSize = 0x" + int2hex(t.ClassSize));
- out.print(": Parent = " + t.Parent + " (ref to: ");
- dumpRow(out, this.TypeDef(t.Parent));
- out.print(")");
- } else if (table instanceof FieldLayout) {
- FieldLayout t = (FieldLayout)table;
- out.print("Offset = 0x" + int2hex(t.Offset));
- out.print("; Field = (ref to: ");
- dumpRow(out, this.FieldDef(t.Field));
- out.print(")");
- } else if (table instanceof StandAloneSig) {
- StandAloneSig t = (StandAloneSig)table;
- out.print("StandAloneSig: Signature = (" +
- bytes2hex(getBlob(t.Signature)) + ")");
- } else if (table instanceof EventMap) {
- EventMap t = (EventMap)table;
- out.print("Parent = 0x" + int2hex(t.Parent) + " (ref to: ");
- dumpRow(out, this.TypeDef(t.Parent));
- out.print("); EventList = 0x"); out.print(int2hex(t.EventList));
- } else if (table instanceof EventDef) {
- EventDef t = (EventDef)table;
- out.print("EventFlags = 0x" + short2hex(t.EventFlags));
- out.print("; Name = " + t.getName());
- out.print("; EventType = 0x" + int2hex(t.EventType));
- } else if (table instanceof PropertyMap) {
- PropertyMap t = (PropertyMap)table;
- out.print("Parent = " + t.Parent + " (ref to: ");
- dumpRow(out, this.TypeDef(t.Parent));
- out.print(")");
- } else if (table instanceof PropertyDef) {
- PropertyDef t = (PropertyDef)table;
- out.print("Flags = 0x" + short2hex(t.Flags));
- out.print("; Name = " + t.getName());
- out.print("; Type = (" + bytes2hex(getBlob(t.Type)) + ")");
- } else if (table instanceof MethodSemantics) {
- MethodSemantics t = (MethodSemantics)table;
- out.print("Semantics = 0x" + short2hex(t.Semantics));
- out.print("; Method = 0x" + int2hex(t.Method) + " (ref to: ");
- dumpRow(out, this.MethodDef(t.Method));
- out.print("); Association = 0x" + int2hex(t.Association));
- } else if (table instanceof MethodImpl) {
- MethodImpl t = (MethodImpl)table;
- out.print("Class = (ref to: ");
- dumpRow(out, this.TypeDef(t.Class));
- out.print(")");
- } else if (table instanceof ModuleRef) {
- ModuleRef t = (ModuleRef)table;
- out.print("Name = " + t.getName());
- } else if (table instanceof TypeSpec) {
- TypeSpec t = (TypeSpec)table;
- out.print("Signature = (" +
- bytes2hex(getBlob(t.Signature)) + ")");
- } else if (table instanceof ImplMap) {
- ImplMap t = (ImplMap)table;
- out.print("ImportName = " + getString(t.ImportName));
- } else if (table instanceof FieldRVA) {
- FieldRVA t = (FieldRVA)table;
- out.print("RVA = 0x" + int2hex(t.RVA));
- out.print("; Field = (ref to: ");
- dumpRow(out, this.FieldDef(t.Field));
- out.print(")");
- } else if (table instanceof AssemblyDef) {
- AssemblyDef t = (AssemblyDef)table;
- out.print("Flags = 0x" + int2hex(t.Flags));
- out.print(" ; Name = " + getString(t.Name));
- out.print("; Culture = " + getString(t.Culture));
- out.print(" ; Version = " + t.MajorVersion + ".");
- out.print(t.MinorVersion + "." + t.BuildNumber);
- out.print("." + t.RevisionNumber);
- out.print("; HashAlgId = 0x" + int2hex(t.HashAlgId));
- out.print("; PublicKey = (");
- out.print(bytes2hex(getBlob(t.PublicKey)) + ")");
- } else if (table instanceof AssemblyProcessor) {
- AssemblyProcessor t = (AssemblyProcessor)table;
- out.print("Processor = 0x" + int2hex(t.Processor));
- } else if (table instanceof AssemblyOS) {
- AssemblyOS t = (AssemblyOS)table;
- out.print("!?!");
- } else if (table instanceof AssemblyRef) {
- AssemblyRef t = (AssemblyRef)table;
- out.print("Flags = 0x" + int2hex(t.Flags));
- out.print("; Name = " + getString(t.Name));
- out.print("; Culture = " + getString(t.Culture));
- out.print("; Version = " + t.MajorVersion + "." + t.MinorVersion);
- out.print("." + t.BuildNumber + "." + t.RevisionNumber);
- out.print("; PublicKeyOrToken = (" +
- bytes2hex(getBlob(t.PublicKeyOrToken)) + ")");
- out.print("; HashValue = (" +
- bytes2hex(getBlob(t.HashValue)) + ")");
- } else if (table instanceof AssemblyRefProcessor) {
- AssemblyRefProcessor t = (AssemblyRefProcessor)table;
- out.print("!?!");
- } else if (table instanceof AssemblyRefOS) {
- AssemblyRefOS t = (AssemblyRefOS)table;
- out.print("!?!");
- } else if (table instanceof FileDef) {
- FileDef t = (FileDef)table;
- out.print("Flags = 0x" + int2hex(t.Flags));
- out.print("; Name = " + t.getName());
- out.print("; HashValue = (" + bytes2hex(getBlob(t.HashValue)) +")");
- } else if (table instanceof ExportedType) {
- ExportedType t = (ExportedType)table;
- out.print("FullName = " + t.getFullName());
- } else if (table instanceof ManifestResource) {
- ManifestResource t = (ManifestResource)table;
- out.print("Name = " + getString(t.Name));
- out.print("; Flags = 0x" + int2hex(t.Flags));
- } else if (table instanceof NestedClass) {
- NestedClass t = (NestedClass)table;
- out.print(this.TypeDef(t.EnclosingClass).getFullName());
- out.print("/");
- out.print(this.TypeDef(t.NestedClass).getFullName());
- } else
- throw new RuntimeException("Unknown table " + table.getClass());
- }
-
- //##########################################################################
-
- public static void main(String[] args) {
- if (args.length < 1) {
- System.err.println("You must supply a filename!");
- System.exit(1);
- }
-
- TableDump file = null;
- try {
- file = new TableDump(args[0]);
- } catch (FileNotFoundException e) { e.printStackTrace(); }
-
- if (args.length > 1) {
- nextarg:
- for (int i = 1; i < args.length; i++) {
- String name = args[i];
- for (int tableId = 0; tableId < Table.MAX_NUMBER; tableId++) {
- Table table = file.getTable(tableId);
- if ((table.rows > 0) && name.equals(table.getTableName())) {
- file.dump(System.out, table);
- System.out.println();
- continue nextarg;
- }
- }
- System.err.println("No such table: " + name);
- }
- } else
- file.dump(System.out);
- }
-
- //##########################################################################
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/Test.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/Test.java
deleted file mode 100644
index 2c5946a734..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/tests/Test.java
+++ /dev/null
@@ -1,92 +0,0 @@
-
-package test;
-
-import ch.epfl.lamp.compiler.msil.*;
-import ch.epfl.lamp.compiler.msil.util.Table;
-
-import java.io.PrintStream;
-
-public class Test {
- public static void main(String[] args) {
- if (args.length < 1) {
- System.err.println("You must supply a filename!");
- System.exit(1);
- }
-
- Assembly assem = Assembly.LoadFrom(args[0]);
- Type.initMSCORLIB(assem);
-
- //"System.Collections.ArrayList"
- if (args.length >= 2) {
- Type t = Type.GetType(args[1]);
- dumpType(System.out, t);
- } else {
- dumpAssembly(assem);
- }
- }
-
-
- public static void dumpAssembly(Assembly assem) {
- Module[] modules = assem.GetModules();
-// System.out.println("Modules in assembly " + assem +
-// " (" + modules.length + ")");
-// for (int i = 0; i < modules.length; i++) {
-// System.out.println("\t" + modules[i]);
-// }
-
- Type[] types = modules[0].GetTypes();
-// System.out.println("Types in assembly " + assem +
-// " (" + types.length + ")");
- for (int i = 0; i < types.length; i++) {
- System.out.println("#" + i + " -> " + types[i]);
- types[i].completeType();
- }
- }
-
- public static final void dumpType(PrintStream out, Type type) {
- out.println("Type = " + type);
- out.println("Name = " + type.Name);
- out.println("Namespace = " + type.Namespace);
- out.println("FullName = " + type.FullName);
- out.println("Attributes = " + TypeAttributes.toString(type.Attributes));
- out.println("BaseType = " + type.BaseType);
- Type[] ifaces = type.GetInterfaces();
- if (ifaces != null) {
- for (int i = 0; i < ifaces.length; i++)
- out.println("\timplements " + ifaces[i]);
- }
- out.println("Assembly = " + type.Assembly);
- out.println("Module = " + type.Module);
- out.println("DeclaringType = " + type.DeclaringType);
- out.println("IsInterface = " + type.IsInterface);
- out.println("IsAbstract = " + type.IsAbstract);
-
- FieldInfo[] fields = type.GetFields(BindingFlags.Instance
- | BindingFlags.Static
- | BindingFlags.NonPublic);
- out.println("\nFields (" + fields.length + "):");
- for (int i = 0; i < fields.length; i++) {
- out.println("\t" + fields[i]);
- out.println("\t\tDeclaringType = " + fields[i].DeclaringType);
- out.println("\t\tReflectedType = " + fields[i].ReflectedType);
- }
-
- ConstructorInfo[] constrs = type.GetConstructors();
- out.println("\nConstructors (" + constrs.length + "):");
- for (int i = 0; i < constrs.length; i++) {
- out.println("\t" + constrs[i]);
- }
-
-// MethodInfo[] methods = type.GetMethods(BindingFlags.Instance
-// | BindingFlags.Static
-// | BindingFlags.Public
-// | BindingFlags.NonPublic);
- MethodInfo[] methods = type.GetMethods();
- out.println("\nMethods (" + methods.length + "):");
- for (int i = 0; i < methods.length; i++) {
- out.println("\t" + methods[i]);
- out.println("\t\tDeclaringType = " + methods[i].DeclaringType);
- out.println("\t\tReflectedType = " + methods[i].ReflectedType);
- }
- }
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/PECustomMod.java b/src/msil/ch/epfl/lamp/compiler/msil/util/PECustomMod.java
deleted file mode 100644
index 56519e8487..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/util/PECustomMod.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package ch.epfl.lamp.compiler.msil.util;
-
-import ch.epfl.lamp.compiler.msil.Type;
-import ch.epfl.lamp.compiler.msil.CustomModifier;
-
-/**
- * A PECustomMod holds the info parsed from metadata per the CustomMod production in Sec. 23.2.7, Partition II.
- * */
-public final class PECustomMod {
-
- public final Type marked;
- public final CustomModifier[] cmods;
-
- /** Terminology:
- the CustomModifier(s) are markers,
- and the msil.Type is a type marked by those markers. */
- public PECustomMod(Type marked, CustomModifier[] cmods) {
- this.marked = marked;
- this.cmods = cmods;
- }
-
-}
-
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/PESection.java b/src/msil/ch/epfl/lamp/compiler/msil/util/PESection.java
deleted file mode 100644
index 454a94e55c..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/util/PESection.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil.util;
-
-import ch.epfl.lamp.compiler.msil.PEFile;
-
-import java.io.PrintStream;
-
-/** Describes a section from a PE/COFF file
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class PESection {
-
- private final PEFile file;
- private final long sectionStart;
-
- public final String name;
- public final int virtAddr;
- public final int virtSize;
- public final int realAddr;
- public final int realSize;
- public final int flags;
-
- private static final byte[] buf = new byte[8];
-
- public PESection(PEFile file) {
- this.file = file;
- sectionStart = file.pos();
- file.read(buf);
- int i;
- for(i = 7; (i >= 0) && (0 == buf[i]); i--);
- name = new String(buf, 0, i + 1);
- virtSize = file.readInt();
- virtAddr = file.readInt();
- realSize = file.readInt();
- realAddr = file.readInt();
- file.skip(3 * PEFile.INT_SIZE);
- flags = file.readInt();
- }
-
-
- public void dump(PrintStream out) {
- out.println("Section name: " + name +
- " (name.length=" + name.length() + ")");
- out.println("Virtual Address: 0x" + PEFile.int2hex(virtAddr));
- out.println("Virtual Size: 0x" + PEFile.int2hex(virtSize));
- out.println("Real Address: 0x" + PEFile.int2hex(realAddr));
- out.println("Real Size: 0x" + PEFile.int2hex(realSize));
- out.println("Flags: 0x" + PEFile.int2hex(flags));
- }
-
-} // class PESection
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/PEStream.java b/src/msil/ch/epfl/lamp/compiler/msil/util/PEStream.java
deleted file mode 100644
index 649d9e74f2..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/util/PEStream.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil.util;
-
-import ch.epfl.lamp.compiler.msil.PEFile;
-import ch.epfl.lamp.compiler.msil.PEFile.Sig;
-
-import java.io.PrintStream;
-import java.io.IOException;
-
-import java.nio.ByteBuffer;
-import java.nio.channels.FileChannel;
-
-/**
- * Implements support for CLI streams within a PE file.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class PEStream implements Signature {
-
- //##########################################################################
- // Members
-
- /** The name of the stream. */
- public final String name;
-
- /** The offset of the stream from the beginning of the file. */
- public final int offset;
-
- /** The size of the stream in bytes; shall be a multiple of 4. */
- public final int size;
-
- private final PEFile file;
-
- private final ByteBuffer buffer;
-
- //##########################################################################
-
- /** The PEStream class constructor.
- * @param file - the PEFile to which this stream belongs
- */
- public PEStream(PEFile file) {
- this.file = file;
- offset = file.fromRVA(file.rvaMetadata + file.readInt());
- size = file.readInt();
- buffer = file.getBuffer(offset, size);
-
- int i = 0;
- byte [] _buf = new byte [16];
- do {
- _buf[i] = (byte) file.readByte();
- i++;
- } while(0 != _buf[i-1]);
- name = new String(_buf, 0, i - 1);
-
- file.align(PEFile.INT_SIZE, file.posMetadata);
- //assert size % 4 == 0;
- }
-
- /** Move to the specified position in the stream. */
- private void seek(int pos) {
- try {
- buffer.position(pos);
- } catch (IllegalArgumentException e) {
- System.err.println("\nSeek failed in file " + file
- + " for position " + pos
- + " of stream " + name + " (" + buffer + ")");
- throw e;
- }
- }
-
- /** Return a string from the specified position in the stream. */
- public String getString(int pos) {
- seek(pos);
- buffer.mark();
- int i;
- for (i = 0; getByte() != 0; i++);
- byte[] buf = new byte[i];
- buffer.reset(); // go back to the marked position
- buffer.get(buf);
- try {
- return new String(buf, "UTF-8");
- } catch (java.io.UnsupportedEncodingException e) {
- throw new RuntimeException(e);
- }
- }
-
- /** Read a byte from the stream. */
- public int getByte() {
- return (buffer.get() + 0x0100) & 0xff;
- }
-
- /** Return the GUID at the given position in the stream. */
- public byte[] getGUID(int pos) {
- seek(pos);
- byte[] buf = new byte[32]; // 128-bit GUID
- try {
- buffer.get(buf);
- } catch (Exception e) {
- System.err.println();
- System.err.println("PEStream.getBlob(): Exception for pos = " +
- pos + " and buf.length = " + buf.length);
- System.err.println("\tbuffer = " + buffer);
- e.printStackTrace();
- throw new RuntimeException();
- }
- return buf;
- }
-
- public int readLength() {
- int length = getByte();
- if ((length & 0x80) != 0) {
- length = ((length & 0x7f) << 8) | getByte();
- if ((length & 0x4000) != 0)
- length = ((length & 0x3fff) << 16) | (getByte()<<8) | getByte();
- }
- return length;
- }
-
- /** Return a blob from the specified position in the stream. */
- public byte[] getBlob(int pos) {
- seek(pos);
- // the length indicates the number of bytes
- // AFTER the encoded size of the blob
- int length = readLength();
- byte[] buf = new byte[length];
- buffer.get(buf);
- return buf;
- }
-
- /***/
- public Sig getSignature(int pos) {
- seek(pos);
- return file.newSignature(buffer);
- }
-
- /**
- */
- public Object getConstant(int type, int pos) {
- Object val = null;
- seek(pos);
- int length = readLength(); // skip over the blob length field
- switch (type) {
- case ELEMENT_TYPE_BOOLEAN:
- assert length == 1;
- return buffer.get() == 0 ? Boolean.FALSE : Boolean.TRUE;
- case ELEMENT_TYPE_CHAR:
- assert length == 2 : "length == " + length;
- return new Character(buffer.getChar());
- case ELEMENT_TYPE_I1:
- case ELEMENT_TYPE_U1: // TODO U1 not the same as I1
- assert length == 1;
- return new Byte(buffer.get());
- case ELEMENT_TYPE_I2:
- case ELEMENT_TYPE_U2:
- assert length == 2;
- return new Short(buffer.getShort());
- case ELEMENT_TYPE_I4:
- case ELEMENT_TYPE_U4:
- assert length == 4;
- return new Integer(buffer.getInt());
- case ELEMENT_TYPE_I8:
- case ELEMENT_TYPE_U8:
- assert length == 8;
- return new Long(buffer.getLong());
- case ELEMENT_TYPE_R4:
- assert length == 4;
- return new Float(buffer.getFloat());
- case ELEMENT_TYPE_R8:
- assert length == 8;
- return new Double(buffer.getDouble());
- case ELEMENT_TYPE_STRING:
-// length /= 2;
-// char[] chars = new char[length];
-// for (int i = 0; i < length; i++)
-// chars[i] = buffer.getChar();
-// val = new String(chars);
- try {
- return new String(getBlob(pos), "UTF-16LE");
- } catch(java.io.UnsupportedEncodingException e) {
- throw new RuntimeException(e);
- }
- default: throw new RuntimeException("Illegal constant type: " + type);
- }
- }
-
- public void dump(PrintStream out) {
- out.println("Stream name: " + name + " (length " +
- name.length() + " characters)");
- out.println("Stream offset: 0x" + PEFile.int2hex(offset));
- out.println("Stream size: 0x" + PEFile.int2hex(size));
- }
-
- //##########################################################################
-} // class PEStream
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/Signature.java b/src/msil/ch/epfl/lamp/compiler/msil/util/Signature.java
deleted file mode 100644
index d5dc0ff32c..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/util/Signature.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil.util;
-
-import ch.epfl.lamp.compiler.msil.Type;
-
-/**
- * Signatures
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public interface Signature {
-
- //##########################################################################
-
- /** Marks end of a list. */
- public static final int ELEMENT_TYPE_END = 0x00;
- /** void */
- public static final int ELEMENT_TYPE_VOID = 0x01;
- /** boolean */
- public static final int ELEMENT_TYPE_BOOLEAN = 0x02;
- /** char */
- public static final int ELEMENT_TYPE_CHAR = 0x03;
- /** signed byte */
- public static final int ELEMENT_TYPE_I1 = 0x04;
- /** byte */
- public static final int ELEMENT_TYPE_U1 = 0x05;
- /** short */
- public static final int ELEMENT_TYPE_I2 = 0x06;
- /** unsigned short */
- public static final int ELEMENT_TYPE_U2 = 0x07;
- /** int */
- public static final int ELEMENT_TYPE_I4 = 0x08;
- /** unsigned int */
- public static final int ELEMENT_TYPE_U4 = 0x09;
- /** long */
- public static final int ELEMENT_TYPE_I8 = 0x0a;
- /** unsigned long */
- public static final int ELEMENT_TYPE_U8 = 0x0b;
- /** float */
- public static final int ELEMENT_TYPE_R4 = 0x0c;
- /** double */
- public static final int ELEMENT_TYPE_R8 = 0x0d;
- /** string */
- public static final int ELEMENT_TYPE_STRING = 0x0e;
- /** Followed by <type> token. */
- public static final int ELEMENT_TYPE_PTR = 0x0f;
- /** Followed by <type> token. */
- public static final int ELEMENT_TYPE_BYREF = 0x10;
- /** Followed by <type> token */
- public static final int ELEMENT_TYPE_VALUETYPE = 0x11;
- /** Followed by <type> token */
- public static final int ELEMENT_TYPE_CLASS = 0x12;
-
- public static final int ELEMENT_TYPE_VAR = 0x13;
-
- /**
- * <type> <rank> <boundsCount> <bound1> ... <loCount> <lo1> ...
- */
- public static final int ELEMENT_TYPE_ARRAY = 0x14;
-
- public static final int ELEMENT_TYPE_GENERICINST = 0x15;
- /***/
- public static final int ELEMENT_TYPE_TYPEDBYREF = 0x16;
- /** System.IntPtr */
- public static final int ELEMENT_TYPE_I = 0x18;
- /** System.UIntPtr */
- public static final int ELEMENT_TYPE_U = 0x19;
- /** Followed by full method signature. */
- public static final int ELEMENT_TYPE_FNPTR = 0x1b;
- /** System.Object. */
- public static final int ELEMENT_TYPE_OBJECT = 0x1c;
- /** Single-dim array with 0 lower bound. */
- public static final int ELEMENT_TYPE_SZARRAY = 0x1d;
-
- public static final int ELEMENT_TYPE_MVAR = 0x1e;
-
- /** Required modifier : followed by a TypeDef or TypeRef token. */
- public static final int ELEMENT_TYPE_CMOD_REQD = 0x1f;
- /** Optional modifier : followed by a TypeDef or TypeRef token. */
- public static final int ELEMENT_TYPE_CMOD_OPT = 0x20;
- /** Implemented within the CLI. */
- public static final int ELEMENT_TYPE_INTERNAL = 0x21;
- /** Or'd with following element types. */
- public static final int ELEMENT_TYPE_MODIFIER = 0x40;
- /** Sentinel for varargs method signature. */
- public static final int ELEMENT_TYPE_SENTINEL = 0x41;
- /**Denotes a local variable that points at a pinned object. */
- public static final int ELEMENT_TYPE_PINNED = 0x45;
-
- //##########################################################################
- // signature designators
-
- public static final int HASTHIS = 0x20;
- public static final int EXPLICITTHIS = 0x40;
- public static final int DEFAULT = 0x00;
- public static final int VARARG = 0x05;
- public static final int GENERIC = 0x10;
- public static final int SENTINEL = 0x41;
- public static final int C = 0x01;
- public static final int STDCALL = 0x02;
- public static final int THISCALL = 0x03;
- public static final int FASTCALL = 0x04;
- public static final int FIELD = 0x06;
- public static final int PROPERTY = 0x08;
- public static final int LOCAL_SIG = 0x07;
-
- //##########################################################################
- // extra IDs used in the serialization format of named arguments
- // to custom attributes. Reverse-engineered from compiled C# example
-
- /** What follows is a string with the full name of the type. */
- public static final int X_ELEMENT_TYPE_TYPE = 0x50;
-
- /** What follows is a string with the full name of the enumeration type*/
- public static final int X_ELEMENT_TYPE_ENUM = 0x55;
-
- /** The named argument specifies a field. */
- public static final int X_ELEMENT_KIND_FIELD = 0x53;
-
- /** The named argument specifies a property. */
- public static final int X_ELEMENT_KIND_PROPERTY = 0x54;
-
- //##########################################################################
-} // interface Signature
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/Table.java b/src/msil/ch/epfl/lamp/compiler/msil/util/Table.java
deleted file mode 100644
index 1f43b8c2fa..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/util/Table.java
+++ /dev/null
@@ -1,1859 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET Assemblies
- */
-
-
-package ch.epfl.lamp.compiler.msil.util;
-
-import ch.epfl.lamp.compiler.msil.PEFile;
-import ch.epfl.lamp.compiler.msil.PEFile.Sig;
-
-import java.io.PrintStream;
-import java.nio.ByteBuffer;
-import java.nio.MappedByteBuffer;
-
-/**
- * Represents a table in a .NET assembly
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class Table {
-
- //##########################################################################
-
- public static final int MAX_NUMBER = 64;
-
- public static final long VALID_TABLES_MASK = 0x03ff3fb7ff57L;
-
- //##########################################################################
- // fields and methods for handling predefined sets of tables
-
- public static final int TABLE_SET_LENGTH = 13;
-
- public static final int _TypeDefOrRef = 0;
- public static final int _HasConstant = 1;
- public static final int _HasCustomAttribute = 2;
- public static final int _HasFieldMarshal = 3;
- public static final int _HasDeclSecurity = 4;
- public static final int _MemberRefParent = 5;
- public static final int _HasSemantics = 6;
- public static final int _MethodDefOrRef = 7;
- public static final int _MemberForwarded = 8;
- public static final int _Implementation = 9;
- public static final int _CustomAttributeType = 10;
- public static final int _ResolutionScope = 11;
- public static final int _TypeOrMethodDef = 12;
-
-
- public static final int[][] TableSet = new int[TABLE_SET_LENGTH][];
-
- static {
- TableSet[_TypeDefOrRef] =
- new int[] {TypeDef.ID, TypeRef.ID, TypeSpec.ID};
- TableSet[_HasConstant] =
- new int[] {FieldDef.ID, ParamDef.ID, PropertyDef.ID};
- TableSet[_HasCustomAttribute] =
- new int[] {MethodDef.ID, FieldDef.ID, TypeRef.ID, TypeDef.ID,
- ParamDef.ID, InterfaceImpl.ID, MemberRef.ID, ModuleDef.ID,
- -1, PropertyDef.ID, EventDef.ID, -1, ModuleRef.ID,
- TypeSpec.ID, AssemblyDef.ID, AssemblyRef.ID,
- FileDef.ID, ExportedType.ID, ManifestResource.ID};
- TableSet[_HasFieldMarshal] =
- new int[] {FieldDef.ID, ParamDef.ID};
- TableSet[_HasDeclSecurity] =
- new int[] {TypeDef.ID, MethodDef.ID, AssemblyDef.ID};
- TableSet[_MemberRefParent] =
- new int[] {-1, TypeRef.ID, ModuleRef.ID, MethodDef.ID, TypeSpec.ID};
- TableSet[_HasSemantics] =
- new int[] {EventDef.ID, PropertyDef.ID};
- TableSet[_MethodDefOrRef] =
- new int[] {MethodDef.ID, MemberRef.ID};
- TableSet[_MemberForwarded] =
- new int[] {FieldDef.ID, MethodDef.ID};
- TableSet[_Implementation] =
- new int[] {FileDef.ID, AssemblyRef.ID, ExportedType.ID};
- TableSet[_CustomAttributeType] =
- new int[] {-1, -1, MethodDef.ID, MemberRef.ID, -1};
- TableSet[_ResolutionScope] =
- new int[] {ModuleDef.ID, ModuleRef.ID, AssemblyRef.ID, TypeRef.ID};
- TableSet[_TypeOrMethodDef] =
- new int[]{TypeDef.ID, MethodDef.ID};
- }
-
- public static final int[] NoBits =
- new int[]{2, 2, 5, 1, 2, 3, 1, 1, 1, 2, 3, 2, 1};
-
- public static int getMask(int tableSetId) {
- return (1 << NoBits[tableSetId]) - 1;
- }
-
- public static int getTableId(int tableSet, int index) {
- return TableSet[tableSet][index & getMask(tableSet)];
- }
-
- public static int getTableIndex(int tableSet, int index) {
- return index >> NoBits[tableSet];
- }
-
- public static int encodeIndex(int index, int tableSetId, int tableId) {
- int[] tableSet = TableSet[tableSetId];
- for (int i = 0; i < tableSet.length; i++) {
- if (tableSet[i] == tableId)
- return (index << NoBits[tableSetId]) | i;
- }
- throw new RuntimeException("Cannot find table #" + tableId +
- " in table set #" + tableSetId);
- }
-
- //##########################################################################
-
- private static final String [] tableName = {
- "Module", "TypeRef", "TypeDef", " FieldTrans",
- "Field", "MethodTrans", "Method", "",
- "Param", "InterfaceImpl", "MemberRef", "Constant",
- "CustomAttribute", "FieldMarshal", "DeclSecurity","ClassLayout",
- "FieldLayout", "StandAloneSig", "EventMap", "",
- "Event", "PropertyMap", "", "Property",
- "MethodSemantics", "MethodImpl", "ModuleRef", "TypeSpec",
- "ImplMap", "FieldRVA", "", "",
- "Assembly", "AssemblyProcessor","AssemblyOS", "AssemblyRef",
- "AssemblyRefProcessor","AssemblyRefOS", "File", "ExportedType",
- "ManifestResource", "NestedClass", "GenericParam", "MethodSpec",
- "GenericParamConstraint", "", "", "",
- "", "", "", "",
- "", "", "", "",//0x30-0x37
- "", "", "", "",
- "", "", "", "" //0x37-0x3f
- };
-
- /** Creates a table with the given id and number of rows.
- */
- public static Table newTable(PEFile file, int id, int rows) {
- Table table = null;
- switch(id) {
- case ModuleDef.ID: table = new ModuleDef(file, rows); break;
- case TypeRef.ID: table = new TypeRef(file, rows); break;
- case TypeDef.ID: table = new TypeDef(file, rows); break;
- case FieldTrans.ID: table = new FieldTrans(file, rows); break;
- case FieldDef.ID: table = new FieldDef(file, rows); break;
- case MethodTrans.ID: table = new MethodTrans(file, rows); break;
- case MethodDef.ID: table = new MethodDef(file, rows); break;
- case ParamDef.ID: table = new ParamDef(file, rows); break;
- case InterfaceImpl.ID: table = new InterfaceImpl(file, rows); break;
- case MemberRef.ID: table = new MemberRef(file, rows); break;
- case Constant.ID: table = new Constant(file, rows); break;
- case CustomAttribute.ID: table = new CustomAttribute(file, rows); break;
- case FieldMarshal.ID: table = new FieldMarshal(file, rows); break;
- case DeclSecurity.ID: table = new DeclSecurity(file, rows); break;
- case ClassLayout.ID: table = new ClassLayout(file, rows); break;
- case FieldLayout.ID: table = new FieldLayout(file, rows); break;
- case StandAloneSig.ID: table = new StandAloneSig(file, rows); break;
- case EventMap.ID: table = new EventMap(file, rows); break;
- case EventDef.ID: table = new EventDef(file, rows); break;
- case PropertyMap.ID: table = new PropertyMap(file, rows); break;
- case PropertyDef.ID: table = new PropertyDef(file, rows); break;
- case MethodSemantics.ID: table = new MethodSemantics(file, rows); break;
- case MethodImpl.ID: table = new MethodImpl(file, rows); break;
- case ModuleRef.ID: table = new ModuleRef(file, rows); break;
- case TypeSpec.ID: table = new TypeSpec(file, rows); break;
- case ImplMap.ID: table = new ImplMap(file, rows); break;
- case FieldRVA.ID: table = new FieldRVA(file, rows); break;
- case AssemblyDef.ID: table = new AssemblyDef(file, rows); break;
- case AssemblyProcessor.ID: table = new AssemblyProcessor(file, rows); break;
- case AssemblyOS.ID: table = new AssemblyOS(file, rows); break;
- case AssemblyRef.ID: table = new AssemblyRef(file, rows); break;
- case AssemblyRefProcessor.ID:
- table = new AssemblyRefProcessor(file, rows); break;
- case AssemblyRefOS.ID: table = new AssemblyRefOS(file, rows); break;
- case FileDef.ID: table = new FileDef(file, rows); break;
- case ExportedType.ID: table = new ExportedType(file, rows); break;
- case ManifestResource.ID: table = new ManifestResource(file, rows); break;
- case NestedClass.ID: table = new NestedClass(file, rows); break;
- case GenericParam.ID:
- table = new GenericParam(file, rows);
- break;
- case MethodSpec.ID:
- table = new MethodSpec(file, rows);
- break;
- case GenericParamConstraint.ID:
- table = new GenericParamConstraint(file, rows);
- break;
- default:
- table = new Empty(id);
- }
-// System.out.println("created table " + table.getName() + " with "
-// + table.rows + " rows");
- return table;
- }
-
-
- //##########################################################################
- // public fields
-
- /** Number of rows in the table. */
- public final int rows;
-
- /** Table ID as specified in Partition II. */
- public final int id;
-
- /** The file to which the table belongs. */
- protected final PEFile file;
-
- /** Memory mapped buffer wrapping the table. */
- protected ByteBuffer buffer;
-
- /**
- * Specifies whether a new memory-mapped byte buffer should be created
- * for this table.
- */
- protected boolean newMapping = false;
-
- /** Tells whether the table is indexed by 2-byte (short) integer
- * or by 4-byte integer. */
- public final boolean isShort;
-
- private int rowSize = -1;
-
- // the starting position of the table relative to the beginning of the file
- private long start = -1;
-
- // the number of the row whose values can be accessed via the fields of the table
- private int currentRow = 0;
-
- //##########################################################################
-
- protected Table(PEFile file, int id, int rows) {
- this.file = file;
- this.id = id;
- this.rows = rows;//file.readInt();
- this.isShort = rows < (1 << 16);
-// assert ((1L << id) & VALID_TABLES_MASK) != 0
-// : "Table does not have a vaid ID: " + byte2hex(id);
- }
-
- /**
- * Additional table initialization.
- * @return the starting position of the next table in the stream.
- */
- public final long init(long start) {
- if (rows < 1)
- return start;
- if (this.start == -1)
- this.start = start;
- else throw new RuntimeException
- ("Cannot re-initialize table \'" + getTableName() + "\'");
- rowSize = getRowSize();
- int size = rows * rowSize();
- buffer = this.newMapping ? file.mapBuffer(start, size)
- : file.getBuffer(start, size);
- return start + size;
- }
-
-
- public final String getTableName() {
- return 0 <= id && id < MAX_NUMBER ? tableName[id] : "<NoTable>";
- }
-
- /**
- * @return the size of the row in bytes
- */
- public final int rowSize() {
- return rowSize;
- }
-
- /**
- * if the underlying buffer is memory-mapped, load its contents into memory
- */
- public void load() {
- if (buffer instanceof MappedByteBuffer)
- ((MappedByteBuffer)buffer).load();
- }
-
- /***/
- public final int readByte() {
- return (buffer.get() + 0x100) & 0xff;
- }
-
- /***/
- public final int readShort() {
- return (buffer.getShort() + 0x10000) & 0xffff;
- }
-
- /***/
- public final int readInt() {
- return buffer.getInt();
- }
-
- /***/
- public final int readStringIndex() {
- return file.StringIsShort ? readShort() : readInt();
- }
-
- /***/
- public final int readBlobIndex() {
- return file.BlobIsShort ? readShort() : readInt();
- }
-
- /***/
- public final int readGUIDIndex() {
- return file.GUIDIsShort ? readShort() : readInt();
- }
-
- /***/
- public final int readTableIndex(int tableId) {
- return file.getTable(tableId).isShort ? readShort() : readInt();
- }
-
- /***/
- public final int readTableSetIndex(int tableSetId) {
- return file.indexSize[tableSetId] == 2 ? readShort() : readInt();
- }
-
- /** Read the specified row and populate the fields of the instance. */
- public final void readRow(int row) {
- seekRow(row);
- int lastSeek = buffer.position();
- populateFields();
- int rowSizeRead = (int) (buffer.position() - lastSeek);
- if (rowSizeRead != rowSize())
- throw new RuntimeException("Table ID=0x" + PEFile.byte2hex(id) +
- ": read row size = " + rowSizeRead +
- "; expected row size = " + rowSize());
- currentRow = row;
- }
-
- /** Seeks in the file the position of the specified row. */
- protected final void seekRow(int row) {
- assert row > 0 && row <= rows
- : "Index " + row + " is not within the table with #rows = " + rows;
- buffer.position((row - 1)* rowSize());
- }
-
- public final int currentRow() { return currentRow; }
-
- public final void nextRow() { readRow(currentRow() + 1); }
-
- //##########################################################################
- // abstract members
-
- /** Assigns values to the fields of the class. */
- protected abstract void populateFields();
-
- /** Returns the size of a row in bytes. */
- protected abstract int getRowSize();
-
- //##########################################################################
- // a table with 0 rows
-
- private static final class Empty extends Table {
- public Empty(int id) {
- super(null, id, 0);
- }
- protected int getRowSize() { return 0; }
- protected void populateFields() {
- throw new RuntimeException("Table 0x" + PEFile.byte2hex(id));
- }
- }
-
- //##########################################################################
- // table Module; ID=0x00; p115, 21.27
-
- public static final class ModuleDef extends Table {
- public static final int ID = 0x00;
-
- /** 2-byte value; reserved - shall be 0. */
- public int Generation;
-
- /** Index into #String. */
- public int Name;
-
- /** Index into #GUID; used to distinguish between
- * two versions of the same module. */
- public int Mvid;
-
- /** Index into #GUID; reserved - shall be 0. */
- public int EncId;
-
- /** Index into #GUID; reserved - shall be 0. */
- public int EncBaseId;
-
- public ModuleDef(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Generation = readShort();
- Name = readStringIndex();
- Mvid = readGUIDIndex();
- EncId = readGUIDIndex();
- EncBaseId = readGUIDIndex();
- }
-
- protected int getRowSize() {
- return 2 + file.getStringIndexSize() + 3*file.getGUIDIndexSize();
- }
-
- public String getName() {
- return file.getString(Name);
- }
-
- } // class ModuleDef
-
- //##########################################################################
- // table TypeRef; ID=0x01; p125, 21.35
-
- public static final class TypeRef extends Table {
- public static final int ID = 0x1;
-
- /** A ResolutionScope coded index. */
- public int ResolutionScope;
-
- /** Index into #String. */
- public int Name;
-
- /** Index into #String. */
- public int Namespace;
-
- public TypeRef(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- ResolutionScope = readTableSetIndex(_ResolutionScope);
- Name = readStringIndex();
- Namespace = readStringIndex();
- }
-
- protected int getRowSize() {
- return file.getTableSetIndexSize(_ResolutionScope) +
- 2 * file.getStringIndexSize();
- }
-
- public String getFullName() {
- String namespace = file.getString(Namespace);
- return namespace.length() == 0 ? file.getString(Name)
- : namespace + "." + file.getString(Name);
- }
-
- } // class TypeRef
-
- //##########################################################################
- // table TypeDef; ID=0x02; p120, 21.34
-
- public static final class TypeDef extends Table {
- public static final int ID = 0x02;
-
- /** 4-byte bitmask of type TypeAttributes (22.1.14). */
- public int Flags;
-
- /** Index into #String. */
- public int Name;
-
- /** Index into #String. */
- public int Namespace;
-
- /** TypeDefOrRef coded index. */
- public int Extends;
-
- /** Index into Field table.
- */
- public int FieldList;
-
- /** Index into Method table. */
- public int MethodList;
-
-
- public TypeDef(PEFile file, int rows) {
- super(file, ID, rows);
- this.newMapping = true;
- }
-
- public String getFullName() {
- String namespace = file.getString(Namespace);
- return namespace.length() == 0 ? file.getString(Name)
- : namespace + "." + file.getString(Name);
- }
-
- protected void populateFields() {
- Flags = readInt();
- Name = readStringIndex();
- Namespace = readStringIndex();
- Extends = readTableSetIndex(_TypeDefOrRef);
- FieldList = readTableIndex(FieldDef.ID);
- MethodList = readTableIndex(MethodDef.ID);
- }
-
- protected int getRowSize() {
- return 4 + 2*file.getStringIndexSize() +
- file.getTableSetIndexSize(_TypeDefOrRef) +
- file.getTableIndexSize(FieldDef.ID) +
- file.getTableIndexSize(MethodDef.ID);
- }
-
- } // class TypeDef
-
- //##########################################################################
- // Table FieldTrans; ID=0x03; undocumented
-
- /**
- * Undocumented table. Appears to be used for translating the Field entry
- * in the TypeDef(0x02) table into the real entry in the Field (0x04) table.
- */
- public static final class FieldTrans extends Table {
- public static final int ID = 0x03;
-
- public int Field;
-
- public FieldTrans(PEFile file, int rows) {
- super(file, ID, rows);
- newMapping = true;
- }
-
- protected void populateFields() {
- Field = readTableIndex(FieldDef.ID);
- }
-
- protected int getRowSize() {
- return file.getTableIndexSize(FieldDef.ID);
- }
-
- }
-
- //##########################################################################
- // table Field; ID=0x04; p102, 21.15
-
- public static final class FieldDef extends Table {
- public static final int ID = 0x04;
-
- /** 2-byte bitmask of type FieldAttributes (22.1.5). */
- public int Flags;
-
- /** Index into #String. */
- public int Name;
-
- /** Index into #Blob. */
- public int Signature;
-
- public FieldDef(PEFile file, int rows) {
- super(file, ID, rows);
- newMapping = true;
- }
-
- protected void populateFields() {
- Flags = readShort();
- Name = readStringIndex();
- Signature = readBlobIndex();
- }
-
- protected int getRowSize() {
- return 2 + file.getStringIndexSize() + file.getBlobIndexSize();
- }
-
- public String getName() { return file.getString(Name); }
-
- public Sig getSignature() { return file.getSignature(Signature); }
-
- } //class FieldDef
-
- //##########################################################################
- // Table MethodTrans; ID=0x05; undocumented
-
- /**
- * Undocumented table. Appears to be used for translating the Method entry
- * in the TypeDef(0x02) table into the real entry in the Method (0x06) table.
- */
- public static final class MethodTrans extends Table {
- public static final int ID = 0x05;
-
- public int Method;
-
- public MethodTrans(PEFile file, int rows) {
- super(file, ID, rows);
- newMapping = true;
- }
-
- protected void populateFields() {
- Method = readTableIndex(MethodDef.ID);
- }
-
- protected int getRowSize() {
- return file.getTableIndexSize(MethodDef.ID);
- }
-
- }
-
- //##########################################################################
- // table MethodDef; ID=0x06; p110, 21.24
-
- public static final class MethodDef extends Table {
- public static final int ID = 0x06;
-
- /** 4-byte constant. */
- public int RVA;
-
- /** 2-byte bitmask of type MethodImplAttributes (22.1.10). */
- public int ImplFlags;
-
- /** 2-byte bitmask of type MethodAttributes (22.1.9). */
- public int Flags;
-
- /** Index into #String. */
- public int Name;
-
- /** Index into #Blob. */
- public int Signature;
-
- /** Index into Param Table. */
- public int ParamList;
-
- public MethodDef(PEFile file, int rows) {
- super(file, ID, rows);
- newMapping = true;
- }
-
- protected void populateFields() {
- RVA = readInt();
- ImplFlags = readShort();
- Flags = readShort();
- Name = readStringIndex();
- Signature = readBlobIndex();
- ParamList = readTableIndex(ParamDef.ID);
- }
-
- protected int getRowSize() {
- return 8 + file.getStringIndexSize() + file.getBlobIndexSize() +
- file.getTableIndexSize(ParamDef.ID);
- }
-
- public String getName() { return file.getString(Name); }
-
- public Sig getSignature() { return file.getSignature(Signature); }
- } // class Method
-
- //##########################################################################
- // table Param; ID=0x08; p116, 21.30
-
- public static final class ParamDef extends Table {
- public static final int ID = 0x08;
-
- /** 2-byte bitmask of type ParamAttributes (22.1.12). */
- public int Flags;
-
- /** 2-byte constant. */
- public int Sequence;
-
- /** Index into #String. */
- public int Name;
-
- public ParamDef(PEFile file, int rows) {
- super(file, ID, rows);
- newMapping = true;
- }
-
- protected void populateFields() {
- Flags = readShort();
- Sequence = readShort();
- Name = readStringIndex();
- }
-
- protected int getRowSize() { return 4 + file.getStringIndexSize(); }
-
- public String getName() { return file.getString(Name); }
-
- } // class Param
-
- //##########################################################################
- // table InterfaceImpl, ID=0x09; p107, 21.21
-
- public static final class InterfaceImpl extends Table {
- public static final int ID = 0x09;
-
- /** Index into TypeDef table. */
- public int Class;
-
- /** Index into TypeDefOrRef table set. */
- public int Interface;
-
- public InterfaceImpl(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Class = readTableIndex(TypeDef.ID);
- Interface = readTableSetIndex(_TypeDefOrRef);
- }
-
- protected int getRowSize() {
- return file.getTableIndexSize(TypeDef.ID) +
- file.getTableSetIndexSize(_TypeDefOrRef);
- }
-
- /** Finds the index of the first InterfaceImpl entry for the given type.
- * @param targetIndex - index in the TypeDef table - the type to look for
- * @return the index of the first interface for the given type;
- * 0 if the type doesn't implement any interfaces
- */
-
- // binary search implementation
-// public int findType(int targetIndex) {
-// int l = 1, h = rows;
-// int classIndex;
-// while (l <= h) {
-// int mid = (l + h) / 2;
-// seekRow(mid);
-// classIndex = readTableIndex(TypeDef.ID);
-// if (targetIndex <= classIndex) h = mid - 1;
-// else l = mid + 1;
-// }
-// return (targetIndex == classIndex) ? h : 0;
-// }
-
- //linear search implementation
- public int findType(int targetIndex) {
- for (int i = 1; i <= rows; i++) {
- seekRow(i);
- if (targetIndex == readTableIndex(TypeDef.ID))
- return i;
- }
- return 0;
- }
-
- } // class InterfaceImpl
-
- //##########################################################################
- // table MemberRef; ID=0x0a; p109, 21.23
-
- public static final class MemberRef extends Table {
- public static final int ID = 0x0a;
-
- /** Index into MemberRefParent table set. */
- public int Class;
-
- /** Index into #String. */
- public int Name;
-
- /** Index into #Blob. */
- public int Signature;
-
- public MemberRef(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Class = readTableSetIndex(_MemberRefParent);
- Name = readStringIndex();
- Signature = readBlobIndex();
- }
-
- protected int getRowSize() {
- return file.getTableSetIndexSize(_MemberRefParent) +
- file.getStringIndexSize() + file.getBlobIndexSize();
- }
-
- public String getName() {
- return file.getString(Name);
- }
-
- public Sig getSignature() {
- return file.getSignature(Signature);
- }
-
- } // class MemberRef
-
- //##########################################################################
- // table Constant; ID=0x0b; p95, 21.9
-
- public static final class Constant extends Table {
- public static final int ID = 0x0b;
-
- /** 1-byte constant followed by 1-byte padding 0 (see 22.1.15). */
- public int Type;
-
- /** Index into HasConst table set. */
- public int Parent;
-
- /** Index into #Blob. */
- public int Value;
-
- public Constant(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Type = readShort();
- Parent = readTableSetIndex(_HasConstant);
- Value = readBlobIndex();
- }
-
- protected int getRowSize() {
- return 2 + file.getTableSetIndexSize(_HasConstant) +
- file.getBlobIndexSize();
- }
-
- public Object getValue() {
- if (Type == Signature.ELEMENT_TYPE_CLASS)
- return null;
- return file.Blob.getConstant(Type, Value);
- }
-
-
- } // class Constant
-
- //##########################################################################
- // table CustomAttribute; ID=0x0c; p95, 21.10
-
- public static final class CustomAttribute extends Table {
- public static final int ID = 0x0c;
-
- /** Index into any metadata table, except the CustomAttribute itself;
- * more precisely - index into HasCustomAttribute table set.
- */
- public int Parent;
-
- /** Index into the CustomAttributeType table set. */
- public int Type;
-
- /** Index into #Blob. */
- public int Value;
-
- public CustomAttribute(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Parent = readTableSetIndex(_HasCustomAttribute);
- Type = readTableSetIndex(_CustomAttributeType);
- Value = readBlobIndex();
- }
-
- protected int getRowSize() {
- return file.getTableSetIndexSize(_HasCustomAttribute) +
- file.getTableSetIndexSize(_CustomAttributeType) +
- file.getBlobIndexSize();
- }
-
- public byte[] getValue() {
- return Value == 0 ? null : file.getBlob(Value);
- }
- } // class CustomAttribute
-
- //##########################################################################
- // table FieldMarshal; ID=0x0d; p105, 21.17
-
- public static final class FieldMarshal extends Table {
- public static final int ID = 0x0d;
-
- /** Index into HasFieldMarshal table set. */
- public int Parent;
-
- /** Index into #Blob. */
- public int NativeType;
-
- public FieldMarshal(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Parent = readTableSetIndex(_HasFieldMarshal);
- NativeType = readBlobIndex();
- }
-
- protected int getRowSize() {
- return file.getTableSetIndexSize(_HasFieldMarshal) +
- file.getBlobIndexSize();
- }
-
- } // class FieldMarshal
-
- //##########################################################################
- // table DeclSecurity; ID=0x0e; p97, 21.11
-
- public static final class DeclSecurity extends Table {
- public static final int ID = 0x0e;
-
- /** 2-byte value. */
- public int Action;
-
- /** Index into HasDeclSecurity table set. */
- public int Parent;
-
- /** Index into #Blob. */
- public int PermissionSet;
-
- public DeclSecurity(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Action = readShort();
- Parent = readTableSetIndex(_HasDeclSecurity);
- PermissionSet = readBlobIndex();
- }
-
- protected int getRowSize() {
- return 2 + file.getTableSetIndexSize(_HasDeclSecurity) +
- file.getBlobIndexSize();
- }
-
- } // class DeclSecurity
-
- //##########################################################################
- // table ClassLayout; ID=0x0f, p92, 21.8
-
- public static final class ClassLayout extends Table {
- public static final int ID = 0x0f;
-
- /** 2-byte constant. */
- public int PackingSize;
-
- /** 4-byte constant. */
- public int ClassSize;
-
- /** Index into TypeDef table. */
- public int Parent;
-
- public ClassLayout(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- PackingSize = readShort();
- ClassSize = readInt();
- Parent = readTableIndex(TypeDef.ID);
- }
-
- protected int getRowSize() {
- return 6 + file.getTableIndexSize(TypeDef.ID);
- }
-
- } // class ClassLayout
-
- //##########################################################################
- // table FieldLayout; ID=0x10; p104, 21.16
-
- public static final class FieldLayout extends Table {
- public static final int ID = 0x10;
-
- /** 4-byte constant. */
- public int Offset;
-
- /** Index into the Field table. */
- public int Field;
-
- public FieldLayout(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Offset = readInt();
- Field = readTableIndex(FieldDef.ID);
- }
-
- protected int getRowSize() {
- return 4 + file.getTableIndexSize(FieldDef.ID);
- }
-
- } // class FieldLayout
-
- //##########################################################################
- // table StandAloneSig; ID=0x11; p119, 21.33
-
- public static final class StandAloneSig extends Table {
- public static final int ID = 0x11;
-
- /** Index into #Blob. */
- public int Signature;
-
- public StandAloneSig(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Signature = readBlobIndex();
- }
-
- protected int getRowSize() { return file.getBlobIndexSize(); }
-
- } // class StandAloneSig
-
- //##########################################################################
- // table EventMap; ID=0x12; p99, 21.12
-
- public static final class EventMap extends Table {
- public static final int ID = 0x12;
-
- /** Index into the TypeDef table. */
- public int Parent;
-
- /** Index into the Event table. */
- public int EventList;
-
- public EventMap(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Parent = readTableIndex(TypeDef.ID);
- EventList = readTableIndex(EventDef.ID);
- }
-
- protected int getRowSize() {
- return file.getTableIndexSize(TypeDef.ID) +
- file.getTableIndexSize(EventDef.ID);
- }
-
- } // class EventMap
-
- //##########################################################################
- // table Event; ID=0x14; p99, 21.13
-
- public static final class EventDef extends Table {
- public static final int ID = 0x14;
-
- /** 2-byte bitmask of type EventAttribute (22.1.4). */
- public int EventFlags;
-
- /** Index into #String. */
- public int Name;
-
- /** Index into TypeDefOrRef table set. [This corresponds to the Type
- * of the event; it is not the Type that owns the event]
- */
- public int EventType;
-
- public EventDef(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- EventFlags = readShort();
- Name = readStringIndex();
- EventType = readTableSetIndex(_TypeDefOrRef);
- }
-
- protected int getRowSize() {
- return 2 + file.getStringIndexSize() +
- file.getTableSetIndexSize(_TypeDefOrRef);
- }
-
- public String getName() { return file.getString(Name); }
-
- } // class EventDef
-
- //##########################################################################
- // table PropertyMap; ID=0x15; p119, 21.32
-
- public static final class PropertyMap extends Table {
- public static final int ID = 0x15;
-
- /** Index into the TypeDef table. */
- public int Parent;
-
- /** Index into the Property table. */
- public int PropertyList;
-
- public PropertyMap(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Parent = readTableIndex(TypeDef.ID);
- PropertyList = readTableIndex(PropertyDef.ID);
- }
-
- protected int getRowSize() {
- return file.getTableIndexSize(TypeDef.ID) +
- file.getTableIndexSize(PropertyDef.ID);
- }
-
- } // class PropertyMap
-
- //##########################################################################
- // table Property; ID=0x17; p117, 21.31
-
- public static final class PropertyDef extends Table {
- public static final int ID = 0x17;
-
- /** 2-byte bitmask of type PropertyAttributes (22.1.13). */
- public int Flags;
-
- /** Index into #String. */
- public int Name;
-
- /** Index into #Blob. (Indexes the signature in the #Blob) */
- public int Type;
-
- public PropertyDef(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Flags = readShort();
- Name = readStringIndex();
- Type = readBlobIndex();
- }
-
- protected int getRowSize() {
- return 2 + file.getStringIndexSize() +
- file.getBlobIndexSize();
- }
-
- public String getName() { return file.getString(Name); }
-
- public Sig getSignature() { return file.getSignature(Type); }
-
- } // class PropertyDef
-
- //##########################################################################
- // table MethodSemantics; ID=0x18; p114, 21.26
-
- public static final class MethodSemantics extends Table {
- public static final int ID = 0x18;
-
- /** 2-byte bitmask of type MethodSemanticsAttribute (22.1.11). */
- public int Semantics;
-
- /** Index into the Method table. */
- public int Method;
-
- /** Index into Event or Property table (HasSemantics table set). */
- public int Association;
-
- public MethodSemantics(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Semantics = readShort();
- Method = readTableIndex(MethodDef.ID);
- Association = readTableSetIndex(_HasSemantics);
- }
-
- protected int getRowSize() {
- return 2 + file.getTableIndexSize(MethodDef.ID) +
- file.getTableSetIndexSize(_HasSemantics);
- }
-
- public boolean isGetter() { return (Semantics & Getter) != 0; }
- public boolean isSetter() { return (Semantics & Setter) != 0; }
- public boolean isOther() { return (Semantics & Other) != 0; }
- public boolean isAddOn() { return (Semantics & AddOn) != 0; }
- public boolean isRemoveOn() { return (Semantics & RemoveOn) != 0; }
- public boolean isFire() { return (Semantics & Fire) != 0; }
-
- private static final short Setter = (short)0x0001;
- private static final short Getter = (short)0x0002;
- private static final short Other = (short)0x0004;
- private static final short AddOn = (short)0x0008;
- private static final short RemoveOn = (short)0x0010;
- private static final short Fire = (short)0x0020;
-
- } // class MethodSemantics
-
-
- //##########################################################################
- // table MethodImpl; ID=0x19; p113, 21.25
-
- public static final class MethodImpl extends Table {
- public static final int ID = 0x19;
-
- /** Index into the TypeDef table. */
- public int Class;
-
- /** Index into MethodDefOrRef table set. */
- public int MethodBody;
-
- /** Index into MethodDefOrRef table set. */
- public int MethodDeclaration;
-
- public MethodImpl(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Class = readTableIndex(TypeDef.ID);
- MethodBody = readTableSetIndex(_MethodDefOrRef);
- MethodDeclaration = readTableSetIndex(_MethodDefOrRef);
- }
-
- protected int getRowSize() {
- return file.getTableIndexSize(TypeDef.ID) +
- 2 * file.getTableSetIndexSize(_MethodDefOrRef);
- }
-
- } // class MethodImpl
-
- //##########################################################################
- // table ModuleRef; ID=0x1a; p116, 21.28
-
- public static final class ModuleRef extends Table {
- public static final int ID = 0x1a;
-
- /** Index into #String. */
- public int Name;
-
- public ModuleRef(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Name = readStringIndex();
- }
-
- protected int getRowSize() { return file.getStringIndexSize(); }
-
- public String getName() { return file.getString(Name); }
-
- } // class ModuleRef
-
- //##########################################################################
- // table TypeSpec; ID=0x1b; p126, 21.36
-
- public static final class TypeSpec extends Table {
- public static final int ID = 0x1b;
-
- /** Index into #Blob, where the blob is formatted
- * as specified in 22.2.15
- */
- public int Signature;
-
- public TypeSpec(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Signature = readBlobIndex();
- }
-
- protected int getRowSize() { return file.getBlobIndexSize(); }
-
- public Sig getSignature() { return file.getSignature(Signature); }
- } // class TypeSpec
-
- //##########################################################################
- // table ImplMap; ID=0x1c; p107, 21.20
-
- public static final class ImplMap extends Table {
- public static final int ID = 0x1c;
-
- /** 2-byte bitmask of type PInvokeAttributes (22.1.7). */
- public int MappingFlags;
-
- /** Index into MemberForwarded table set. */
- public int MemberForwarded;
-
- /** Index into #String. */
- public int ImportName;
-
- /** Index into the ModuleRef table. */
- public int ImportScope;
-
- public ImplMap(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- MappingFlags = readShort();
- MemberForwarded = readTableSetIndex(_MemberForwarded);
- ImportName = readStringIndex();
- ImportScope = readTableIndex(ModuleRef.ID);
- }
-
- protected int getRowSize() {
- return 2 + file.getTableSetIndexSize(_MemberForwarded) +
- file.getStringIndexSize() +
- file.getTableIndexSize(ModuleRef.ID);
- }
-
- } // class ImplMap
-
- //##########################################################################
- // table FieldRVA; ID=0x1d; p106, 21.18
-
- public static final class FieldRVA extends Table {
- public static final int ID = 0x1d;
-
- /** 4-byte constant. */
- public int RVA;
-
- /** Index into the Field table. */
- public int Field;
-
- public FieldRVA(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- RVA = readInt();
- Field = readTableIndex(Table.FieldDef.ID);
- }
-
- protected int getRowSize() {
- return 4 + file.getTableIndexSize(FieldDef.ID);
- }
-
- }
-
- //##########################################################################
- // table Assembly; ID=0x20; p90, 21.2
-
- public static final class AssemblyDef extends Table {
- public static final int ID = 0x20;
-
- /** 4-byte constant of type AssemblyHashAlgorithm, clause 22.1.1 */
- public int HashAlgId;
-
- /** 2-byte constant */
- public int MajorVersion;
-
- /** 2-byte constant */
- public int MinorVersion;
-
- /** 2-byte constant */
- public int BuildNumber;
-
- /** 2-byte constant */
- public int RevisionNumber;
-
- /** 4-byte constant */
- public int Flags;
-
- /** index into #Blob */
- public int PublicKey;
-
- /** index into #String */
- public int Name;
-
- /** index into #String */
- public int Culture;
-
- public AssemblyDef(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- HashAlgId = readInt();
- MajorVersion = readShort();
- MinorVersion = readShort();
- BuildNumber = readShort();
- RevisionNumber = readShort();
- Flags = readInt();
- PublicKey = readBlobIndex();
- Name = readStringIndex();
- Culture = readStringIndex();
- }
-
- protected int getRowSize() {
- return 16 + file.getBlobIndexSize() + 2*file.getStringIndexSize();
- }
-
- } // class AssemblyDef
-
- //##########################################################################
- // table AssemblyProcessor; ID=0x21; p91, 21.4
-
- public static final class AssemblyProcessor extends Table {
- public static final int ID = 0x21;
-
- /** 4-byte constant. */
- public int Processor;
-
- public AssemblyProcessor(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Processor = readInt();
- }
-
- protected int getRowSize() { return 4; }
-
- }
-
- //##########################################################################
- // table AssemblyOS; ID = 0x22; p90, 21.3
-
- public static final class AssemblyOS extends Table {
- public static final int ID = 0x22;
-
- /** 4-byte constant. */
- public int OSPlatformID;
-
- /** 4-byte constant. */
- public int OSMajorVersion;
-
- /** 4-byte constant. */
- public int OSMinorVersion;
-
- public AssemblyOS(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- OSPlatformID = readInt();
- OSMajorVersion = readInt();
- OSMinorVersion = readInt();
- }
-
- protected int getRowSize() { return 12; }
-
- }
-
- //##########################################################################
- // table AssemblyRef; ID = 0x23; pp91, 21.5
-
- public static final class AssemblyRef extends Table {
- public static final int ID = 0x23;
-
- /** 2-byte constant. */
- public int MajorVersion;
-
- /** 2-byte constant. */
- public int MinorVersion;
-
- /** 2-byte constant. */
- public int BuildNumber;
-
- /** 2-byte constant. */
- public int RevisionNumber;
-
- /** 4-byte bitmask of type AssemblyFlags (22.1.2). */
- public int Flags;
-
- /** index into #Blob. */
- public int PublicKeyOrToken;
-
- /** index into #String. */
- public int Name;
-
- /** index into #String. */
- public int Culture;
-
- /** index into #Blob. */
- public int HashValue;
-
- public AssemblyRef(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- MajorVersion = readShort();
- MinorVersion = readShort();
- BuildNumber = readShort();
- RevisionNumber = readShort();
- Flags = readInt();
- PublicKeyOrToken = readBlobIndex();
- Name = readStringIndex();
- Culture = readStringIndex();
- HashValue = readBlobIndex();
- }
-
- protected int getRowSize() {
- return 12 + 2*file.getBlobIndexSize() + 2*file.getStringIndexSize();
- }
-
- public String getName() { return file.getString(Name); }
- }
-
- //##########################################################################
- // table AssemblyRefProcessor; ID=0x24; p92, 21.7
-
- public static final class AssemblyRefProcessor extends Table {
- public static final int ID = 0x24;
-
- /** 4-byte constant. */
- public int Processor;
-
- /** Index into the AssemblyRef table. */
- public int AssemblyRef;
-
- public AssemblyRefProcessor(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Processor = readInt();
- AssemblyRef = readTableIndex(Table.AssemblyRef.ID);
- }
-
- protected int getRowSize() {
- return 4 + file.getTableIndexSize(Table.AssemblyRef.ID);
- }
-
- } // class AssemblyRefProcessor
-
- //##########################################################################
- // table AssemblyRefOS; ID=0x25; p92, 21.6
-
- public static final class AssemblyRefOS extends Table {
- public static final int ID = 0x25;
-
- /** 4-byte constant. */
- public int OSPlatformId;
-
- /** 4-byte constant. */
- public int OSMajorVersion;
-
- /** 4-byte constant. */
- public int OSMinorVersion;
-
- /** Index into the AssemblyRef table. */
- public int AssemblyRef;
-
- public AssemblyRefOS(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- OSPlatformId = readInt();
- OSMajorVersion = readInt();
- OSMinorVersion = readInt();
- AssemblyRef = readTableIndex(Table.AssemblyRef.ID);
- }
-
- protected int getRowSize() {
- return 12 + file.getTableIndexSize(Table.AssemblyRef.ID);
- }
-
- } // class AssemblyRefOS
-
- //##########################################################################
- // table File; ID=0x26; p106, 21.19
-
- public static final class FileDef extends Table {
- public static final int ID = 0x26;
-
- /** 4-byte bitmask of type FileAttributes (22.1.6). */
- public int Flags;
-
- /** Index into #String. */
- public int Name;
-
- /** Index into #Blob. */
- public int HashValue;
-
- public FileDef(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Flags = readInt();
- Name = readStringIndex();
- HashValue = readBlobIndex();
- }
-
- protected int getRowSize() {
- return 4 + file.getStringIndexSize() + file.getBlobIndexSize();
- }
-
- public String getName() {
- return file.getString(Name);
- }
-
- } // class FileDef
-
- //##########################################################################
- // table ExportedType; ID=0x27; p100, 21.14
-
- public static final class ExportedType extends Table {
- public static final int ID = 0x27;
-
- /** 4-byte bitmask of type TypeAttribute (22.1.6). */
- public int Flags;
-
- /** 4-byte index into a TypeDef table of
- * another module in this assembly.
- */
- public int TypeDefId;
-
- /** Index into #String. */
- public int TypeName;
-
- /** Index into #String. */
- public int TypeNamespace;
-
- /** Index into one of two tables as follows:
- * - 'File' table, where that entry says which module
- * in the current assembly holds the TypeDef
- * - 'ExportedType' table, where that entry is
- * the enclosing Type of the current nested Type
- */
- public int Implementation;
-
- public ExportedType(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Flags = readInt();
- TypeDefId = readInt();
- TypeName = readStringIndex();
- TypeNamespace = readStringIndex();
- Implementation = readTableSetIndex(_Implementation);
- }
-
- protected int getRowSize() {
- return 8 + 2*file.getStringIndexSize() +
- file.getTableSetIndexSize(_Implementation);
- }
-
- public String getFullName() {
- String namespace = file.getString(TypeNamespace);
- return namespace.length() == 0 ? file.getString(TypeName)
- : namespace + "." + file.getString(TypeName);
- }
-
- } // class ExportedType
-
- //##########################################################################
- // table ManifestResource; ID=0x28; p108, 21.22
-
- public static final class ManifestResource extends Table {
- public static final int ID = 0x28;
-
- /** 4-byte constant. */
- public int Offset;
-
- /** 4-byte bitmask of type ManifestResourceAttributes (22.1.8). */
- public int Flags;
-
- /** Index into #String. */
- public int Name;
-
- /** Index into the Implementation table set. */
- public int Implementation;
-
- public ManifestResource(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Offset = readInt();
- Flags = readInt();
- Name = readStringIndex();
- Implementation = readTableSetIndex(_Implementation);
- }
-
- protected int getRowSize() {
- return 8 + file.getStringIndexSize() +
- file.getTableSetIndexSize(_Implementation);
- }
-
- } // class ManifestResource
-
- //##########################################################################
- // table NestedClass; ID=0x29; p116, 21.29
-
- public static final class NestedClass extends Table {
- public static final int ID = 0x29;
-
- /** Index into the TypeDef table. */
- public int NestedClass;
-
- /** Index into the TypeDef table. */
- public int EnclosingClass;
-
- public NestedClass(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- NestedClass = readTableIndex(TypeDef.ID);
- EnclosingClass = readTableIndex(TypeDef.ID);
- }
-
- protected int getRowSize() {
- return 2 * file.getTableIndexSize(TypeDef.ID);
- }
-
- } // class NestedClass
-
- //##########################################################################
- // table GenericParam; ID=0x2a; p137, 22.20
-
- public static final class GenericParam extends Table {
- public static final int ID = 0x2a;
-
- public int Number;
- public int Flags;
- public int Owner; // a TypeOrMethodDef (Sec 24.2.6) coded index
- public int Name; // a non-null index into the String heap
-
- private java.util.Map /*<Integer, java.util.Set<Integer>>*/ GenericParamIdxesForMethodDefIdx =
- new java.util.HashMap();
- private java.util.Map /*<Integer, java.util.Set<Integer>>*/ GenericParamIdxesForTypeDefIdx =
- new java.util.HashMap();
-
- private void addToMap(int key, int value, java.util.Map IdxesForIdx) {
- java.util.Set /*<Integer>*/ bucket = (java.util.Set)IdxesForIdx.get(Integer.valueOf(key));
- if(bucket == null) {
- bucket = new java.util.HashSet();
- IdxesForIdx.put(Integer.valueOf(key), bucket);
- }
- bucket.add(Integer.valueOf(value));
- }
-
- /** Indexes of rows in the GenericParam table representing type parameters defined by the type given by
- * its row index TypeDefIdx (in the TypeDef table).
- * No need to position the current record before invoking this method. */
- public int[] getTVarIdxes(int TypeDefIdx) {
- if(!mapsPopulated) {
- initMaps();
- }
- java.util.Set bucket = (java.util.Set)GenericParamIdxesForTypeDefIdx.get(Integer.valueOf(TypeDefIdx));
- if(bucket == null) {
- bucket = java.util.Collections.EMPTY_SET;
- }
- int[] res = new int[bucket.size()];
- java.util.Iterator /*<Integer>*/ it = bucket.iterator();
- for(int i = 0; i < bucket.size(); i++) {
- res[i] = ((Integer)it.next()).intValue();
- }
- return res;
- }
-
- /** Indexes of rows in the GenericParam table representing type parameters defined by the method given by
- * its row index MethodDefIdx (in the MethodDef table)
- * No need to position the current record before invoking this method. */
- public int[] getMVarIdxes(int MethodDefIdx) {
- if(!mapsPopulated) {
- initMaps();
- }
- java.util.Set bucket = (java.util.Set)GenericParamIdxesForMethodDefIdx.get(Integer.valueOf(MethodDefIdx));
- if(bucket == null) {
- bucket = java.util.Collections.EMPTY_SET;
- }
- int[] res = new int[bucket.size()];
- java.util.Iterator /*<Integer>*/ it = bucket.iterator();
- for(int i = 0; i < bucket.size(); i++) {
- res[i] = ((Integer)it.next()).intValue();
- }
- return res;
- }
-
- private boolean mapsPopulated = false;
-
- private void initMaps() {
- mapsPopulated = true;
- for (int currentParamRow = 1; currentParamRow <= rows; currentParamRow++) {
- int currentOwner = file.GenericParam(currentParamRow).Owner;
- int targetTableId = Table.getTableId(Table._TypeOrMethodDef, currentOwner);
- int targetRow = currentOwner >> Table.NoBits[Table._TypeOrMethodDef];
- if(targetTableId == TypeDef.ID){
- addToMap(targetRow, currentParamRow, GenericParamIdxesForTypeDefIdx);
- } else if(targetTableId == MethodDef.ID) {
- addToMap(targetRow, currentParamRow, GenericParamIdxesForMethodDefIdx);
- } else {
- throw new RuntimeException();
- }
- }
- }
-
- public GenericParam(PEFile file, int rows) {
- super(file, ID, rows);
- this.newMapping = true;
- }
-
- protected void populateFields() {
- Number = readShort();
- Flags = readShort();
- Owner = readTableSetIndex(_TypeOrMethodDef);
- Name = readStringIndex();
- }
-
- /** This method assumes populateFields() has just been called to set Flags for the current record. */
- public boolean isInvariant() {
- /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes] */
- return (Flags & 0x0003) == 0;
- }
-
- /** This method assumes populateFields() has just been called to set Flags for the current record. */
- public boolean isCovariant() {
- /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes] */
- return (Flags & 0x0003) == 1;
- }
-
- /** This method assumes populateFields() has just been called to set Flags for the current record. */
- public boolean isContravariant() {
- /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes] */
- return (Flags & 0x0003) == 2;
- }
-
- /** This method assumes populateFields() has just been called to set Flags for the current record. */
- public boolean isReferenceType() {
- /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes] */
- return (Flags & 0x001C) == 4;
- }
-
- /** This method assumes populateFields() has just been called to set Flags for the current record. */
- public boolean isValueType() {
- /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes] */
- return (Flags & 0x001C) == 8;
- }
-
- /** This method assumes populateFields() has just been called to set Flags for the current record. */
- public boolean hasDefaultConstructor() {
- /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes] */
- return (Flags & 0x001C) == 0x0010;
- }
-
- protected int getRowSize() {
- return 2 + 2 + file.getTableSetIndexSize(_TypeOrMethodDef) + file.getStringIndexSize();
- /* Columns:
- Number (2 bytes),
- Flags (2 bytes),
- Owner (coded token of type TypeOrMethodDef),
- Name (offset in the #Strings stream).
- */
- }
-
- public String getName() {
- return file.getString(Name);
- }
-
- } // class GenericParam
-
-
- //##########################################################################
- // table GenericParamConstraint; ID=0x2c; p139, 22.20
-
- public static final class GenericParamConstraint extends Table {
- public static final int ID = 0x2c;
-
- public int Owner; // an index into the GenericParam table
- public int Constraint; // a TypeDefOrRef (Sec 24.2.6) coded index
-
- public GenericParamConstraint(PEFile file, int rows) {
- super(file, ID, rows);
- this.newMapping = true;
- }
-
- protected void populateFields() {
- Owner = readTableIndex(GenericParam.ID);
- Constraint = readTableSetIndex(_TypeDefOrRef);
- }
-
- protected int getRowSize() {
- return file.getTableIndexSize(GenericParam.ID) + file.getTableSetIndexSize(_TypeDefOrRef);
- /* Columns:
- Owner (RID in the GenericParam table),
- Constraint (coded token of type TypeDefOrRef).
- */
- }
-
- private boolean mapPopulated = false;
-
- /** Indexes of rows (in the TypeDef, TypeRef, or TypeSpec tables) denoting the base class (if any)
- * and interfaces (if any) that the generic parameter (of TVar or MVar kind) should support, where
- * that generic parameter is represented by its index into the GenericParam table. */
- public int[] getTypeDefOrRefIdxes(int genParamIdx) {
- if(!mapPopulated) {
- initMap();
- }
- java.util.Set bucket = (java.util.Set)TypeDefOrRefIdxesForGenParamIdx.get(Integer.valueOf(genParamIdx));
- if(bucket == null) {
- bucket = java.util.Collections.EMPTY_SET;
- }
- int[] res = new int[bucket.size()];
- java.util.Iterator /*<Integer>*/ it = bucket.iterator();
- for(int i = 0; i < bucket.size(); i++) {
- res[i] = ((Integer)it.next()).intValue();
- }
- return res;
- }
-
-
- private void initMap() {
- mapPopulated = true;
- for (int currentConstraintRow = 1; currentConstraintRow <= rows; currentConstraintRow++) {
- int targetGenericParam = file.GenericParamConstraint(currentConstraintRow).Owner;
- int value = file.GenericParamConstraint.Constraint;
- addToMap(targetGenericParam, value);
- }
- }
-
- private java.util.Map /*<Integer, java.util.Set<Integer>>*/ TypeDefOrRefIdxesForGenParamIdx =
- new java.util.HashMap();
-
- private void addToMap(int key, int value) {
- java.util.Set /*<Integer>*/ bucket = (java.util.Set)TypeDefOrRefIdxesForGenParamIdx.get(Integer.valueOf(key));
- if(bucket == null) {
- bucket = new java.util.HashSet();
- TypeDefOrRefIdxesForGenParamIdx.put(Integer.valueOf(key), bucket);
- }
- bucket.add(Integer.valueOf(value));
- }
-
- } // class GenericParamConstraint
-
- //##########################################################################
- // table MethodSpec; ID=0x2b; p149, in Sec. 22.29 of Partition II
-
- public static final class MethodSpec extends Table {
- public static final int ID = 0x2b;
-
- /* an index into the MethodDef or MemberRef table, specifying which generic method this row is an instantiation of.
- A MethodDefOrRef (Sec. 24.2.6) coded index */
- public int Method;
-
- /* an index into the Blob heap (Sec. 23.2.15), holding the signature of this instantiation */
- public int Instantiation;
-
- public MethodSpec(PEFile file, int rows) {
- super(file, ID, rows);
- this.newMapping = true;
- }
-
- protected void populateFields() {
- Method = readTableSetIndex(_MethodDefOrRef);
- Instantiation = readBlobIndex();
- }
-
- protected int getRowSize() {
- return file.getTableSetIndexSize(_MethodDefOrRef) + file.getBlobIndexSize();
- }
-
-
- } // class MethodSpec
- //##########################################################################
-
-} // class Table
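
The TableSet/NoBits machinery removed above packs a row number and a small table tag into one coded index: the tag occupies the low NoBits bits and the row number the remaining high bits. A standalone Scala sketch of the same arithmetic for the TypeDefOrRef set (tag order and bit count taken from the code above; the object name is illustrative only):

object CodedIndexDemo extends App {
  val noBits  = 2                        // NoBits(_TypeDefOrRef)
  val members = Array(0x02, 0x01, 0x1b)  // TableSet(_TypeDefOrRef): TypeDef, TypeRef, TypeSpec

  def encode(row: Int, tableId: Int): Int =
    (row << noBits) | members.indexOf(tableId)   // row in the high bits, tag in the low bits

  def decodeTableId(coded: Int): Int = members(coded & ((1 << noBits) - 1))
  def decodeRow(coded: Int): Int     = coded >> noBits

  val coded = encode(7, 0x01)            // row 7 of the TypeRef table
  assert(decodeTableId(coded) == 0x01 && decodeRow(coded) == 7)
  printf("coded=0x%x tableId=0x%02x row=%d%n", coded, decodeTableId(coded), decodeRow(coded))
}
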
diff --git a/src/partest/README b/src/partest/README
index 0434aa7499..17594dbb1e 100644
--- a/src/partest/README
+++ b/src/partest/README
@@ -24,7 +24,6 @@ Other arguments:
* --run next files test the interpreter and all backends
* --jvm next files test the JVM backend
* --res next files test the resident compiler
- * --buildmanager next files test the build manager
* --shootout next files are shootout tests
* --script next files test the script runner
* ''-Dpartest.scalac_opts=...'' -> add compiler options
diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala
index d73d99bc89..7495f97efd 100644
--- a/src/partest/scala/tools/partest/CompilerTest.scala
+++ b/src/partest/scala/tools/partest/CompilerTest.scala
@@ -21,7 +21,7 @@ abstract class CompilerTest extends DirectTest {
lazy val global: Global = newCompiler()
lazy val units = compilationUnits(global)(sources: _ *)
import global._
- import definitions._
+ import definitions.{ compilerTypeFromTag }
override def extraSettings = "-usejavacp -d " + testOutput.path
@@ -32,7 +32,6 @@ abstract class CompilerTest extends DirectTest {
def sources: List[String] = List(code)
// Utility functions
-
class MkType(sym: Symbol) {
def apply[M](implicit t: ru.TypeTag[M]): Type =
if (sym eq NoSymbol) NoType
@@ -50,7 +49,7 @@ abstract class CompilerTest extends DirectTest {
}
class SymsInPackage(pkgName: String) {
- def pkg = rootMirror.getRequiredPackage(pkgName)
+ def pkg = rootMirror.getPackage(pkgName)
def classes = allMembers(pkg) filter (_.isClass)
def modules = allMembers(pkg) filter (_.isModule)
def symbols = classes ++ terms filterNot (_ eq NoSymbol)
diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala
index 483cb491a1..3f61062073 100644
--- a/src/partest/scala/tools/partest/DirectTest.scala
+++ b/src/partest/scala/tools/partest/DirectTest.scala
@@ -42,10 +42,7 @@ abstract class DirectTest extends App {
newCompiler(settings)
}
- def newCompiler(settings: Settings): Global = {
- if (settings.Yrangepos.value) new Global(settings, reporter(settings)) with interactive.RangePositions
- else new Global(settings, reporter(settings))
- }
+ def newCompiler(settings: Settings): Global = Global(settings, reporter(settings))
def reporter(settings: Settings): Reporter = new ConsoleReporter(settings)
diff --git a/src/partest/scala/tools/partest/JavapTest.scala b/src/partest/scala/tools/partest/JavapTest.scala
new file mode 100644
index 0000000000..3cb3dc6ca8
--- /dev/null
+++ b/src/partest/scala/tools/partest/JavapTest.scala
@@ -0,0 +1,26 @@
+
+package scala.tools.partest
+
+import scala.util.{Try,Success,Failure}
+import java.lang.System.{out => sysout}
+
+/** A trait for testing the repl's javap command
+ * or possibly examining its output.
+ */
+abstract class JavapTest extends ReplTest {
+
+ /** Your Assertion Here, whatever you want to affirm.
+ * Assertions must be satisfied by all flavors of javap
+ * and should not be fragile with respect to compiler output.
+ */
+ def yah(res: Seq[String]): Boolean
+
+ def baddies = List(":javap unavailable", ":javap not yet working")
+
+ // give it a pass if javap is broken
+ override def show() = try {
+ val res = eval().toSeq
+ val unsupported = res exists (s => baddies exists (s contains _))
+ assert ((unsupported || yah(res)), res.mkString("","\n","\n"))
+ } catch { case ae: AssertionError => ae.printStackTrace(sysout) }
+}
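
A hypothetical test using the new trait might look as follows, assuming the usual ReplTest/DirectTest contract where `code` supplies the repl transcript; the class name and the expected javap output are illustrative only:

object Test extends JavapTest {
  // repl session: define a class, then ask :javap for it
  def code = """
    |case class Betty(i: Int)
    |:javap Betty
  """.stripMargin

  // pass if the disassembly mentions the class
  // (JavapTest itself already gives a pass when javap is reported unavailable)
  def yah(res: Seq[String]) = res exists (_ contains "Betty")
}
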
diff --git a/src/partest/scala/tools/partest/PartestDefaults.scala b/src/partest/scala/tools/partest/PartestDefaults.scala
index a21c602d14..5d98a8be81 100644
--- a/src/partest/scala/tools/partest/PartestDefaults.scala
+++ b/src/partest/scala/tools/partest/PartestDefaults.scala
@@ -8,8 +8,6 @@ import java.lang.Runtime.getRuntime
object PartestDefaults {
import nsc.Properties._
- private def wrapAccessControl[T](body: => Option[T]): Option[T] =
- try body catch { case _: java.security.AccessControlException => None }
def testRootName = propOrNone("partest.root")
def srcDirName = propOrElse("partest.srcdir", "files")
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
index 0199400ada..13207b16fd 100644
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ b/src/partest/scala/tools/partest/PartestTask.scala
@@ -43,7 +43,6 @@ import org.apache.tools.ant.types.Commandline.Argument
* - `runtests`,
* - `jvmtests`,
* - `residenttests`,
- * - `buildmanagertests`,
* - `shootouttests`,
* - `scalaptests`,
* - `scalachecktests`,
@@ -76,10 +75,6 @@ class PartestTask extends Task with CompilationPathProperty {
residentFiles = Some(input)
}
- def addConfiguredBuildManagerTests(input: FileSet) {
- buildManagerFiles = Some(input)
- }
-
def addConfiguredScalacheckTests(input: FileSet) {
scalacheckFiles = Some(input)
}
@@ -182,13 +177,11 @@ class PartestTask extends Task with CompilationPathProperty {
private var javaccmd: Option[File] = None
private var showDiff: Boolean = false
private var showLog: Boolean = false
- private var runFailed: Boolean = false
private var posFiles: Option[FileSet] = None
private var negFiles: Option[FileSet] = None
private var runFiles: Option[FileSet] = None
private var jvmFiles: Option[FileSet] = None
private var residentFiles: Option[FileSet] = None
- private var buildManagerFiles: Option[FileSet] = None
private var scalacheckFiles: Option[FileSet] = None
private var scriptFiles: Option[FileSet] = None
private var shootoutFiles: Option[FileSet] = None
@@ -245,7 +238,6 @@ class PartestTask extends Task with CompilationPathProperty {
private def getRunFiles = getFilesAndDirs(runFiles)
private def getJvmFiles = getFilesAndDirs(jvmFiles)
private def getResidentFiles = getFiles(residentFiles)
- private def getBuildManagerFiles = getFilesAndDirs(buildManagerFiles)
private def getScalacheckFiles = getFilesAndDirs(scalacheckFiles)
private def getScriptFiles = getFiles(scriptFiles)
private def getShootoutFiles = getFiles(shootoutFiles)
@@ -345,7 +337,6 @@ class PartestTask extends Task with CompilationPathProperty {
antFileManager.showDiff = showDiff
antFileManager.showLog = showLog
- antFileManager.failed = runFailed
antFileManager.CLASSPATH = ClassPath.join(classpath.list: _*)
antFileManager.LATEST_LIB = scalaLibrary.getAbsolutePath
antFileManager.LATEST_REFLECT = scalaReflect.getAbsolutePath
@@ -365,7 +356,6 @@ class PartestTask extends Task with CompilationPathProperty {
(getRunFiles, "run", "Compiling and running files"),
(getJvmFiles, "jvm", "Compiling and running files"),
(getResidentFiles, "res", "Running resident compiler scenarii"),
- (getBuildManagerFiles, "buildmanager", "Running Build Manager scenarii"),
(getScalacheckFiles, "scalacheck", "Running scalacheck tests"),
(getScriptFiles, "script", "Running script files"),
(getShootoutFiles, "shootout", "Running shootout tests"),
diff --git a/src/partest/scala/tools/partest/SecurityTest.scala b/src/partest/scala/tools/partest/SecurityTest.scala
index 2d6f61d0b1..1f1c8a95ea 100644
--- a/src/partest/scala/tools/partest/SecurityTest.scala
+++ b/src/partest/scala/tools/partest/SecurityTest.scala
@@ -10,23 +10,10 @@ import java.util._
abstract class SecurityTest extends App {
def throwIt(x: Any) = throw new AccessControlException("" + x)
-
- def readPerm(p: PropertyPermission) = p.getActions contains "read"
- def writePerm(p: PropertyPermission) = p.getActions contains "write"
def propertyCheck(p: PropertyPermission): Unit = throwIt(p)
def check(perm: Permission): Unit = perm match {
case p: PropertyPermission => propertyCheck(p)
case _ => ()
}
-
- lazy val sm = new SecurityManager {
- // these two are the choke points for all permissions checks
- override def checkPermission(perm: Permission): Unit = check(perm)
- override def checkPermission(perm: Permission, context: Object): Unit = check(perm)
- }
- def securityOn(): Boolean = {
- try { System.setSecurityManager(sm) ; true }
- catch { case _: SecurityException => false }
- }
}
diff --git a/src/partest/scala/tools/partest/TestUtil.scala b/src/partest/scala/tools/partest/TestUtil.scala
index 9bfd444180..5c177ac962 100644
--- a/src/partest/scala/tools/partest/TestUtil.scala
+++ b/src/partest/scala/tools/partest/TestUtil.scala
@@ -24,14 +24,6 @@ trait TestUtil {
}
def nanos(body: => Unit): Long = alsoNanos(body)._1
- def verifySpeed(body1: => Unit, body2: => Unit, acceptableMultiple: Double) = {
- val t1 = nanos(body1).toDouble
- val t2 = nanos(body2).toDouble
- val mult = if (t1 > t2) t1 / t2 else t2 / t1
-
- assert(mult <= acceptableMultiple, "Performance difference too great: multiple = " + mult)
- }
-
def intercept[T <: Exception : ClassTag](code: => Unit): Unit =
try {
code
@@ -41,6 +33,6 @@ trait TestUtil {
}
}
+// Used in tests.
object TestUtil extends TestUtil {
-
}
diff --git a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala b/src/partest/scala/tools/partest/instrumented/Instrumentation.scala
index 8a284b313b..18dd740208 100644
--- a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala
+++ b/src/partest/scala/tools/partest/instrumented/Instrumentation.scala
@@ -78,6 +78,7 @@ object Instrumentation {
!t.className.startsWith("scala/util/DynamicVariable")
}
+ // Used in tests.
def printStatistics(stats: Statistics = getStatistics, filter: MethodCallTrace => Boolean = standardFilter): Unit = {
val stats = getStatistics
println("Method call statistics:")
diff --git a/src/partest/scala/tools/partest/nest/CompileManager.scala b/src/partest/scala/tools/partest/nest/CompileManager.scala
index 3d902d6d00..a8694cc0d6 100644
--- a/src/partest/scala/tools/partest/nest/CompileManager.scala
+++ b/src/partest/scala/tools/partest/nest/CompileManager.scala
@@ -9,8 +9,7 @@ package scala.tools.partest
package nest
import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError, io }
-import scala.tools.nsc.io.{ File => SFile }
-import scala.tools.nsc.interactive.RangePositions
+import scala.reflect.io.{ Directory, File => SFile, FileOperationException }
import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter }
import scala.tools.nsc.util.{ ClassPath, FakePos }
import scala.tools.nsc.Properties.{ setProp, propOrEmpty }
@@ -52,10 +51,7 @@ abstract class SimpleCompiler {
class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
def newGlobal(settings: Settings, reporter: Reporter): Global =
- if (settings.Yrangepos.value)
- new Global(settings, reporter) with RangePositions
- else
- new Global(settings, reporter)
+ Global(settings, reporter)
def newGlobal(settings: Settings, logWriter: FileWriter): Global =
newGlobal(settings, new ExtConsoleReporter(settings, new PrintWriter(logWriter)))
@@ -69,11 +65,27 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
s
}
- private def updatePluginPath(options: String): String = {
+ implicit class Copier(f: SFile) {
+ // But what if f is bigger than CHUNK?!
+ def copyTo(dest: Path) {
+ dest.toFile writeAll f.slurp
+ }
+ }
+
+ // the plugin path can be relative to the test root; "." (the cwd) maps to the output dir
+ private def updatePluginPath(options: String, out: Option[File], srcdir: Directory): String = {
val dir = fileManager.testRootDir
- def absolutize(path: String) = Path(path) match {
+ def pathOrCwd(p: String) =
+ if (p == "." && out.isDefined) {
+ val plugxml = "scalac-plugin.xml"
+ val pout = Path(out.get)
+ val pd = (srcdir / plugxml).toFile
+ if (pd.exists) pd copyTo (pout / plugxml)
+ pout
+ } else Path(p)
+ def absolutize(path: String) = pathOrCwd(path) match {
case x if x.isAbsolute => x.path
- case x => (fileManager.testRootDir / x).toAbsolute.path
+ case x => (dir / x).toAbsolute.path
}
val (opt1, opt2) = (options split "\\s").toList partition (_ startsWith "-Xplugin:")
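
    Stripped of partest's Path helpers, the shape of that -Xplugin rewrite is roughly the following sketch. It is not the partest implementation; testRoot is a plain java.io.File standing in for fileManager.testRootDir, and the "." handling is omitted.

    import java.io.File

    // Split the scalac options and absolutize any -Xplugin: path relative to the test root.
    def absolutizePlugins(options: String, testRoot: File): String = {
      def absolutize(path: String) = {
        val f = new File(path)
        if (f.isAbsolute) f.getPath else new File(testRoot, path).getCanonicalPath
      }
      val (plugins, rest) = options.split("\\s+").filter(_.nonEmpty).toList partition (_ startsWith "-Xplugin:")
      val fixed = plugins map (p => "-Xplugin:" + absolutize(p stripPrefix "-Xplugin:"))
      (fixed ++ rest) mkString " "
    }
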
@@ -90,17 +102,21 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
}
val logWriter = new FileWriter(log)
+ // this api has no notion of srcdir, so fake it
+ val fstFile = SFile(files(0))
+ val srcdir = fstFile.parent
+
// check whether there is a ".flags" file
+ def convertFlags(f: SFile) = updatePluginPath(f.slurp(), out, srcdir)
val logFile = basename(log.getName)
val flagsFileName = "%s.flags" format (logFile.substring(0, logFile.lastIndexOf("-")))
- val argString = (io.File(log).parent / flagsFileName) ifFile (x => updatePluginPath(x.slurp())) getOrElse ""
+ val argString = (SFile(log).parent / flagsFileName) ifFile (convertFlags) getOrElse ""
// slurp local flags (e.g., "A_1.flags")
- val fstFile = SFile(files(0))
def isInGroup(num: Int) = fstFile.stripExtension endsWith ("_" + num)
val inGroup = (1 to 9) flatMap (group => if (isInGroup(group)) List(group) else List())
val localFlagsList = if (inGroup.nonEmpty) {
- val localArgString = (fstFile.parent / (fstFile.stripExtension + ".flags")) ifFile (x => updatePluginPath(x.slurp())) getOrElse ""
+ val localArgString = (srcdir / (fstFile.stripExtension + ".flags")) ifFile (convertFlags) getOrElse ""
localArgString.split(' ').toList.filter(_.length > 0)
} else List()
@@ -140,8 +156,10 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
NestUI.verbose("compiling "+toCompile)
NestUI.verbose("with classpath: "+global.classPath.toString)
NestUI.verbose("and java classpath: "+ propOrEmpty("java.class.path"))
- try new global.Run compile toCompile
- catch {
+ try {
+ if (command.shouldStopWithInfo) logWriter append (command getInfoMessage global)
+ else new global.Run compile toCompile
+ } catch {
case FatalError(msg) =>
testRep.error(null, "fatal error: " + msg)
return CompilerCrashed
@@ -152,7 +170,7 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
}
finally logWriter.close()
- if (testRep.hasErrors) CompileFailed
+ if (testRep.hasErrors || command.shouldStopWithInfo) CompileFailed
else CompileSuccess
}
}
diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
index 08e709de90..0ec3f60bf5 100644
--- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
+++ b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
@@ -18,8 +18,6 @@ import io.{ Path, Directory }
import File.pathSeparator
import ClassPath.{ join }
import PathResolver.{ Environment, Defaults }
-import RunnerUtils._
-
class ConsoleFileManager extends FileManager {
var testBuild: Option[String] = PartestDefaults.testBuild
@@ -81,29 +79,24 @@ class ConsoleFileManager extends FileManager {
testClassesDir = Path(testClasses.get).toCanonical.toDirectory
NestUI.verbose("Running with classes in "+testClassesDir)
- latestFile = testClassesDir.parent / "bin"
latestLibFile = testClassesDir / "library"
latestActorsFile = testClassesDir / "library" / "actors"
latestReflectFile = testClassesDir / "reflect"
latestCompFile = testClassesDir / "compiler"
latestPartestFile = testClassesDir / "partest"
- latestFjbgFile = testParent / "lib" / "fjbg.jar"
}
else if (testBuild.isDefined) {
val dir = Path(testBuild.get)
NestUI.verbose("Running on "+dir)
- latestFile = dir / "bin"
latestLibFile = dir / "lib/scala-library.jar"
latestActorsFile = dir / "lib/scala-actors.jar"
latestReflectFile = dir / "lib/scala-reflect.jar"
latestCompFile = dir / "lib/scala-compiler.jar"
latestPartestFile = dir / "lib/scala-partest.jar"
- latestFjbgFile = testParent / "lib" / "fjbg.jar"
}
else {
def setupQuick() {
NestUI.verbose("Running build/quick")
- latestFile = prefixFile("build/quick/bin")
latestLibFile = prefixFile("build/quick/classes/library")
latestActorsFile = prefixFile("build/quick/classes/library/actors")
latestReflectFile = prefixFile("build/quick/classes/reflect")
@@ -114,7 +107,6 @@ class ConsoleFileManager extends FileManager {
def setupInst() {
NestUI.verbose("Running dist (installed)")
val p = testParent.getParentFile
- latestFile = prefixFileWith(p, "bin")
latestLibFile = prefixFileWith(p, "lib/scala-library.jar")
latestActorsFile = prefixFileWith(p, "lib/scala-actors.jar")
latestReflectFile = prefixFileWith(p, "lib/scala-reflect.jar")
@@ -124,7 +116,6 @@ class ConsoleFileManager extends FileManager {
def setupDist() {
NestUI.verbose("Running dists/latest")
- latestFile = prefixFile("dists/latest/bin")
latestLibFile = prefixFile("dists/latest/lib/scala-library.jar")
latestActorsFile = prefixFile("dists/latest/lib/scala-actors.jar")
latestReflectFile = prefixFile("dists/latest/lib/scala-reflect.jar")
@@ -134,7 +125,6 @@ class ConsoleFileManager extends FileManager {
def setupPack() {
NestUI.verbose("Running build/pack")
- latestFile = prefixFile("build/pack/bin")
latestLibFile = prefixFile("build/pack/lib/scala-library.jar")
latestActorsFile = prefixFile("build/pack/lib/scala-actors.jar")
latestReflectFile = prefixFile("build/pack/lib/scala-reflect.jar")
@@ -142,11 +132,6 @@ class ConsoleFileManager extends FileManager {
latestPartestFile = prefixFile("build/pack/lib/scala-partest.jar")
}
- val dists = testParent / "dists"
- val build = testParent / "build"
- // in case of an installed dist, testRootDir is one level deeper
- val bin = testParent.parent / "bin"
-
def mostRecentOf(base: String, names: String*) =
names map (x => prefixFile(base + "/" + x).lastModified) reduceLeft (_ max _)
@@ -165,8 +150,6 @@ class ConsoleFileManager extends FileManager {
// run setup based on most recent time
pairs(pairs.keys max)()
-
- latestFjbgFile = prefixFile("lib/fjbg.jar")
}
LATEST_LIB = latestLibFile.getAbsolutePath
@@ -182,20 +165,16 @@ class ConsoleFileManager extends FileManager {
var LATEST_PARTEST: String = ""
var LATEST_ACTORS: String = ""
- var latestFile: File = _
var latestLibFile: File = _
var latestActorsFile: File = _
var latestReflectFile: File = _
var latestCompFile: File = _
var latestPartestFile: File = _
- var latestFjbgFile: File = _
def latestScalapFile: File = (latestLibFile.parent / "scalap.jar").jfile
var testClassesDir: Directory = _
// initialize above fields
findLatest()
- var testFiles: List[io.Path] = Nil
-
def getFiles(kind: String, cond: Path => Boolean): List[File] = {
def ignoreDir(p: Path) = List("svn", "obj") exists (p hasExtension _)
@@ -204,9 +183,7 @@ class ConsoleFileManager extends FileManager {
if (dir.isDirectory) NestUI.verbose("look in %s for tests" format dir)
else NestUI.failure("Directory '%s' not found" format dir)
- val files =
- if (testFiles.nonEmpty) testFiles filter (_.parent isSame dir)
- else dir.list filterNot ignoreDir filter cond toList
+ val files = dir.list filterNot ignoreDir filter cond toList
( if (failed) files filter (x => logFileExists(x, kind)) else files ) map (_.jfile)
}
diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
index e016fb7c92..6a24926b14 100644
--- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
@@ -11,7 +11,6 @@ package nest
import java.io.{File, PrintStream, FileOutputStream, BufferedReader,
InputStreamReader, StringWriter, PrintWriter}
import utils.Properties._
-import RunnerUtils._
import scala.tools.nsc.Properties.{ versionMsg, setProp }
import scala.tools.nsc.util.CommandLineParser
import scala.tools.nsc.io
@@ -26,15 +25,12 @@ class ConsoleRunner extends DirectRunner {
private def antFilter(p: Path) = p.isFile && (p endsWith "build.xml")
val testSets = {
- val pathFilter: Path => Boolean = x => x.isDirectory || (x hasExtension "scala")
-
List(
TestSet("pos", stdFilter, "Testing compiler (on files whose compilation should succeed)"),
TestSet("neg", stdFilter, "Testing compiler (on files whose compilation should fail)"),
TestSet("run", stdFilter, "Testing interpreter and backend"),
TestSet("jvm", stdFilter, "Testing JVM backend"),
TestSet("res", x => x.isFile && (x hasExtension "res"), "Testing resident compiler"),
- TestSet("buildmanager", _.isDirectory, "Testing Build Manager"),
TestSet("shootout", stdFilter, "Testing shootout tests"),
TestSet("script", stdFilter, "Testing script tests"),
TestSet("scalacheck", stdFilter, "Testing ScalaCheck tests"),
@@ -54,8 +50,6 @@ class ConsoleRunner extends DirectRunner {
private val testSetArgs = testSets map ("--" + _.kind)
private val testSetArgMap = testSetArgs zip testSets toMap
- def denotesTestSet(arg: String) = testSetArgs contains arg
-
private def printVersion() { NestUI outline (versionMsg + "\n") }
private val unaryArgs = List(
@@ -70,10 +64,11 @@ class ConsoleRunner extends DirectRunner {
// true if a test path matches the --grep expression.
private def pathMatchesExpr(path: Path, expr: String) = {
def pred(p: Path) = file2String(p.toFile) contains expr
- def srcs = path.toDirectory.deepList() filter (_.hasExtension("scala", "java"))
+ def greppable(f: Path) = f.isFile && (f hasExtension ("scala", "java"))
+ def any(d: Path) = d.toDirectory.deepList() exists (f => greppable(f) && pred(f))
(path.isFile && pred(path)) ||
- (path.isDirectory && srcs.exists(pred)) ||
+ (path.isDirectory && any(path)) ||
(pred(path changeExtension "check"))
}
@@ -94,8 +89,6 @@ class ConsoleRunner extends DirectRunner {
else if (parsed isSet "--pack") new ConsoleFileManager("build/pack")
else new ConsoleFileManager // auto detection, see ConsoleFileManager.findLatest
- def argNarrowsTests(x: String) = denotesTestSet(x) || denotesTestPath(x)
-
NestUI._verbose = parsed isSet "--verbose"
fileManager.showDiff = true
// parsed isSet "--show-diff"
@@ -121,7 +114,7 @@ class ConsoleRunner extends DirectRunner {
val grepOption = parsed get "--grep"
val grepPaths = grepOption.toList flatMap { expr =>
val subjectDirs = testSetKinds map (srcDir / _ toDirectory)
- val testPaths = subjectDirs flatMap (_.files filter stdFilter)
+ val testPaths = subjectDirs flatMap (_.list filter stdFilter)
val paths = testPaths filter (p => pathMatchesExpr(p, expr))
if (paths.isEmpty)
diff --git a/src/partest/scala/tools/partest/nest/DirectRunner.scala b/src/partest/scala/tools/partest/nest/DirectRunner.scala
index 32ef8b41ea..7e4c3b842c 100644
--- a/src/partest/scala/tools/partest/nest/DirectRunner.scala
+++ b/src/partest/scala/tools/partest/nest/DirectRunner.scala
@@ -14,7 +14,6 @@ import scala.tools.nsc.util.ScalaClassLoader
import scala.tools.nsc.io.Path
import scala.collection.{ mutable, immutable }
import java.util.concurrent._
-import scala.collection.convert.decorateAll._
case class TestRunParams(val scalaCheckParentClassLoader: ScalaClassLoader)
@@ -39,27 +38,15 @@ trait DirectRunner {
def runTestsForFiles(_kindFiles: List[File], kind: String): immutable.Map[String, TestState] = {
System.setProperty("line.separator", "\n")
- // @partest maintainer: we cannot create a fresh file manager here
- // since the FM must respect --buildpath and --classpath from the command line
- // for example, see how it's done in ReflectiveRunner
- //val consFM = new ConsoleFileManager
- //import consFM.{ latestCompFile, latestLibFile, latestPartestFile }
- val latestCompFile = new File(fileManager.LATEST_COMP)
- val latestReflectFile = new File(fileManager.LATEST_REFLECT)
- val latestLibFile = new File(fileManager.LATEST_LIB)
- val latestPartestFile = new File(fileManager.LATEST_PARTEST)
- val latestActorsFile = new File(fileManager.LATEST_ACTORS)
- val scalacheckURL = PathSettings.scalaCheck.toURL
- val scalaCheckParentClassLoader = ScalaClassLoader.fromURLs(
- scalacheckURL :: (List(latestCompFile, latestReflectFile, latestLibFile, latestActorsFile, latestPartestFile).map(_.toURI.toURL))
- )
-
- val kindFiles = onlyValidTestPaths(_kindFiles)
- val pool = Executors.newFixedThreadPool(numThreads)
- val manager = new RunnerManager(kind, fileManager, TestRunParams(scalaCheckParentClassLoader))
- val futures = kindFiles map (f => (f, pool submit callable(manager runTest f))) toMap
+ val allUrls = PathSettings.scalaCheck.toURL :: fileManager.latestUrls
+ val scalaCheckParentClassLoader = ScalaClassLoader.fromURLs(allUrls)
+ val kindFiles = onlyValidTestPaths(_kindFiles)
+ val pool = Executors.newFixedThreadPool(numThreads)
+ val manager = new RunnerManager(kind, fileManager, TestRunParams(scalaCheckParentClassLoader))
+ val futures = kindFiles map (f => (f, pool submit callable(manager runTest f))) toMap
pool.shutdown()
+
try if (!pool.awaitTermination(4, TimeUnit.HOURS))
NestUI.warning("Thread pool timeout elapsed before all tests were complete!")
catch { case t: InterruptedException =>
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
index 2823967ecf..a32c56e973 100644
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ b/src/partest/scala/tools/partest/nest/FileManager.scala
@@ -13,7 +13,6 @@ import java.io.{File, FilenameFilter, IOException, StringWriter,
FileReader, PrintWriter, FileWriter}
import java.net.URI
import scala.tools.nsc.io.{ Path, Directory, File => SFile }
-import scala.sys.process._
import scala.collection.mutable
trait FileUtil {
@@ -65,6 +64,20 @@ trait FileManager extends FileUtil {
var LATEST_PARTEST: String
var LATEST_ACTORS: String
+ protected def relativeToLibrary(what: String): String = {
+ if (LATEST_LIB endsWith ".jar") {
+ (SFile(LATEST_LIB).parent / s"scala-$what.jar").toAbsolute.path
+ }
+ else {
+ (SFile(LATEST_LIB).parent.parent / "classes" / what).toAbsolute.path
+ }
+ }
+ def latestScaladoc = relativeToLibrary("scaladoc")
+ def latestInteractive = relativeToLibrary("interactive")
+ def latestPaths = List(LATEST_LIB, LATEST_REFLECT, LATEST_COMP, LATEST_PARTEST, LATEST_ACTORS, latestScaladoc, latestInteractive)
+ def latestFiles = latestPaths map (p => new java.io.File(p))
+ def latestUrls = latestFiles map (_.toURI.toURL)
+
var showDiff = false
var updateCheck = false
var showLog = false
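
    The sibling-artifact lookup is easiest to see with concrete paths; this standalone sketch mirrors the logic of relativeToLibrary above using java.io.File (paths kept relative for readability, where the real code returns absolute paths).

    import java.io.File

    def relativeToLibrary(latestLib: String, what: String): String =
      if (latestLib endsWith ".jar")                        // e.g. build/pack/lib/scala-library.jar
        new File(new File(latestLib).getParentFile, s"scala-$what.jar").getPath
      else                                                  // e.g. build/quick/classes/library
        new File(new File(latestLib).getParentFile.getParentFile, "classes/" + what).getPath

    // relativeToLibrary("build/pack/lib/scala-library.jar", "scaladoc")
    //   -> build/pack/lib/scala-scaladoc.jar
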
@@ -73,17 +86,11 @@ trait FileManager extends FileUtil {
var SCALAC_OPTS = PartestDefaults.scalacOpts.split(' ').toSeq
var JAVA_OPTS = PartestDefaults.javaOpts
var timeout = PartestDefaults.timeout
- // how can 15 minutes not be enough? What are you doing, run/lisp.scala?
- // You complete in 11 seconds on my machine.
- var oneTestTimeout = 60 * 60 * 1000
/** Only when --debug is given. */
lazy val testTimings = new mutable.HashMap[String, Long]
def recordTestTiming(name: String, milliseconds: Long) =
synchronized { testTimings(name) = milliseconds }
- def showTestTimings() {
- testTimings.toList sortBy (-_._2) foreach { case (k, v) => println("%s: %s".format(k, v)) }
- }
def getLogFile(dir: File, fileBase: String, kind: String): File =
new File(dir, fileBase + "-" + kind + ".log")
diff --git a/src/partest/scala/tools/partest/nest/NestUI.scala b/src/partest/scala/tools/partest/nest/NestUI.scala
index 70db6d0ed1..df90b22448 100644
--- a/src/partest/scala/tools/partest/nest/NestUI.scala
+++ b/src/partest/scala/tools/partest/nest/NestUI.scala
@@ -54,9 +54,6 @@ object NestUI {
}
def warning(msg: String) = print(_warning + msg + _default)
- def warning(msg: String, wr: PrintWriter) = synchronized {
- wr.print(_warning + msg + _default)
- }
def normal(msg: String) = print(_default + msg)
def normal(msg: String, wr: PrintWriter) = synchronized {
@@ -76,7 +73,6 @@ object NestUI {
println(" --run run interpreter and backend tests")
println(" --jvm run JVM backend tests")
println(" --res run resident compiler tests")
- println(" --buildmanager run Build Manager tests")
println(" --scalacheck run ScalaCheck tests")
println(" --script run script runner tests")
println(" --shootout run shootout tests")
@@ -104,7 +100,6 @@ object NestUI {
}
var _verbose = false
- var _debug = false
def verbose(msg: String) {
if (_verbose) {
@@ -112,10 +107,4 @@ object NestUI {
println(msg)
}
}
- def debug(msg: String) {
- if (isPartestDebug) {
- outline("debug: ")
- println(msg)
- }
- }
}
diff --git a/src/partest/scala/tools/partest/nest/PathSettings.scala b/src/partest/scala/tools/partest/nest/PathSettings.scala
index a42c2219b1..02651c527b 100644
--- a/src/partest/scala/tools/partest/nest/PathSettings.scala
+++ b/src/partest/scala/tools/partest/nest/PathSettings.scala
@@ -9,7 +9,6 @@ import scala.tools.nsc.Properties.{ setProp, propOrEmpty, propOrNone, propOrElse
import scala.tools.nsc.util.ClassPath
import scala.tools.nsc.io
import io.{ Path, File, Directory }
-import RunnerUtils._
object PathSettings {
import PartestDefaults.{ testRootDir, srcDirName }
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
index 5cb8589d66..05cae7b238 100644
--- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
@@ -3,8 +3,6 @@
* @author Philipp Haller
*/
-// $Id$
-
package scala.tools.partest
package nest
@@ -12,7 +10,6 @@ import scala.tools.nsc.Properties.{ setProp, propOrEmpty }
import scala.tools.nsc.util.ClassPath
import scala.tools.nsc.io
import io.Path
-import RunnerUtils._
import java.net.URLClassLoader
/* This class is used to load an instance of DirectRunner using
@@ -28,6 +25,12 @@ class ReflectiveRunner {
// was used to start the runner.
val sepRunnerClassName = "scala.tools.partest.nest.ConsoleRunner"
+ private def searchPath(option: String, as: List[String]): Option[String] = as match {
+ case `option` :: r :: _ => Some(r)
+ case _ :: rest => searchPath(option, rest)
+ case Nil => None
+ }
+
def main(args: String) {
val argList = (args.split("\\s")).toList
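
    searchPath (inlined here from the removed RunnerUtils) simply scans the tokenized argument list for the value following an option; for example, in the REPL:

    def searchPath(option: String, as: List[String]): Option[String] = as match {
      case `option` :: r :: _ => Some(r)
      case _ :: rest          => searchPath(option, rest)
      case Nil                => None
    }

    val argList = "--buildpath build/pack --verbose".split("\\s+").toList
    searchPath("--buildpath", argList)  // Some("build/pack")
    searchPath("--classpath", argList)  // None
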
@@ -47,22 +50,15 @@ class ReflectiveRunner {
else // auto detection
new ConsoleFileManager
- import fileManager.
- { latestCompFile, latestReflectFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile }
- val files =
- Array(latestCompFile, latestReflectFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile) map (x => io.File(x))
-
- val sepUrls = files map (_.toURL)
- var sepLoader = new URLClassLoader(sepUrls, null)
-
// this is a workaround for https://issues.scala-lang.org/browse/SI-5433
- // when that bug is fixed, this paragraph of code can be safely removed
+ // when that bug is fixed, the addition of PathSettings.srcCodeLib can be removed
// we hack into the classloader that will become parent classloader for scalac
// this way we ensure that reflective macro lookup will pick correct Code.lift
- sepLoader = new URLClassLoader((PathSettings.srcCodeLib +: files) map (_.toURL), null)
+ val sepUrls = PathSettings.srcCodeLib.toURI.toURL :: fileManager.latestUrls
+ val sepLoader = new URLClassLoader(sepUrls.toArray, null)
if (isPartestDebug)
- println("Loading classes from:\n" + sepUrls.mkString("\n"))
+ println("Loading classes from:\n " + fileManager.latestUrls.mkString("\n "))
// @partest maintainer: it seems to me that commented lines are incorrect
// if classPath is not empty, then it has been provided by the --classpath option
@@ -73,11 +69,11 @@ class ReflectiveRunner {
// case Some(cp) => Nil
// case _ => files.toList map (_.path)
//}
- val paths = files.toList map (_.path)
- val newClasspath = ClassPath.join(paths: _*)
+ setProp("java.class.path", ClassPath.join(fileManager.latestPaths: _*))
- setProp("java.class.path", newClasspath)
+ // don't let partest find pluginsdir; in the ant build, the standard plugin has a dedicated test suite
+ //setProp("scala.home", latestLibFile.parent.parent.path)
setProp("scala.home", "")
if (isPartestDebug)
diff --git a/src/partest/scala/tools/partest/nest/RunnerManager.scala b/src/partest/scala/tools/partest/nest/RunnerManager.scala
index 0d979d2692..8f28277a6c 100644
--- a/src/partest/scala/tools/partest/nest/RunnerManager.scala
+++ b/src/partest/scala/tools/partest/nest/RunnerManager.scala
@@ -19,7 +19,6 @@ import scala.tools.nsc.util.{ ClassPath, FakePos, ScalaClassLoader, stackTraceSt
import ClassPath.{ join, split }
import scala.tools.scalap.scalax.rules.scalasig.ByteCode
import scala.collection.{ mutable, immutable }
-import scala.tools.nsc.interactive.{ BuildManager, RefinedBuildManager }
import scala.sys.process._
import java.util.concurrent.{ Executors, TimeUnit, TimeoutException }
import PartestDefaults.{ javaCmd, javacCmd }
@@ -260,13 +259,12 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP
runCommand(cmd, logFile)
}
- private def getCheckFilePath(dir: File, suffix: String = "") = {
+ private def getCheckFilePath(dir: File, suffix: String) = {
def chkFile(s: String) = (Directory(dir) / "%s%s.check".format(fileBase, s)).toFile
if (chkFile("").isFile || suffix == "") chkFile("")
else chkFile("-" + suffix)
}
- private def getCheckFile(dir: File) = Some(getCheckFilePath(dir, kind)) filter (_.canRead)
private def compareOutput(dir: File, logFile: File): String = {
val checkFile = getCheckFilePath(dir, kind)
@@ -287,15 +285,10 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP
def newTestWriters() = {
val swr = new StringWriter
val wr = new PrintWriter(swr, true)
- // diff = ""
((swr, wr))
}
- def fail(what: Any) = {
- NestUI.verbose("scalac: compilation of "+what+" failed\n")
- false
- }
def diffCheck(testFile: File, diff: String) = {
testDiff = diff
testDiff == ""
@@ -341,10 +334,35 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP
val (scalaFiles, javaFiles) = g partition isScala
val allFiles = javaFiles ++ scalaFiles
+ /* The test can contain both java and scala files, each of which should be compiled with the corresponding
+ * compiler. Since the source files can reference each other both ways (java referencing scala classes and
+ * vice versa), the partest compilation routine attempts to reach a "bytecode fixpoint" between the two
+ * compilers -- that's when bytecode generated by each compiler implements the signatures expected by the other.
+ *
+ * In theory this property can't be guaranteed, as neither compiler can know what signatures the other
+ * compiler expects and how to implement them. (see SI-1240 for the full story)
+ *
+ * In practice, this happens in 3 steps:
+ * STEP1: Feed all the files to scalac if there are also non-Scala sources.
+ * It will parse java files and obtain their expected signatures and generate bytecode for scala files
+ * STEP2: Feed the java files to javac if there are any.
+ * It will generate the bytecode for the java files and link to the scalac-generated bytecode for scala
+ * STEP3: (Re-)compile the scala sources so they link to the correct
+ * java signatures, in case the signatures deduced by scalac from the source files were wrong. Since the
+ * bytecode for java is already in place, we only feed the scala files to scalac so it will take the
+ * java signatures from the existing javac-generated bytecode.
+ * Note that no artifacts are deleted before this step.
+ */
List(1, 2, 3).foldLeft(CompileSuccess: CompilationOutcome) {
- case (CompileSuccess, 1) if scalaFiles.nonEmpty => compileMgr.attemptCompile(Some(outDir), allFiles, kind, logFile) // java + scala
- case (CompileSuccess, 2) if javaFiles.nonEmpty => javac(outDir, javaFiles, logFile) // java
- case (CompileSuccess, 3) if scalaFiles.nonEmpty => compileMgr.attemptCompile(Some(outDir), scalaFiles, kind, logFile) // scala
+ case (CompileSuccess, 1) if scalaFiles.nonEmpty && javaFiles.nonEmpty =>
+ compileMgr.attemptCompile(Some(outDir), allFiles, kind, logFile)
+ case (CompileSuccess, 2) if javaFiles.nonEmpty =>
+ javac(outDir, javaFiles, logFile)
+ case (CompileSuccess, 3) if scalaFiles.nonEmpty =>
+ // TODO: Do we actually need this? SI-1240 is known to require this, but we don't know if other tests
+ // require it: https://groups.google.com/forum/?fromgroups#!topic/scala-internals/rFDKAcOKciU
+ compileMgr.attemptCompile(Some(outDir), scalaFiles, kind, logFile)
+
case (outcome, _) => outcome
}
}
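
    The fold above is the usual "run steps until one fails" idiom; a stripped-down model of it, with stand-in outcome types and no-op steps in place of the compiler calls (and without the file-presence guards), looks like:

    sealed trait Outcome
    case object Ok     extends Outcome
    case object Failed extends Outcome

    def mixedScalac(): Outcome  = Ok   // stand-in for step 1: scalac over java + scala sources
    def javacPass(): Outcome    = Ok   // stand-in for step 2: javac over the java sources
    def scalacAgain(): Outcome  = Ok   // stand-in for step 3: scalac over the scala sources only

    val outcome = List(1, 2, 3).foldLeft(Ok: Outcome) {
      case (Ok, 1)  => mixedScalac()
      case (Ok, 2)  => javacPass()
      case (Ok, 3)  => scalacAgain()
      case (bad, _) => bad             // after the first failure, remaining steps are skipped
    }
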
@@ -511,121 +529,6 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP
case "ant" =>
runAntTest(file)
- case "buildmanager" =>
- val (swr, wr) = newTestWriters()
- printInfoStart(file, wr)
- val (outDir, testFile, changesDir) = {
- if (!file.isDirectory)
- (null, null, null)
- else {
- NestUI.verbose(this+" running test "+fileBase)
- val outDir = createOutputDir()
- val testFile = new File(file, fileBase + ".test")
- val changesDir = new File(file, fileBase + ".changes")
-
- if (changesDir.isFile || !testFile.isFile) {
- // if changes exists then it has to be a dir
- if (!testFile.isFile) NestUI.verbose("invalid build manager test file")
- if (changesDir.isFile) NestUI.verbose("invalid build manager changes directory")
- (null, null, null)
- }
- else {
- copyTestFiles(file, outDir)
- NestUI.verbose("outDir: "+outDir)
- NestUI.verbose("logFile: "+logFile)
- (outDir, testFile, changesDir)
- }
- }
- }
- if (outDir == null)
- return (false, LogContext(logFile))
-
- // Pre-conditions satisfied
- val sourcepath = outDir.getAbsolutePath+File.separator
-
- // configure input/output files
- val logWriter = new PrintStream(new FileOutputStream(logFile), true)
- val testReader = new BufferedReader(new FileReader(testFile))
- val logConsoleWriter = new PrintWriter(logWriter, true)
-
- // create proper settings for the compiler
- val settings = new Settings(workerError)
- settings.outdir.value = outDir.getAbsoluteFile.getAbsolutePath
- settings.sourcepath.value = sourcepath
- settings.classpath.value = fileManager.CLASSPATH
- settings.Ybuildmanagerdebug.value = true
-
- // simulate Build Manager loop
- val prompt = "builder > "
- val reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter)
- val bM: BuildManager =
- new RefinedBuildManager(settings) {
- override protected def newCompiler(settings: Settings) =
- new BuilderGlobal(settings, reporter)
- }
-
- def testCompile(line: String): Boolean = {
- NestUI.verbose("compiling " + line)
- val args = (line split ' ').toList
- val command = new CompilerCommand(args, settings)
- command.ok && {
- bM.update(filesToSet(settings.sourcepath.value, command.files), Set.empty)
- !reporter.hasErrors
- }
- }
-
- val updateFiles = (line: String) => {
- NestUI.verbose("updating " + line)
- (line split ' ').toList forall (u =>
- (u split "=>").toList match {
- case origFileName::(newFileName::Nil) =>
- val newFile = new File(changesDir, newFileName)
- if (newFile.isFile) {
- val v = overwriteFileWith(new File(outDir, origFileName), newFile)
- if (!v)
- NestUI.verbose("'update' operation on " + u + " failed")
- v
- } else {
- NestUI.verbose("File " + newFile + " is invalid")
- false
- }
- case a =>
- NestUI.verbose("Other =: " + a)
- false
- }
- )
- }
-
- def loop(): Boolean = {
- testReader.readLine() match {
- case null | "" =>
- NestUI.verbose("finished")
- true
- case s if s startsWith ">>update " =>
- updateFiles(s stripPrefix ">>update ") && loop()
- case s if s startsWith ">>compile " =>
- val files = s stripPrefix ">>compile "
- logWriter.println(prompt + files)
- // In the end, it can finish with an error
- if (testCompile(files)) loop()
- else {
- val t = testReader.readLine()
- (t == null) || (t == "")
- }
- case s =>
- NestUI.verbose("wrong command in test file: " + s)
- false
- }
- }
-
- Output.withRedirected(logWriter) {
- try loop()
- finally testReader.close()
- }
- fileManager.mapFile(logFile, replaceSlashes(new File(sourcepath), _))
-
- (diffCheck(file, compareOutput(file, logFile)), LogContext(logFile, swr, wr))
-
case "res" => {
// simulate resident compiler loop
val prompt = "\nnsc> "
@@ -832,9 +735,8 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP
if (fileManager.failed && !runner.logFile.canRead)
return TestState.Ok
- // sys addShutdownHook cleanup()
- val ((success, ctx), elapsed) = timed(runner.run())
- val state = if (success) TestState.Ok else TestState.Fail
+ val (success, ctx) = runner.run()
+ val state = if (success) TestState.Ok else TestState.Fail
runner.reportResult(ctx.writers)
state
diff --git a/src/partest/scala/tools/partest/nest/RunnerUtils.scala b/src/partest/scala/tools/partest/nest/RunnerUtils.scala
deleted file mode 100644
index 6707a9338a..0000000000
--- a/src/partest/scala/tools/partest/nest/RunnerUtils.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-object RunnerUtils {
- def splitArgs(str: String) = str split "\\s" filterNot (_ == "") toList
-
- def searchPath(option: String, as: List[String]): Option[String] = as match {
- case `option` :: r :: _ => Some(r)
- case _ :: rest => searchPath(option, rest)
- case Nil => None
- }
-
- def searchAndRemovePath(option: String, as: List[String]) = (as indexOf option) match {
- case -1 => (None, as)
- case idx => (Some(as(idx + 1)), (as take idx) ::: (as drop (idx + 2)))
- }
-
- def searchAndRemoveOption(option: String, as: List[String]) = (as indexOf option) match {
- case -1 => (false, as)
- case idx => (true, (as take idx) ::: (as drop (idx + 1)))
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/TestFile.scala b/src/partest/scala/tools/partest/nest/TestFile.scala
index 87177772ab..880c6e431b 100644
--- a/src/partest/scala/tools/partest/nest/TestFile.scala
+++ b/src/partest/scala/tools/partest/nest/TestFile.scala
@@ -54,7 +54,6 @@ abstract class TestFile(val kind: String) extends TestFileCommon {
case class PosTestFile(file: JFile, fileManager: FileManager) extends TestFile("pos")
case class NegTestFile(file: JFile, fileManager: FileManager) extends TestFile("neg")
case class RunTestFile(file: JFile, fileManager: FileManager) extends TestFile("run")
-case class BuildManagerTestFile(file: JFile, fileManager: FileManager) extends TestFile("bm")
case class ScalaCheckTestFile(file: JFile, fileManager: FileManager) extends TestFile("scalacheck")
case class JvmTestFile(file: JFile, fileManager: FileManager) extends TestFile("jvm")
case class ShootoutTestFile(file: JFile, fileManager: FileManager) extends TestFile("shootout") {
diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala
index d38ce692d7..2b2ce2e435 100644
--- a/src/partest/scala/tools/partest/package.scala
+++ b/src/partest/scala/tools/partest/package.scala
@@ -12,11 +12,7 @@ import scala.sys.process.javaVmArguments
import java.util.concurrent.Callable
package partest {
- class TestState {
- def isOk = this eq TestState.Ok
- def isFail = this eq TestState.Fail
- def isTimeout = this eq TestState.Timeout
- }
+ class TestState { }
object TestState {
val Ok = new TestState
val Fail = new TestState
@@ -43,9 +39,8 @@ package object partest {
def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body }
- def path2String(path: String) = file2String(new JFile(path))
def file2String(f: JFile) =
- try SFile(f).slurp()
+ try SFile(f).slurp(scala.io.Codec.UTF8)
catch { case _: FileNotFoundException => "" }
def basename(name: String): String = Path(name).stripExtension
@@ -74,7 +69,6 @@ package object partest {
def isPartestDebug: Boolean =
propOrEmpty("partest.debug") == "true"
-
import scala.language.experimental.macros
/**
diff --git a/src/partest/scala/tools/partest/utils/PrintMgr.scala b/src/partest/scala/tools/partest/utils/PrintMgr.scala
deleted file mode 100644
index d25be87c1e..0000000000
--- a/src/partest/scala/tools/partest/utils/PrintMgr.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-package scala.tools.partest
-package utils
-
-/**
- * @author Thomas Hofer
- */
-object PrintMgr {
-
- val NONE = 0
- val SOME = 1
- val MANY = 2
-
- var outline = ""
- var success = ""
- var failure = ""
- var warning = ""
- var default = ""
-
- def initialization(number: Int) = number match {
- case MANY =>
- outline = Console.BOLD + Console.BLACK
- success = Console.BOLD + Console.GREEN
- failure = Console.BOLD + Console.RED
- warning = Console.BOLD + Console.YELLOW
- default = Console.RESET
- case SOME =>
- outline = Console.BOLD + Console.BLACK
- success = Console.RESET
- failure = Console.BOLD + Console.BLACK
- warning = Console.BOLD + Console.BLACK
- default = Console.RESET
- case _ =>
- }
-
- def printOutline(msg: String) = print(outline + msg + default)
-
- def printSuccess(msg: String) = print(success + msg + default)
-
- def printFailure(msg: String) = print(failure + msg + default)
-
- def printWarning(msg: String) = print(warning + msg + default)
-}
diff --git a/src/reflect/scala/reflect/api/BuildUtils.scala b/src/reflect/scala/reflect/api/BuildUtils.scala
index 0c8e81a220..8f256aa1f5 100644
--- a/src/reflect/scala/reflect/api/BuildUtils.scala
+++ b/src/reflect/scala/reflect/api/BuildUtils.scala
@@ -59,8 +59,6 @@ private[reflect] trait BuildUtils { self: Universe =>
def flagsFromBits(bits: Long): FlagSet
- def emptyValDef: ValDef
-
def This(sym: Symbol): Tree
def Select(qualifier: Tree, sym: Symbol): Select
diff --git a/src/reflect/scala/reflect/api/ImplicitTags.scala b/src/reflect/scala/reflect/api/ImplicitTags.scala
new file mode 100644
index 0000000000..3f377d6cff
--- /dev/null
+++ b/src/reflect/scala/reflect/api/ImplicitTags.scala
@@ -0,0 +1,108 @@
+package scala.reflect
+package api
+
+trait ImplicitTags {
+ self: Types =>
+
+ /** A tag that preserves the identity of the `Type` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val TypeTagg: ClassTag[Type]
+
+ /** A tag that preserves the identity of the `SingletonType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val SingletonTypeTag: ClassTag[SingletonType]
+
+ /** A tag that preserves the identity of the `ThisType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ThisTypeTag: ClassTag[ThisType]
+
+ /** A tag that preserves the identity of the `SingleType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val SingleTypeTag: ClassTag[SingleType]
+
+ /** A tag that preserves the identity of the `SuperType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val SuperTypeTag: ClassTag[SuperType]
+
+ /** A tag that preserves the identity of the `ConstantType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ConstantTypeTag: ClassTag[ConstantType]
+
+ /** A tag that preserves the identity of the `TypeRef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val TypeRefTag: ClassTag[TypeRef]
+
+ /** A tag that preserves the identity of the `CompoundType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val CompoundTypeTag: ClassTag[CompoundType]
+
+ /** A tag that preserves the identity of the `RefinedType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val RefinedTypeTag: ClassTag[RefinedType]
+
+ /** A tag that preserves the identity of the `ClassInfoType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ClassInfoTypeTag: ClassTag[ClassInfoType]
+
+ /** A tag that preserves the identity of the `MethodType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val MethodTypeTag: ClassTag[MethodType]
+
+ /** A tag that preserves the identity of the `NullaryMethodType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val NullaryMethodTypeTag: ClassTag[NullaryMethodType]
+
+ /** A tag that preserves the identity of the `PolyType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val PolyTypeTag: ClassTag[PolyType]
+
+ /** A tag that preserves the identity of the `ExistentialType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ExistentialTypeTag: ClassTag[ExistentialType]
+
+ /** A tag that preserves the identity of the `AnnotatedType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val AnnotatedTypeTag: ClassTag[AnnotatedType]
+
+ /** A tag that preserves the identity of the `TypeBounds` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val TypeBoundsTag: ClassTag[TypeBounds]
+
+ /** A tag that preserves the identity of the `BoundedWildcardType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val BoundedWildcardTypeTag: ClassTag[BoundedWildcardType]
+}
diff --git a/src/reflect/scala/reflect/api/Mirrors.scala b/src/reflect/scala/reflect/api/Mirrors.scala
index d0d8a37584..d30563c706 100644
--- a/src/reflect/scala/reflect/api/Mirrors.scala
+++ b/src/reflect/scala/reflect/api/Mirrors.scala
@@ -352,6 +352,11 @@ trait Mirrors { self: Universe =>
* the value of the base field. To achieve overriding behavior, use reflectMethod on an accessor.
*/
def set(value: Any): Unit
+
+ /** Creates a new mirror which uses the same symbol, but is bound to a different receiver.
+ * This is significantly faster than recreating the mirror from scratch.
+ */
+ def bind(newReceiver: Any): FieldMirror
}
/** A mirror that reflects a method.
@@ -373,6 +378,11 @@ trait Mirrors { self: Universe =>
* with invoking the corresponding method or constructor.
*/
def apply(args: Any*): Any
+
+ /** Creates a new mirror which uses the same symbol, but is bound to a different receiver.
+ * This is significantly faster than recreating the mirror from scratch.
+ */
+ def bind(newReceiver: Any): MethodMirror
}
/** A mirror that reflects the instance or static parts of a runtime class.
diff --git a/src/reflect/scala/reflect/api/Names.scala b/src/reflect/scala/reflect/api/Names.scala
index 7c12f180a8..6290b88d33 100644
--- a/src/reflect/scala/reflect/api/Names.scala
+++ b/src/reflect/scala/reflect/api/Names.scala
@@ -58,7 +58,7 @@ trait Names {
* Can be used for pattern matching, instance tests, serialization and likes.
* @group Tags
*/
-implicit val TypeNameTag: ClassTag[TypeName]
+ implicit val TypeNameTag: ClassTag[TypeName]
/** The abstract type of names representing types.
* @group Names
@@ -75,10 +75,10 @@ implicit val TypeNameTag: ClassTag[TypeName]
* @group API
*/
abstract class NameApi {
- /** Checks wether the name is a a term name */
+ /** Checks whether the name is a term name */
def isTermName: Boolean
- /** Checks wether the name is a a type name */
+ /** Checks whether the name is a type name */
def isTypeName: Boolean
/** Returns a term name that wraps the same string as `this` */
@@ -109,10 +109,38 @@ implicit val TypeNameTag: ClassTag[TypeName]
/** Create a new term name.
* @group Names
*/
+ @deprecated("Use TermName instead", "2.11.0")
def newTermName(s: String): TermName
/** Creates a new type name.
* @group Names
*/
+ @deprecated("Use TypeName instead", "2.11.0")
def newTypeName(s: String): TypeName
+
+ /** The constructor/extractor for `TermName` instances.
+ * @group Extractors
+ */
+ val TermName: TermNameExtractor
+
+ /** An extractor class to create and pattern match with syntax `TermName(s)`.
+ * @group Extractors
+ */
+ abstract class TermNameExtractor {
+ def apply(s: String): TermName
+ def unapply(name: TermName): Option[String]
+ }
+
+ /** The constructor/extractor for `TypeName` instances.
+ * @group Extractors
+ */
+ val TypeName: TypeNameExtractor
+
+ /** An extractor class to create and pattern match with syntax `TypeName(s)`.
+ * @group Extractors
+ */
+ abstract class TypeNameExtractor {
+ def apply(s: String): TypeName
+ def unapply(name: TypeName): Option[String]
+ }
}
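
    Once these extractors are available, name construction and matching no longer needs the deprecated newTermName/newTypeName; a minimal REPL sketch against the runtime universe:

    import scala.reflect.runtime.universe._

    val n = TermName("foo")          // replaces newTermName("foo")
    val TermName(s) = n              // s == "foo"
    n match {
      case TermName("foo") => println("matched foo")
      case _               => println("something else")
    }
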
diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala
index 85ddcc6523..d9e05e77c1 100644
--- a/src/reflect/scala/reflect/api/Printers.scala
+++ b/src/reflect/scala/reflect/api/Printers.scala
@@ -143,6 +143,7 @@ trait Printers { self: Universe =>
protected var printIds = false
protected var printKinds = false
protected var printMirrors = false
+ protected var printPositions = false
def withTypes: this.type = { printTypes = true; this }
def withoutTypes: this.type = { printTypes = false; this }
def withIds: this.type = { printIds = true; this }
@@ -151,10 +152,12 @@ trait Printers { self: Universe =>
def withoutKinds: this.type = { printKinds = false; this }
def withMirrors: this.type = { printMirrors = true; this }
def withoutMirrors: this.type = { printMirrors = false; this }
+ def withPositions: this.type = { printPositions = true; this }
+ def withoutPositions: this.type = { printPositions = false; this }
}
/** @group Printers */
- case class BooleanFlag(val value: Option[Boolean])
+ case class BooleanFlag(value: Option[Boolean])
/** @group Printers */
object BooleanFlag {
import scala.language.implicitConversions
@@ -163,31 +166,32 @@ trait Printers { self: Universe =>
}
/** @group Printers */
- protected def render(what: Any, mkPrinter: PrintWriter => TreePrinter, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String = {
+ protected def render(what: Any, mkPrinter: PrintWriter => TreePrinter, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None, printPositions: BooleanFlag = None): String = {
val buffer = new StringWriter()
val writer = new PrintWriter(buffer)
- var printer = mkPrinter(writer)
+ val printer = mkPrinter(writer)
printTypes.value.map(printTypes => if (printTypes) printer.withTypes else printer.withoutTypes)
printIds.value.map(printIds => if (printIds) printer.withIds else printer.withoutIds)
printKinds.value.map(printKinds => if (printKinds) printer.withKinds else printer.withoutKinds)
printMirrors.value.map(printMirrors => if (printMirrors) printer.withMirrors else printer.withoutMirrors)
+ printPositions.value.map(printPositions => if (printPositions) printer.withPositions else printer.withoutPositions)
printer.print(what)
writer.flush()
buffer.toString
}
/** By default trees are printed with `show`
- * @group Printers
+ * @group Printers
*/
override protected def treeToString(tree: Tree) = show(tree)
/** Renders a representation of a reflection artifact
- * as desugared Java code.
+ * as desugared Scala code.
*
- * @group Printers
+ * @group Printers
*/
- def show(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String =
- render(any, newTreePrinter(_), printTypes, printIds, printKinds, printMirrors)
+ def show(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None, printPositions: BooleanFlag = None): String =
+ render(any, newTreePrinter(_), printTypes, printIds, printKinds, printMirrors, printPositions)
/** Hook to define what `show(...)` means.
* @group Printers
@@ -195,12 +199,12 @@ trait Printers { self: Universe =>
protected def newTreePrinter(out: PrintWriter): TreePrinter
/** Renders internal structure of a reflection artifact as the
- * visualization of a Scala syntax tree.
+ * visualization of a Scala syntax tree.
*
- * @group Printers
+ * @group Printers
*/
- def showRaw(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String =
- render(any, newRawTreePrinter(_), printTypes, printIds, printKinds, printMirrors)
+ def showRaw(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None, printPositions: BooleanFlag = None): String =
+ render(any, newRawTreePrinter(_), printTypes, printIds, printKinds, printMirrors, printPositions)
/** Hook to define what `showRaw(...)` means.
* @group Printers
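
    With the new flag, position printing can be toggled per call; for instance, a sketch against the runtime universe, assuming this change is in place (the implicit Boolean-to-BooleanFlag conversion allows the named boolean argument):

    import scala.reflect.runtime.universe._

    val tree = reify { val x = 1; x + 1 }.tree
    // Structural dump without positions (previous behaviour)
    println(showRaw(tree))
    // Same dump, now annotated with positions
    println(showRaw(tree, printPositions = true))
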
diff --git a/src/reflect/scala/reflect/api/StandardNames.scala b/src/reflect/scala/reflect/api/StandardNames.scala
index 4886e4f8f7..6c78f18716 100644
--- a/src/reflect/scala/reflect/api/StandardNames.scala
+++ b/src/reflect/scala/reflect/api/StandardNames.scala
@@ -84,6 +84,11 @@ trait StandardNames {
*/
val ROOTPKG: NameType
+ /** The term name `<empty>`.
+ * Represents the empty package.
+ */
+ val EMPTY_PACKAGE_NAME: NameType
+
/** The string " " (a single whitespace).
* `LOCAL_SUFFIX_STRING` is appended to the names of local identifiers,
* when it's necessary to prevent a naming conflict. For example, underlying fields
diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala
index c8e03f1d91..dbad3dd478 100644
--- a/src/reflect/scala/reflect/api/Symbols.scala
+++ b/src/reflect/scala/reflect/api/Symbols.scala
@@ -245,7 +245,7 @@ trait Symbols { self: Universe =>
/** Does this symbol represent the definition of a term?
* Note that every symbol is either a term or a type.
* So for every symbol `sym` (except for `NoSymbol`),
- * either `sym.isTerm` is true or `sym.isTerm` is true.
+ * either `sym.isTerm` is true or `sym.isType` is true.
*
* @group Tests
*/
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index 0937a93738..0170bf2032 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -75,11 +75,32 @@ trait Trees { self: Universe =>
def isDef: Boolean
/** Is this tree one of the empty trees?
+ *
* Empty trees are: the `EmptyTree` null object, `TypeTree` instances that don't carry a type
* and the special `emptyValDef` singleton.
+ *
+ * In the compiler the `isEmpty` check and the derived `orElse` method are mostly used
+ * as a check for a tree being a null object (`EmptyTree` for term trees and empty TypeTree for type trees).
+ *
+ * Unfortunately `emptyValDef` is also considered to be `isEmpty`, but this is deemed to be
+ * a conceptual mistake pending a fix in https://issues.scala-lang.org/browse/SI-6762.
+ *
+ * @see `canHaveAttrs`
*/
def isEmpty: Boolean
+ /** Is this tree one of the empty trees?
+ *
+ * @see `isEmpty`
+ */
+ def nonEmpty: Boolean
+
+ /** Can this tree carry attributes (i.e. symbols, types or positions)?
+ * Typically the answer is yes, except for the `EmptyTree` null object and
+ * two special singletons: `emptyValDef` and `pendingSuperCall`.
+ */
+ def canHaveAttrs: Boolean
+
/** The canonical way to test if a Tree represents a term.
*/
def isTerm: Boolean
@@ -2405,6 +2426,15 @@ trait Trees { self: Universe =>
*/
val emptyValDef: ValDef
+ /** An empty superclass constructor call corresponding to:
+ * super.<init>()
+ * This is used as a placeholder in the primary constructor body in class templates
+ * to denote the insertion point of a call to superclass constructor after the typechecker
+ * figures out the superclass of a given template.
+ * @group Trees
+ */
+ val pendingSuperCall: Apply
+
// ---------------------- factories ----------------------------------------------
/** A factory method for `ClassDef` nodes.
@@ -2897,7 +2927,8 @@ trait Trees { self: Universe =>
def transform(tree: Tree): Tree = itransform(this, tree)
/** Transforms a list of trees. */
- def transformTrees(trees: List[Tree]): List[Tree] = trees mapConserve (transform(_))
+ def transformTrees(trees: List[Tree]): List[Tree] =
+ if (trees.isEmpty) Nil else trees mapConserve transform
/** Transforms a `Template`. */
def transformTemplate(tree: Template): Template =
@@ -2907,7 +2938,8 @@ trait Trees { self: Universe =>
trees mapConserve (tree => transform(tree).asInstanceOf[TypeDef])
/** Transforms a `ValDef`. */
def transformValDef(tree: ValDef): ValDef =
- if (tree.isEmpty) tree else transform(tree).asInstanceOf[ValDef]
+ if (tree eq emptyValDef) tree
+ else transform(tree).asInstanceOf[ValDef]
/** Transforms a list of `ValDef` nodes. */
def transformValDefs(trees: List[ValDef]): List[ValDef] =
trees mapConserve (transformValDef(_))
@@ -2926,8 +2958,10 @@ trait Trees { self: Universe =>
if (exprOwner != currentOwner && stat.isTerm) atOwner(exprOwner)(transform(stat))
else transform(stat)) filter (EmptyTree != _)
/** Transforms `Modifiers`. */
- def transformModifiers(mods: Modifiers): Modifiers =
- mods.mapAnnotations(transformTrees)
+ def transformModifiers(mods: Modifiers): Modifiers = {
+ if (mods.annotations.isEmpty) mods
+ else mods mapAnnotations transformTrees
+ }
/** Transforms a tree with a given owner symbol. */
def atOwner[A](owner: Symbol)(trans: => A): A = {
@@ -2993,15 +3027,19 @@ trait Trees { self: Universe =>
/** The constructor/extractor for `Modifiers` instances.
* @group Traversal
*/
- val Modifiers: ModifiersCreator
+ val Modifiers: ModifiersExtractor
+
+ @deprecated("Use ModifiersExtractor instead", "2.11.0")
+ type ModifiersCreator = ModifiersExtractor
/** An extractor class to create and pattern match with syntax `Modifiers(flags, privateWithin, annotations)`.
* Modifiers encapsulate flags, visibility annotations and Scala annotations for member definitions.
* @group Traversal
*/
- abstract class ModifiersCreator {
+ abstract class ModifiersExtractor {
def apply(): Modifiers = Modifiers(NoFlags, tpnme.EMPTY, List())
def apply(flags: FlagSet, privateWithin: Name, annotations: List[Tree]): Modifiers
+ def unapply(mods: Modifiers): Option[(FlagSet, Name, List[Tree])]
}
/** The factory for `Modifiers` instances.
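With the unapply introduced above, Modifiers can now be used in pattern position as well as for construction; a sketch assuming a universe that includes the new extractor (class C is a placeholder):

import scala.reflect.runtime.universe._

val Block(List(classDef: ClassDef), _) = reify { final class C }.tree

classDef match {
  case ClassDef(mods @ Modifiers(_, _, annots), name, _, _) =>
    println(s"$name: final=${mods.hasFlag(Flag.FINAL)}, annotations=$annots")
}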
diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala
index 72163ef0e9..e5140f23e5 100644
--- a/src/reflect/scala/reflect/api/Types.scala
+++ b/src/reflect/scala/reflect/api/Types.scala
@@ -50,7 +50,8 @@ package api
*
* @contentDiagram hideNodes "*Api"
*/
-trait Types { self: Universe =>
+trait Types extends ImplicitTags {
+ self: Universe =>
/** The type of Scala types, and also Scala type signatures.
* (No difference is internally made between the two).
@@ -59,12 +60,6 @@ trait Types { self: Universe =>
*/
type Type >: Null <: TypeApi
- /** A tag that preserves the identity of the `Type` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypeTagg: ClassTag[Type]
-
/** This constant is used as a special value that indicates that no meaningful type exists.
* @group Types
*/
@@ -256,12 +251,6 @@ trait Types { self: Universe =>
*/
type SingletonType >: Null <: Type
- /** A tag that preserves the identity of the `SingletonType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SingletonTypeTag: ClassTag[SingletonType]
-
/** A singleton type that describes types of the form on the left with the
* corresponding `ThisType` representation to the right:
* {{{
@@ -272,12 +261,6 @@ trait Types { self: Universe =>
*/
type ThisType >: Null <: AnyRef with SingletonType with ThisTypeApi
- /** A tag that preserves the identity of the `ThisType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ThisTypeTag: ClassTag[ThisType]
-
/** The constructor/extractor for `ThisType` instances.
* @group Extractors
*/
@@ -316,12 +299,6 @@ trait Types { self: Universe =>
*/
type SingleType >: Null <: AnyRef with SingletonType with SingleTypeApi
- /** A tag that preserves the identity of the `SingleType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SingleTypeTag: ClassTag[SingleType]
-
/** The constructor/extractor for `SingleType` instances.
* @group Extractors
*/
@@ -361,12 +338,6 @@ trait Types { self: Universe =>
*/
type SuperType >: Null <: AnyRef with SingletonType with SuperTypeApi
- /** A tag that preserves the identity of the `SuperType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SuperTypeTag: ClassTag[SuperType]
-
/** The constructor/extractor for `SuperType` instances.
* @group Extractors
*/
@@ -406,12 +377,6 @@ trait Types { self: Universe =>
*/
type ConstantType >: Null <: AnyRef with SingletonType with ConstantTypeApi
- /** A tag that preserves the identity of the `ConstantType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ConstantTypeTag: ClassTag[ConstantType]
-
/** The constructor/extractor for `ConstantType` instances.
* @group Extractors
*/
@@ -450,12 +415,6 @@ trait Types { self: Universe =>
*/
type TypeRef >: Null <: AnyRef with Type with TypeRefApi
- /** A tag that preserves the identity of the `TypeRef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypeRefTag: ClassTag[TypeRef]
-
/** The constructor/extractor for `TypeRef` instances.
* @group Extractors
*/
@@ -497,12 +456,6 @@ trait Types { self: Universe =>
*/
type CompoundType >: Null <: AnyRef with Type
- /** A tag that preserves the identity of the `CompoundType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val CompoundTypeTag: ClassTag[CompoundType]
-
/** The `RefinedType` type defines types of any of the forms on the left,
* with their RefinedType representations to the right.
* {{{
@@ -515,12 +468,6 @@ trait Types { self: Universe =>
*/
type RefinedType >: Null <: AnyRef with CompoundType with RefinedTypeApi
- /** A tag that preserves the identity of the `RefinedType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val RefinedTypeTag: ClassTag[RefinedType]
-
/** The constructor/extractor for `RefinedType` instances.
* @group Extractors
*/
@@ -567,12 +514,6 @@ trait Types { self: Universe =>
*/
type ClassInfoType >: Null <: AnyRef with CompoundType with ClassInfoTypeApi
- /** A tag that preserves the identity of the `ClassInfoType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ClassInfoTypeTag: ClassTag[ClassInfoType]
-
/** The constructor/extractor for `ClassInfoType` instances.
* @group Extractors
*/
@@ -610,12 +551,6 @@ trait Types { self: Universe =>
*/
type MethodType >: Null <: AnyRef with Type with MethodTypeApi
- /** A tag that preserves the identity of the `MethodType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val MethodTypeTag: ClassTag[MethodType]
-
/** The constructor/extractor for `MethodType` instances.
* @group Extractors
*/
@@ -660,12 +595,6 @@ trait Types { self: Universe =>
*/
type NullaryMethodType >: Null <: AnyRef with Type with NullaryMethodTypeApi
- /** A tag that preserves the identity of the `NullaryMethodType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val NullaryMethodTypeTag: ClassTag[NullaryMethodType]
-
/** The constructor/extractor for `NullaryMethodType` instances.
* @group Extractors
*/
@@ -696,12 +625,6 @@ trait Types { self: Universe =>
*/
type PolyType >: Null <: AnyRef with Type with PolyTypeApi
- /** A tag that preserves the identity of the `PolyType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val PolyTypeTag: ClassTag[PolyType]
-
/** The constructor/extractor for `PolyType` instances.
* @group Extractors
*/
@@ -736,12 +659,6 @@ trait Types { self: Universe =>
*/
type ExistentialType >: Null <: AnyRef with Type with ExistentialTypeApi
- /** A tag that preserves the identity of the `ExistentialType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ExistentialTypeTag: ClassTag[ExistentialType]
-
/** The constructor/extractor for `ExistentialType` instances.
* @group Extractors
*/
@@ -777,12 +694,6 @@ trait Types { self: Universe =>
*/
type AnnotatedType >: Null <: AnyRef with Type with AnnotatedTypeApi
- /** A tag that preserves the identity of the `AnnotatedType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val AnnotatedTypeTag: ClassTag[AnnotatedType]
-
/** The constructor/extractor for `AnnotatedType` instances.
* @group Extractors
*/
@@ -828,12 +739,6 @@ trait Types { self: Universe =>
*/
type TypeBounds >: Null <: AnyRef with Type with TypeBoundsApi
- /** A tag that preserves the identity of the `TypeBounds` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypeBoundsTag: ClassTag[TypeBounds]
-
/** The constructor/extractor for `TypeBounds` instances.
* @group Extractors
*/
@@ -885,12 +790,6 @@ trait Types { self: Universe =>
*/
type BoundedWildcardType >: Null <: AnyRef with Type with BoundedWildcardTypeApi
- /** A tag that preserves the identity of the `BoundedWildcardType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val BoundedWildcardTypeTag: ClassTag[BoundedWildcardType]
-
/** The constructor/extractor for `BoundedWildcardType` instances.
* @group Extractors
*/
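The per-type ClassTags deleted above are now inherited from ImplicitTags, so existing pattern matches over the abstract Type hierarchy keep compiling unchanged; for example:

import scala.reflect.runtime.universe._

typeOf[List[Int]] match {
  case TypeRef(pre, sym, args) => println(s"$sym applied to $args in prefix $pre")
  case _                       => println("not a TypeRef")
}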
diff --git a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
index 1ab975b233..73cc7fbbd6 100644
--- a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
@@ -52,7 +52,7 @@ trait AnnotationCheckers {
* given type tp, taking into account the given mode (see method adapt in trait Typers).
*/
@deprecated("Create an AnalyzerPlugin and use canAdaptAnnotations", "2.10.1")
- def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = false
+ def canAdaptAnnotations(tree: Tree, mode: Mode, pt: Type): Boolean = false
/**
* Adapt a tree that has an annotated type to the given type tp, taking into account the given
@@ -62,7 +62,7 @@ trait AnnotationCheckers {
* class cannot do the adapting, it should return the tree unchanged.
*/
@deprecated("Create an AnalyzerPlugin and use adaptAnnotations", "2.10.1")
- def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = tree
+ def adaptAnnotations(tree: Tree, mode: Mode, pt: Type): Tree = tree
/**
* Adapt the type of a return expression. The decision of a typer plugin whether the type
@@ -126,13 +126,13 @@ trait AnnotationCheckers {
else annotationCheckers.foldLeft(tpe)((tpe, checker) =>
if (!checker.isActive()) tpe else checker.addAnnotations(tree, tpe))
- def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean =
+ def canAdaptAnnotations(tree: Tree, mode: Mode, pt: Type): Boolean =
if (annotationCheckers.isEmpty) false
else annotationCheckers.exists(checker => {
checker.isActive() && checker.canAdaptAnnotations(tree, mode, pt)
})
- def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree =
+ def adaptAnnotations(tree: Tree, mode: Mode, pt: Type): Tree =
if (annotationCheckers.isEmpty) tree
else annotationCheckers.foldLeft(tree)((tree, checker) =>
if (!checker.isActive()) tree else checker.adaptAnnotations(tree, mode, pt))
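For orientation, this is roughly how an AnnotationChecker is registered against a compiler instance; a sketch only, implementing the one abstract member and leaving the (now Mode-typed) adaptation hooks at their defaults:

import scala.tools.nsc.{Global, Settings}

object AnnotationCheckerSketch {
  val global = new Global(new Settings)
  import global._

  // A deliberately trivial checker: annotated types always conform.
  addAnnotationChecker(new AnnotationChecker {
    def annotationsConform(tp1: Type, tp2: Type): Boolean = true
  })
}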
diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
index 032b45316e..f9a026744c 100644
--- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
@@ -6,14 +6,13 @@
package scala.reflect
package internal
-import util._
import pickling.ByteCodecs
import scala.annotation.tailrec
import scala.collection.immutable.ListMap
/** AnnotationInfo and its helpers */
trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
- import definitions.{ ThrowsClass, StaticAnnotationClass, isMetaAnnotation }
+ import definitions.{ ThrowsClass, ThrowableClass, StaticAnnotationClass, isMetaAnnotation }
// Common annotation code between Symbol and Type.
// For methods altering the annotation list, on Symbol it mutates
@@ -304,10 +303,6 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
/** Check whether any of the arguments mention a symbol */
def refsSymbol(sym: Symbol) = hasArgWhich(_.symbol == sym)
- /** Change all ident's with Symbol "from" to instead use symbol "to" */
- def substIdentSyms(from: Symbol, to: Symbol) =
- AnnotationInfo(atp, args map (_ substituteSymbols (List(from), List(to))), assocs) setPos pos
-
def stringArg(index: Int) = constantAtIndex(index) map (_.stringValue)
def intArg(index: Int) = constantAtIndex(index) map (_.intValue)
def symbolArg(index: Int) = argAtIndex(index) collect {
@@ -342,13 +337,15 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
object UnmappableAnnotation extends CompleteAnnotationInfo(NoType, Nil, Nil)
+ object ErroneousAnnotation extends CompleteAnnotationInfo(ErrorType, Nil, Nil)
+
/** Extracts symbol of thrown exception from AnnotationInfo.
*
* Supports both “old-style” `@throws(classOf[Exception])`
* as well as “new-style” `@throws[Exception]("cause")` annotations.
*/
object ThrownException {
- def unapply(ann: AnnotationInfo): Option[Symbol] =
+ def unapply(ann: AnnotationInfo): Option[Symbol] = {
ann match {
case AnnotationInfo(tpe, _, _) if tpe.typeSymbol != ThrowsClass =>
None
@@ -356,8 +353,11 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
case AnnotationInfo(_, List(Literal(Constant(tpe: Type))), _) =>
Some(tpe.typeSymbol)
// new-style: @throws[Exception], @throws[Exception]("cause")
- case AnnotationInfo(TypeRef(_, _, args), _, _) =>
- Some(args.head.typeSymbol)
+ case AnnotationInfo(TypeRef(_, _, arg :: _), _, _) =>
+ Some(arg.typeSymbol)
+ case AnnotationInfo(TypeRef(_, _, Nil), _, _) =>
+ Some(ThrowableClass)
}
+ }
}
}
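The two annotation shapes the ThrownException extractor distinguishes, as written by users (IOReader is a made-up class; per the new Nil case, a bare @throws now defaults to Throwable):

import java.io.IOException

class IOReader {
  @throws(classOf[IOException])                    // "old-style"
  def read(): Int = 0

  @throws[IOException]("if the stream is closed")  // "new-style", with a cause string
  def close(): Unit = ()
}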
diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
index 3c2b128c52..9daf9504f1 100644
--- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
@@ -64,7 +64,7 @@ trait BaseTypeSeqs {
//Console.println("compute closure of "+this+" => glb("+variants+")")
pending += i
try {
- mergePrefixAndArgs(variants, -1, lubDepth(variants)) match {
+ mergePrefixAndArgs(variants, Variance.Contravariant, lubDepth(variants)) match {
case Some(tp0) =>
pending(i) = false
elems(i) = tp0
@@ -115,7 +115,7 @@ trait BaseTypeSeqs {
def map(f: Type => Type): BaseTypeSeq = {
// inlined `elems map f` for performance
val len = length
- var arr = new Array[Type](len)
+ val arr = new Array[Type](len)
var i = 0
while (i < len) {
arr(i) = f(elems(i))
@@ -158,7 +158,7 @@ trait BaseTypeSeqs {
val parents = tp.parents
// Console.println("computing baseTypeSeq of " + tsym.tpe + " " + parents)//DEBUG
val buf = new mutable.ListBuffer[Type]
- buf += tsym.tpe
+ buf += tsym.tpe_*
var btsSize = 1
if (parents.nonEmpty) {
val nparents = parents.length
@@ -193,15 +193,23 @@ trait BaseTypeSeqs {
i += 1
}
var minTypes: List[Type] = List()
+ def alreadyInMinTypes(tp: Type): Boolean = {
+ @annotation.tailrec def loop(tps: List[Type]): Boolean = tps match {
+ case Nil => false
+ case x :: xs => (tp =:= x) || loop(xs)
+ }
+ loop(minTypes)
+ }
+
i = 0
while (i < nparents) {
if (nextTypeSymbol(i) == minSym) {
nextRawElem(i) match {
case RefinedType(variants, decls) =>
for (tp <- variants)
- if (!(minTypes exists (tp =:= _))) minTypes = tp :: minTypes
+ if (!alreadyInMinTypes(tp)) minTypes ::= tp
case tp =>
- if (!(minTypes exists (tp =:= _))) minTypes = tp :: minTypes
+ if (!alreadyInMinTypes(tp)) minTypes ::= tp
}
index(i) = index(i) + 1
}
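The same idea in isolation: an explicit @tailrec loop replaces `exists (tp =:= _)` so that no closure is allocated on this hot path. A generic sketch with illustrative names:

import scala.annotation.tailrec

def containsBy[A](xs: List[A], x: A)(equiv: (A, A) => Boolean): Boolean = {
  @tailrec def loop(rest: List[A]): Boolean = rest match {
    case Nil     => false
    case y :: ys => equiv(x, y) || loop(ys)
  }
  loop(xs)
}

containsBy(List(1, 2, 3), 2)(_ == _)   // true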
diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala
index 9f41f0336e..175943d264 100644
--- a/src/reflect/scala/reflect/internal/BuildUtils.scala
+++ b/src/reflect/scala/reflect/internal/BuildUtils.scala
@@ -1,8 +1,6 @@
package scala.reflect
package internal
-import Flags._
-
trait BuildUtils { self: SymbolTable =>
class BuildImpl extends BuildApi {
@@ -47,8 +45,6 @@ trait BuildUtils { self: SymbolTable =>
def flagsFromBits(bits: Long): FlagSet = bits
- def emptyValDef: ValDef = self.emptyValDef
-
def This(sym: Symbol): Tree = self.This(sym)
def Select(qualifier: Tree, sym: Symbol): Select = self.Select(qualifier, sym)
diff --git a/src/reflect/scala/reflect/internal/CapturedVariables.scala b/src/reflect/scala/reflect/internal/CapturedVariables.scala
index 77909d9157..c262c8474a 100644
--- a/src/reflect/scala/reflect/internal/CapturedVariables.scala
+++ b/src/reflect/scala/reflect/internal/CapturedVariables.scala
@@ -19,7 +19,7 @@ trait CapturedVariables { self: SymbolTable =>
/** Convert type of a captured variable to *Ref type.
*/
def capturedVariableType(vble: Symbol): Type =
- capturedVariableType(vble, NoType, false)
+ capturedVariableType(vble, NoType, erasedTypes = false)
/** Convert type of a captured variable to *Ref type.
*/
diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
index 7ccb661426..2ab3caa19d 100644
--- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala
+++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
@@ -342,7 +342,7 @@ object ClassfileConstants {
case JAVA_ACC_PRIVATE => PRIVATE
case JAVA_ACC_PROTECTED => PROTECTED
case JAVA_ACC_FINAL => FINAL
- case JAVA_ACC_SYNTHETIC => SYNTHETIC
+ case JAVA_ACC_SYNTHETIC => SYNTHETIC | ARTIFACT // maybe should be just artifact?
case JAVA_ACC_STATIC => STATIC
case JAVA_ACC_ABSTRACT => if (isAnnotation) 0L else if (isClass) ABSTRACT else DEFERRED
case JAVA_ACC_INTERFACE => if (isAnnotation) 0L else TRAIT | INTERFACE | ABSTRACT
@@ -372,7 +372,7 @@ object ClassfileConstants {
}
def methodFlags(jflags: Int): Long = {
initFields(jflags)
- translateFlags(jflags, if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE else 0)
+ translateFlags(jflags, if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE | ARTIFACT else 0)
}
}
object FlagTranslation extends FlagTranslation { }
@@ -380,11 +380,4 @@ object ClassfileConstants {
def toScalaMethodFlags(flags: Int): Long = FlagTranslation methodFlags flags
def toScalaClassFlags(flags: Int): Long = FlagTranslation classFlags flags
def toScalaFieldFlags(flags: Int): Long = FlagTranslation fieldFlags flags
-
- @deprecated("Use another method in this object", "2.10.0")
- def toScalaFlags(flags: Int, isClass: Boolean = false, isField: Boolean = false): Long = (
- if (isClass) toScalaClassFlags(flags)
- else if (isField) toScalaFieldFlags(flags)
- else toScalaMethodFlags(flags)
- )
}
diff --git a/src/reflect/scala/reflect/internal/Constants.scala b/src/reflect/scala/reflect/internal/Constants.scala
index 28bc3e1dd0..5ed2f675b2 100644
--- a/src/reflect/scala/reflect/internal/Constants.scala
+++ b/src/reflect/scala/reflect/internal/Constants.scala
@@ -94,7 +94,7 @@ trait Constants extends api.Constants {
def booleanValue: Boolean =
if (tag == BooleanTag) value.asInstanceOf[Boolean]
- else throw new Error("value " + value + " is not a boolean");
+ else throw new Error("value " + value + " is not a boolean")
def byteValue: Byte = tag match {
case ByteTag => value.asInstanceOf[Byte]
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 6e4ca76382..fe5a5c81e2 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -9,13 +9,12 @@ package internal
import scala.annotation.{ switch, meta }
import scala.collection.{ mutable, immutable }
import Flags._
-import PartialFunction._
import scala.reflect.api.{Universe => ApiUniverse}
trait Definitions extends api.StandardDefinitions {
self: SymbolTable =>
- import rootMirror.{getModule, getClassByName, getRequiredClass, getRequiredModule, getRequiredPackage, getClassIfDefined, getModuleIfDefined, getPackageObject, getPackageObjectIfDefined, requiredClass, requiredModule}
+ import rootMirror.{getModule, getPackage, getClassByName, getRequiredClass, getRequiredModule, getClassIfDefined, getModuleIfDefined, getPackageObject, getPackageObjectIfDefined, requiredClass, requiredModule}
object definitions extends DefinitionsClass
@@ -31,7 +30,7 @@ trait Definitions extends api.StandardDefinitions {
val clazz = owner.newClassSymbol(name, NoPosition, flags)
clazz setInfoAndEnter ClassInfoType(parents, newScope, clazz)
}
- private def newMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long = 0L): MethodSymbol = {
+ private def newMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long): MethodSymbol = {
val msym = owner.newMethod(name.encode, NoPosition, flags)
val params = msym.newSyntheticValueParams(formals)
msym setInfo MethodType(params, restpe)
@@ -149,7 +148,6 @@ trait Definitions extends api.StandardDefinitions {
FloatClass,
DoubleClass
)
- def ScalaValueClassCompanions: List[Symbol] = ScalaValueClasses map (_.companionSymbol)
def ScalaPrimitiveValueClasses: List[ClassSymbol] = ScalaValueClasses
}
@@ -157,9 +155,6 @@ trait Definitions extends api.StandardDefinitions {
private var isInitialized = false
def isDefinitionsInitialized = isInitialized
- // symbols related to packages
- var emptypackagescope: Scope = null //debug
-
@deprecated("Moved to rootMirror.RootPackage", "2.10.0")
val RootPackage: ModuleSymbol = rootMirror.RootPackage
@@ -174,15 +169,13 @@ trait Definitions extends api.StandardDefinitions {
// It becomes tricky to create dedicated objects for other symbols because
// of initialization order issues.
- lazy val JavaLangPackage = getRequiredPackage(sn.JavaLang)
+ lazy val JavaLangPackage = getPackage("java.lang")
lazy val JavaLangPackageClass = JavaLangPackage.moduleClass.asClass
- lazy val ScalaPackage = getRequiredPackage(nme.scala_)
+ lazy val ScalaPackage = getPackage("scala")
lazy val ScalaPackageClass = ScalaPackage.moduleClass.asClass
- lazy val RuntimePackage = getRequiredPackage("scala.runtime")
+ lazy val RuntimePackage = getPackage("scala.runtime")
lazy val RuntimePackageClass = RuntimePackage.moduleClass.asClass
- lazy val JavaLangEnumClass = requiredClass[java.lang.Enum[_]]
-
// convenient one-argument parameter lists
lazy val anyparam = List(AnyClass.tpe)
lazy val anyvalparam = List(AnyValClass.typeConstructor)
@@ -223,7 +216,7 @@ trait Definitions extends api.StandardDefinitions {
def fullyInitializeSymbol(sym: Symbol): Symbol = {
sym.initialize
fullyInitializeType(sym.info)
- fullyInitializeType(sym.tpe)
+ fullyInitializeType(sym.tpe_*)
sym
}
def fullyInitializeType(tp: Type): Type = {
@@ -235,14 +228,32 @@ trait Definitions extends api.StandardDefinitions {
scope.sorted foreach fullyInitializeSymbol
scope
}
+ /** Is this symbol a member of Object or Any? */
+ def isUniversalMember(sym: Symbol) = (
+ (sym ne NoSymbol)
+ && (ObjectClass isSubClass sym.owner)
+ )
+
+ /** Is this symbol unimportable? Unimportable symbols include:
+ * - constructors, because <init> is not a real name
+ * - private[this] members, which cannot be referenced from anywhere else
+ * - members of Any or Object, because every instance will inherit a
+ * definition which supersedes the imported one
+ */
+ def isUnimportable(sym: Symbol) = (
+ (sym eq NoSymbol)
+ || sym.isConstructor
+ || sym.isPrivateLocal
+ || isUniversalMember(sym)
+ )
+ def isImportable(sym: Symbol) = !isUnimportable(sym)
+
/** Is this type equivalent to Any, AnyVal, or AnyRef? */
def isTrivialTopType(tp: Type) = (
tp =:= AnyClass.tpe
|| tp =:= AnyValClass.tpe
|| tp =:= AnyRefClass.tpe
)
- /** Does this type have a parent which is none of Any, AnyVal, or AnyRef? */
- def hasNonTrivialParent(tp: Type) = tp.parents exists (t => !isTrivialTopType(tp))
private def fixupAsAnyTrait(tpe: Type): Type = tpe match {
case ClassInfoType(parents, decls, clazz) =>
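What "unimportable" means from the user's side, with a hypothetical Helpers object:

object Helpers { def greet(name: String) = s"hi, $name" }

object Demo {
  import Helpers.greet        // fine: an ordinary, importable member
  // import Helpers.toString  // a universal member: every value already inherits
  //                          // a definition that supersedes it, so such imports
  //                          // are classified as unimportable
  def main(args: Array[String]): Unit = println(greet("scala"))
}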
@@ -253,7 +264,6 @@ trait Definitions extends api.StandardDefinitions {
}
case PolyType(tparams, restpe) =>
PolyType(tparams, fixupAsAnyTrait(restpe))
-// case _ => tpe
}
// top types
@@ -312,6 +322,9 @@ trait Definitions extends api.StandardDefinitions {
lazy val ThrowableClass = getClassByName(sn.Throwable)
lazy val UninitializedErrorClass = requiredClass[UninitializedFieldError]
+ lazy val NPEConstructor = getMemberMethod(NullPointerExceptionClass, nme.CONSTRUCTOR) suchThat (_.paramss.flatten.isEmpty)
+ lazy val UninitializedFieldConstructor = UninitializedErrorClass.primaryConstructor
+
// fundamental reference classes
lazy val PartialFunctionClass = requiredClass[PartialFunction[_,_]]
lazy val AbstractPartialFunctionClass = requiredClass[scala.runtime.AbstractPartialFunction[_,_]]
@@ -335,15 +348,11 @@ trait Definitions extends api.StandardDefinitions {
lazy val UnqualifiedOwners = UnqualifiedModules.toSet ++ UnqualifiedModules.map(_.moduleClass)
lazy val PredefModule = requiredModule[scala.Predef.type]
- lazy val PredefModuleClass = PredefModule.moduleClass
-
def Predef_classOf = getMemberMethod(PredefModule, nme.classOf)
- def Predef_identity = getMemberMethod(PredefModule, nme.identity)
- def Predef_conforms = getMemberMethod(PredefModule, nme.conforms)
- def Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray)
- def Predef_wrapArray(tp: Type) = getMemberMethod(PredefModule, wrapArrayMethodName(tp))
- def Predef_??? = getMemberMethod(PredefModule, nme.???)
- def Predef_implicitly = getMemberMethod(PredefModule, nme.implicitly)
+ def Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray)
+ def Predef_wrapArray(tp: Type) = getMemberMethod(PredefModule, wrapArrayMethodName(tp))
+ def Predef_??? = getMemberMethod(PredefModule, nme.???)
+ def Predef_implicitly = getMemberMethod(PredefModule, nme.implicitly)
/** Is `sym` a member of Predef with the given name?
* Note: DON't replace this by sym == Predef_conforms/etc, as Predef_conforms is a `def`
@@ -359,7 +368,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val SpecializableModule = requiredModule[Specializable]
lazy val GroupOfSpecializable = getMemberClass(SpecializableModule, tpnme.Group)
- lazy val ConsoleModule = requiredModule[scala.Console.type]
lazy val ScalaRunTimeModule = requiredModule[scala.runtime.ScalaRunTime.type]
lazy val SymbolModule = requiredModule[scala.Symbol.type]
lazy val Symbol_apply = getMemberMethod(SymbolModule, nme.apply)
@@ -369,9 +377,7 @@ trait Definitions extends api.StandardDefinitions {
def arrayLengthMethod = getMemberMethod(ScalaRunTimeModule, nme.array_length)
def arrayCloneMethod = getMemberMethod(ScalaRunTimeModule, nme.array_clone)
def ensureAccessibleMethod = getMemberMethod(ScalaRunTimeModule, nme.ensureAccessible)
- def scalaRuntimeSameElements = getMemberMethod(ScalaRunTimeModule, nme.sameElements)
def arrayClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayClass)
- def arrayElementClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayElementClass)
// classes with special meanings
lazy val StringAddClass = requiredClass[scala.runtime.StringAdd]
@@ -382,11 +388,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val TraitSetterAnnotationClass = requiredClass[scala.runtime.TraitSetter]
lazy val DelayedInitClass = requiredClass[scala.DelayedInit]
def delayedInitMethod = getMemberMethod(DelayedInitClass, nme.delayedInit)
- // a dummy value that communicates that a delayedInit call is compiler-generated
- // from phase UnCurry to phase Constructors
- // !!! This is not used anywhere (it was checked in that way.)
- // def delayedInitArgVal = EmptyPackageClass.newValue(NoPosition, nme.delayedInitArg)
- // .setInfo(UnitClass.tpe)
lazy val TypeConstraintClass = requiredClass[scala.annotation.TypeConstraint]
lazy val SingletonClass = enterNewClass(ScalaPackageClass, tpnme.Singleton, anyparam, ABSTRACT | TRAIT | FINAL)
@@ -400,7 +401,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val RemoteExceptionClass = requiredClass[java.rmi.RemoteException]
lazy val ByNameParamClass = specialPolyClass(tpnme.BYNAME_PARAM_CLASS_NAME, COVARIANT)(_ => AnyClass.tpe)
- lazy val EqualsPatternClass = specialPolyClass(tpnme.EQUALS_PATTERN_NAME, 0L)(_ => AnyClass.tpe)
lazy val JavaRepeatedParamClass = specialPolyClass(tpnme.JAVA_REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => arrayType(tparam.tpe))
lazy val RepeatedParamClass = specialPolyClass(tpnme.REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => seqType(tparam.tpe))
@@ -408,7 +408,8 @@ trait Definitions extends api.StandardDefinitions {
def isScalaRepeatedParamType(tp: Type) = tp.typeSymbol == RepeatedParamClass
def isJavaRepeatedParamType(tp: Type) = tp.typeSymbol == JavaRepeatedParamClass
def isRepeatedParamType(tp: Type) = isScalaRepeatedParamType(tp) || isJavaRepeatedParamType(tp)
- def isRepeated(param: Symbol) = isRepeatedParamType(param.tpe)
+ def isRepeated(param: Symbol) = isRepeatedParamType(param.tpe_*)
+ def isByName(param: Symbol) = isByNameParamType(param.tpe_*)
def isCastSymbol(sym: Symbol) = sym == Any_asInstanceOf || sym == Object_asInstanceOf
def isJavaVarArgsMethod(m: Symbol) = m.isMethod && isJavaVarArgs(m.info.params)
@@ -433,10 +434,6 @@ trait Definitions extends api.StandardDefinitions {
case _ => tp
}
- def isPrimitiveArray(tp: Type) = tp match {
- case TypeRef(_, ArrayClass, arg :: Nil) => isPrimitiveValueClass(arg.typeSymbol)
- case _ => false
- }
def isReferenceArray(tp: Type) = tp match {
case TypeRef(_, ArrayClass, arg :: Nil) => arg <:< AnyRefClass.tpe
case _ => false
@@ -446,11 +443,8 @@ trait Definitions extends api.StandardDefinitions {
case _ => false
}
- lazy val MatchingStrategyClass = getRequiredClass("scala.MatchingStrategy")
-
// collections classes
lazy val ConsClass = requiredClass[scala.collection.immutable.::[_]]
- lazy val IterableClass = requiredClass[scala.collection.Iterable[_]]
lazy val IteratorClass = requiredClass[scala.collection.Iterator[_]]
lazy val ListClass = requiredClass[scala.collection.immutable.List[_]]
lazy val SeqClass = requiredClass[scala.collection.Seq[_]]
@@ -461,8 +455,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val List_apply = getMemberMethod(ListModule, nme.apply)
lazy val NilModule = requiredModule[scala.collection.immutable.Nil.type]
lazy val SeqModule = requiredModule[scala.collection.Seq.type]
- lazy val IteratorModule = requiredModule[scala.collection.Iterator.type]
- lazy val Iterator_apply = getMemberMethod(IteratorModule, nme.apply)
// arrays and their members
lazy val ArrayModule = requiredModule[scala.Array.type]
@@ -477,9 +469,7 @@ trait Definitions extends api.StandardDefinitions {
// reflection / structural types
lazy val SoftReferenceClass = requiredClass[java.lang.ref.SoftReference[_]]
- lazy val WeakReferenceClass = requiredClass[java.lang.ref.WeakReference[_]]
lazy val MethodClass = getClassByName(sn.MethodAsObject)
- def methodClass_setAccessible = getMemberMethod(MethodClass, nme.setAccessible)
lazy val EmptyMethodCacheClass = requiredClass[scala.runtime.EmptyMethodCache]
lazy val MethodCacheClass = requiredClass[scala.runtime.MethodCache]
def methodCache_find = getMemberMethod(MethodCacheClass, nme.find_)
@@ -503,7 +493,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val ExprClass = if (ExprsClass != NoSymbol) getMemberClass(ExprsClass, tpnme.Expr) else NoSymbol
def ExprSplice = if (ExprsClass != NoSymbol) getMemberMethod(ExprClass, nme.splice) else NoSymbol
def ExprValue = if (ExprsClass != NoSymbol) getMemberMethod(ExprClass, nme.value) else NoSymbol
- lazy val ExprModule = if (ExprsClass != NoSymbol) getMemberModule(ExprsClass, nme.Expr) else NoSymbol
lazy val ClassTagModule = requiredModule[scala.reflect.ClassTag[_]]
lazy val ClassTagClass = requiredClass[scala.reflect.ClassTag[_]]
@@ -529,7 +518,6 @@ trait Definitions extends api.StandardDefinitions {
def MacroContextPrefix = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.prefix) else NoSymbol
def MacroContextPrefixType = if (MacroContextClass != NoSymbol) getTypeMember(MacroContextClass, tpnme.PrefixType) else NoSymbol
def MacroContextUniverse = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.universe) else NoSymbol
- def MacroContextMirror = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.mirror) else NoSymbol
lazy val MacroImplAnnotation = requiredClass[scala.reflect.macros.internal.macroImpl]
lazy val StringContextClass = requiredClass[scala.StringContext]
@@ -551,7 +539,7 @@ trait Definitions extends api.StandardDefinitions {
// The given symbol represents either String.+ or StringAdd.+
def isStringAddition(sym: Symbol) = sym == String_+ || sym == StringAdd_+
- def isArrowAssoc(sym: Symbol) = ArrowAssocClass.tpe.decls.toList contains sym
+ def isArrowAssoc(sym: Symbol) = sym.owner == ArrowAssocClass
// The given symbol is a method with the right name and signature to be a runnable java program.
def isJavaMainMethod(sym: Symbol) = (sym.name == nme.main) && (sym.info match {
@@ -561,12 +549,6 @@ trait Definitions extends api.StandardDefinitions {
// The given class has a main method.
def hasJavaMainMethod(sym: Symbol): Boolean =
(sym.tpe member nme.main).alternatives exists isJavaMainMethod
- def hasJavaMainMethod(path: String): Boolean =
- hasJavaMainMethod(getModuleIfDefined(path))
-
- def isOptionType(tp: Type) = tp.typeSymbol isSubClass OptionClass
- def isSomeType(tp: Type) = tp.typeSymbol eq SomeClass
- def isNoneType(tp: Type) = tp.typeSymbol eq NoneModule
// Product, Tuple, Function, AbstractFunction
private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[ClassSymbol] = {
@@ -589,7 +571,6 @@ trait Definitions extends api.StandardDefinitions {
/** Creators for TupleN, ProductN, FunctionN. */
def tupleType(elems: List[Type]) = aritySpecificType(TupleClass, elems)
- def productType(elems: List[Type]) = aritySpecificType(ProductClass, elems)
def functionType(formals: List[Type], restpe: Type) = aritySpecificType(FunctionClass, formals, restpe)
def abstractFunctionType(formals: List[Type], restpe: Type) = aritySpecificType(AbstractFunctionClass, formals, restpe)
@@ -608,10 +589,6 @@ trait Definitions extends api.StandardDefinitions {
else nme.genericWrapArray
}
- @deprecated("Use isTupleType", "2.10.0")
- def isTupleTypeOrSubtype(tp: Type) = isTupleType(tp)
-
- def tupleField(n: Int, j: Int) = getMemberValue(TupleClass(n), nme.productAccessorName(j))
// NOTE: returns true for NoSymbol since it's included in the TupleClass array -- is this intentional?
def isTupleSymbol(sym: Symbol) = TupleClass contains unspecializedSymbol(sym)
def isProductNClass(sym: Symbol) = ProductClass contains sym
@@ -649,7 +626,7 @@ trait Definitions extends api.StandardDefinitions {
len <= MaxTupleArity && sym == TupleClass(len)
case _ => false
}
- def isTupleType(tp: Type) = isTupleTypeDirect(tp.normalize)
+ def isTupleType(tp: Type) = isTupleTypeDirect(tp.dealiasWiden)
lazy val ProductRootClass: ClassSymbol = requiredClass[scala.Product]
def Product_productArity = getMemberMethod(ProductRootClass, nme.productArity)
@@ -657,13 +634,8 @@ trait Definitions extends api.StandardDefinitions {
def Product_iterator = getMemberMethod(ProductRootClass, nme.productIterator)
def Product_productPrefix = getMemberMethod(ProductRootClass, nme.productPrefix)
def Product_canEqual = getMemberMethod(ProductRootClass, nme.canEqual_)
- // def Product_productElementName = getMemberMethod(ProductRootClass, nme.productElementName)
def productProj(z:Symbol, j: Int): TermSymbol = getMemberValue(z, nme.productAccessorName(j))
- def productProj(n: Int, j: Int): TermSymbol = productProj(ProductClass(n), j)
-
- /** returns true if this type is exactly ProductN[T1,...,Tn], not some subclass */
- def isExactProductType(tp: Type): Boolean = isProductNClass(tp.typeSymbol)
/** if tpe <: ProductN[T1,...,TN], returns List(T1,...,TN) else Nil */
def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNClass match {
@@ -676,19 +648,17 @@ trait Definitions extends api.StandardDefinitions {
case _ => tp
}
- def unapplyUnwrap(tpe:Type) = tpe.finalResultType.normalize match {
- case RefinedType(p :: _, _) => p.normalize
+ def unapplyUnwrap(tpe:Type) = tpe.finalResultType.dealiasWiden match {
+ case RefinedType(p :: _, _) => p.dealiasWiden
case tp => tp
}
- def functionApply(n: Int) = getMemberMethod(FunctionClass(n), nme.apply)
-
def abstractFunctionForFunctionType(tp: Type) = {
assert(isFunctionType(tp), tp)
abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last)
}
- def isFunctionType(tp: Type): Boolean = tp.normalize match {
+ def isFunctionType(tp: Type): Boolean = tp.dealiasWiden match {
case TypeRef(_, sym, args) if args.nonEmpty =>
val arity = args.length - 1 // -1 is the return type
arity <= MaxFunctionArity && sym == FunctionClass(arity)
@@ -701,8 +671,6 @@ trait Definitions extends api.StandardDefinitions {
(sym eq PartialFunctionClass) || (sym eq AbstractPartialFunctionClass)
}
- def isSeqType(tp: Type) = elementType(SeqClass, tp.normalize) != NoType
-
def elementType(container: Symbol, tp: Type): Type = tp match {
case TypeRef(_, `container`, arg :: Nil) => arg
case _ => NoType
@@ -715,14 +683,23 @@ trait Definitions extends api.StandardDefinitions {
def optionType(tp: Type) = appliedType(OptionClass, tp)
def scalaRepeatedType(arg: Type) = appliedType(RepeatedParamClass, arg)
def seqType(arg: Type) = appliedType(SeqClass, arg)
- def someType(tp: Type) = appliedType(SomeClass, tp)
- def StringArray = arrayType(StringClass.tpe)
- lazy val ObjectArray = arrayType(ObjectClass.tpe)
+ def ClassType(arg: Type) = if (phase.erasedTypes) ClassClass.tpe else appliedType(ClassClass, arg)
- def ClassType(arg: Type) =
- if (phase.erasedTypes || forMSIL) ClassClass.tpe
- else appliedType(ClassClass, arg)
+ /** Can we tell by inspecting the symbol that it will never
+ * at any phase have type parameters?
+ */
+ def neverHasTypeParameters(sym: Symbol) = sym match {
+ case _: RefinementClassSymbol => true
+ case _: ModuleClassSymbol => true
+ case _: ImplClassSymbol => true
+ case _ =>
+ (
+ sym.isPrimitiveValueClass
+ || sym.isAnonymousClass
+ || sym.initialize.isMonomorphicType
+ )
+ }
def EnumType(sym: Symbol) =
// given (in java): "class A { enum E { VAL1 } }"
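The distinction the predicate draws, observed through the public reflection API (takesTypeArgs is the closest public-facing check):

import scala.reflect.runtime.universe._

assert(!typeOf[Int].typeConstructor.takesTypeArgs)        // primitive value class
assert(!typeOf[Nil.type].typeConstructor.takesTypeArgs)   // a module class
assert(typeOf[List[Int]].typeConstructor.takesTypeArgs)   // genuinely polymorphic: List[A]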
@@ -731,9 +708,6 @@ trait Definitions extends api.StandardDefinitions {
// - .linkedClassOfClass: the ClassSymbol of the enumeration (class E)
sym.owner.linkedClassOfClass.tpe
- def vmClassType(arg: Type): Type = ClassType(arg)
- def vmSignature(sym: Symbol, info: Type): String = signature(info) // !!!
-
/** Given a class symbol C with type parameters T1, T2, ... Tn
* which have upper/lower bounds LB1/UB1, LB2/UB2, ..., LBn/UBn,
* returns an existential type of the form
@@ -741,48 +715,10 @@ trait Definitions extends api.StandardDefinitions {
* C[E1, ..., En] forSome { E1 >: LB1 <: UB1 ... en >: LBn <: UBn }.
*/
def classExistentialType(clazz: Symbol): Type =
- newExistentialType(clazz.typeParams, clazz.tpe)
+ existentialAbstraction(clazz.typeParams, clazz.tpe_*)
- /** Given type U, creates a Type representing Class[_ <: U].
- */
- def boundedClassType(upperBound: Type) =
- appliedTypeAsUpperBounds(ClassClass.typeConstructor, List(upperBound))
-
- /** To avoid unchecked warnings on polymorphic classes, translate
- * a Foo[T] into a Foo[_] for use in the pattern matcher.
- */
- @deprecated("Use classExistentialType", "2.10.0")
- def typeCaseType(clazz: Symbol): Type = classExistentialType(clazz)
-
- //
- // .NET backend
- //
-
- lazy val ComparatorClass = getRequiredClass("scala.runtime.Comparator")
- // System.ValueType
- lazy val ValueTypeClass: ClassSymbol = getClassByName(sn.ValueType)
- // System.MulticastDelegate
- lazy val DelegateClass: ClassSymbol = getClassByName(sn.Delegate)
- var Delegate_scalaCallers: List[Symbol] = List() // Syncnote: No protection necessary yet as only for .NET where reflection is not supported.
- // Symbol -> (Symbol, Type): scalaCaller -> (scalaMethodSym, DelegateType)
- // var Delegate_scalaCallerInfos: HashMap[Symbol, (Symbol, Type)] = _
- lazy val Delegate_scalaCallerTargets: mutable.HashMap[Symbol, Symbol] = mutable.HashMap()
-
- def isCorrespondingDelegate(delegateType: Type, functionType: Type): Boolean = {
- isSubType(delegateType, DelegateClass.tpe) &&
- (delegateType.member(nme.apply).tpe match {
- case MethodType(delegateParams, delegateReturn) =>
- isFunctionType(functionType) &&
- (functionType.normalize match {
- case TypeRef(_, _, args) =>
- (delegateParams.map(pt => {
- if (pt.tpe == AnyClass.tpe) definitions.ObjectClass.tpe else pt})
- ::: List(delegateReturn)) == args
- case _ => false
- })
- case _ => false
- })
- }
+ def unsafeClassExistentialType(clazz: Symbol): Type =
+ existentialAbstraction(clazz.unsafeTypeParams, clazz.tpe_*)
// members of class scala.Any
lazy val Any_== = enterNewMethod(AnyClass, nme.EQ, anyparam, booltype, FINAL)
@@ -852,12 +788,7 @@ trait Definitions extends api.StandardDefinitions {
else
x :: removeRedundantObjects(xs)
}
- /** Order a list of types with non-trait classes before others. */
- def classesFirst(tps: List[Type]): List[Type] = {
- val (classes, others) = tps partition (t => t.typeSymbol.isClass && !t.typeSymbol.isTrait)
- if (classes.isEmpty || others.isEmpty || (tps startsWith classes)) tps
- else classes ::: others
- }
+
/** The following transformations applied to a list of parents.
* If any parent is a class/trait, all parents which normalize to
* Object are discarded. Otherwise, all parents which normalize
@@ -885,10 +816,6 @@ trait Definitions extends api.StandardDefinitions {
def parentsString(parents: List[Type]) =
normalizedParents(parents) mkString " with "
- def typeParamsString(tp: Type) = tp match {
- case PolyType(tparams, _) => tparams map (_.defString) mkString ("[", ",", "]")
- case _ => ""
- }
def valueParamsString(tp: Type) = tp match {
case MethodType(params, _) => params map (_.defString) mkString ("(", ",", ")")
case _ => ""
@@ -900,8 +827,8 @@ trait Definitions extends api.StandardDefinitions {
lazy val Object_!= = enterNewMethod(ObjectClass, nme.NE, anyrefparam, booltype, FINAL)
lazy val Object_eq = enterNewMethod(ObjectClass, nme.eq, anyrefparam, booltype, FINAL)
lazy val Object_ne = enterNewMethod(ObjectClass, nme.ne, anyrefparam, booltype, FINAL)
- lazy val Object_isInstanceOf = newT1NoParamsMethod(ObjectClass, nme.isInstanceOf_Ob, FINAL | SYNTHETIC)(_ => booltype)
- lazy val Object_asInstanceOf = newT1NoParamsMethod(ObjectClass, nme.asInstanceOf_Ob, FINAL | SYNTHETIC)(_.typeConstructor)
+ lazy val Object_isInstanceOf = newT1NoParamsMethod(ObjectClass, nme.isInstanceOf_Ob, FINAL | SYNTHETIC | ARTIFACT)(_ => booltype)
+ lazy val Object_asInstanceOf = newT1NoParamsMethod(ObjectClass, nme.asInstanceOf_Ob, FINAL | SYNTHETIC | ARTIFACT)(_.typeConstructor)
lazy val Object_synchronized = newPolyMethod(1, ObjectClass, nme.synchronized_, FINAL)(tps =>
(Some(List(tps.head.typeConstructor)), tps.head.typeConstructor)
)
@@ -925,12 +852,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val BoxedNumberClass = getClassByName(sn.BoxedNumber)
lazy val BoxedCharacterClass = getClassByName(sn.BoxedCharacter)
lazy val BoxedBooleanClass = getClassByName(sn.BoxedBoolean)
- lazy val BoxedByteClass = requiredClass[java.lang.Byte]
- lazy val BoxedShortClass = requiredClass[java.lang.Short]
- lazy val BoxedIntClass = requiredClass[java.lang.Integer]
- lazy val BoxedLongClass = requiredClass[java.lang.Long]
- lazy val BoxedFloatClass = requiredClass[java.lang.Float]
- lazy val BoxedDoubleClass = requiredClass[java.lang.Double]
lazy val Boxes_isNumberOrBool = getDecl(BoxesRunTimeClass, nme.isBoxedNumberOrBoolean)
lazy val Boxes_isNumber = getDecl(BoxesRunTimeClass, nme.isBoxedNumber)
@@ -951,7 +872,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val ImplicitNotFoundClass = requiredClass[scala.annotation.implicitNotFound]
lazy val MigrationAnnotationClass = requiredClass[scala.annotation.migration]
lazy val ScalaStrictFPAttr = requiredClass[scala.annotation.strictfp]
- lazy val SerializableAttr = requiredClass[scala.annotation.serializable] // @serializable is deprecated
lazy val SwitchClass = requiredClass[scala.annotation.switch]
lazy val TailrecClass = requiredClass[scala.annotation.tailrec]
lazy val VarargsClass = requiredClass[scala.annotation.varargs]
@@ -960,7 +880,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val BeanPropertyAttr = requiredClass[scala.beans.BeanProperty]
lazy val BooleanBeanPropertyAttr = requiredClass[scala.beans.BooleanBeanProperty]
- lazy val CloneableAttr = requiredClass[scala.annotation.cloneable]
lazy val CompileTimeOnlyAttr = getClassIfDefined("scala.reflect.internal.annotations.compileTimeOnly")
lazy val DeprecatedAttr = requiredClass[scala.deprecated]
lazy val DeprecatedNameAttr = requiredClass[scala.deprecatedName]
@@ -985,8 +904,8 @@ trait Definitions extends api.StandardDefinitions {
lazy val GetterTargetClass = requiredClass[meta.getter]
lazy val ParamTargetClass = requiredClass[meta.param]
lazy val SetterTargetClass = requiredClass[meta.setter]
- lazy val ClassTargetClass = requiredClass[meta.companionClass]
lazy val ObjectTargetClass = requiredClass[meta.companionObject]
+ lazy val ClassTargetClass = requiredClass[meta.companionClass]
lazy val MethodTargetClass = requiredClass[meta.companionMethod] // TODO: module, moduleClass? package, packageObject?
lazy val LanguageFeatureAnnot = requiredClass[meta.languageFeature]
@@ -1005,11 +924,21 @@ trait Definitions extends api.StandardDefinitions {
// Trying to allow for deprecated locations
sym.isAliasType && isMetaAnnotation(sym.info.typeSymbol)
)
- lazy val metaAnnotations = Set[Symbol](
- FieldTargetClass, ParamTargetClass,
- GetterTargetClass, SetterTargetClass,
- BeanGetterTargetClass, BeanSetterTargetClass
- )
+ lazy val metaAnnotations: Set[Symbol] = getPackage("scala.annotation.meta").info.members filter (_ isSubClass StaticAnnotationClass) toSet
+
+ // According to the scala.annotation.meta package object:
+ // * By default, annotations on (`val`-, `var`- or plain) constructor parameters
+ // * end up on the parameter, not on any other entity. Annotations on fields
+ // * by default only end up on the field.
+ def defaultAnnotationTarget(t: Tree): Symbol = t match {
+ case ClassDef(_, _, _, _) => ClassTargetClass
+ case ModuleDef(_, _, _) => ObjectTargetClass
+ case vd @ ValDef(_, _, _, _) if vd.symbol.isParamAccessor => ParamTargetClass
+ case vd @ ValDef(_, _, _, _) if vd.symbol.isValueParameter => ParamTargetClass
+ case ValDef(_, _, _, _) => FieldTargetClass
+ case DefDef(_, _, _, _, _, _) => MethodTargetClass
+ case _ => GetterTargetClass
+ }
lazy val AnnotationDefaultAttr: ClassSymbol = {
val attr = enterNewClass(RuntimePackageClass, tpnme.AnnotationDefaultATTR, List(AnnotationClass.tpe))
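The defaults spelled out above, and how a meta-annotation overrides them (counted, Order and Order2 are invented names):

import scala.annotation.StaticAnnotation
import scala.annotation.meta.getter

class counted extends StaticAnnotation

class Order(@counted val items: Int)             // default: sticks to the constructor parameter
class Order2(@(counted @getter) val items: Int)  // redirected: lands on the generated getter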
@@ -1031,7 +960,6 @@ trait Definitions extends api.StandardDefinitions {
def getLanguageFeature(name: String, owner: Symbol = languageFeatureModule): Symbol = getMember(owner, newTypeName(name))
def termMember(owner: Symbol, name: String): Symbol = owner.info.member(newTermName(name))
- def typeMember(owner: Symbol, name: String): Symbol = owner.info.member(newTypeName(name))
def findNamedMember(fullName: Name, root: Symbol): Symbol = {
val segs = nme.segments(fullName.toString, fullName.isTermName)
@@ -1071,7 +999,6 @@ trait Definitions extends api.StandardDefinitions {
}
}
def getMemberClass(owner: Symbol, name: Name): ClassSymbol = {
- val y = getMember(owner, name.toTypeName)
getMember(owner, name.toTypeName) match {
case x: ClassSymbol => x
case _ => fatalMissingSymbol(owner, name, "member class")
@@ -1099,9 +1026,6 @@ trait Definitions extends api.StandardDefinitions {
def getDeclIfDefined(owner: Symbol, name: Name): Symbol =
owner.info.nonPrivateDecl(name)
- def packageExists(packageName: String): Boolean =
- getModuleIfDefined(packageName).isPackage
-
private def newAlias(owner: Symbol, name: TypeName, alias: Type): AliasTypeSymbol =
owner.newAliasType(name) setInfoAndEnter alias
@@ -1133,7 +1057,6 @@ trait Definitions extends api.StandardDefinitions {
newPolyMethod(1, owner, name, flags)(tparams => (Some(Nil), createFn(tparams.head)))
}
- lazy val boxedClassValues = boxedClass.values.toSet[Symbol]
lazy val isUnbox = unboxMethod.values.toSet[Symbol]
lazy val isBox = boxMethod.values.toSet[Symbol]
@@ -1150,8 +1073,7 @@ trait Definitions extends api.StandardDefinitions {
AnyValClass,
NullClass,
NothingClass,
- SingletonClass,
- EqualsPatternClass
+ SingletonClass
)
/** Lists core methods that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */
lazy val syntheticCoreMethods = List(
@@ -1193,8 +1115,6 @@ trait Definitions extends api.StandardDefinitions {
/** Is symbol a value class? */
def isPrimitiveValueClass(sym: Symbol) = ScalaValueClasses contains sym
- def isNonUnitValueClass(sym: Symbol) = isPrimitiveValueClass(sym) && (sym != UnitClass)
- def isSpecializableClass(sym: Symbol) = isPrimitiveValueClass(sym) || (sym == AnyRefClass)
def isPrimitiveValueType(tp: Type) = isPrimitiveValueClass(tp.typeSymbol)
/** Is symbol a boxed value class, e.g. java.lang.Integer? */
@@ -1223,10 +1143,10 @@ trait Definitions extends api.StandardDefinitions {
}
def flatNameString(sym: Symbol, separator: Char): String =
if (sym == NoSymbol) "" // be more resistant to error conditions, e.g. neg/t3222.scala
- else if (sym.owner.isPackageClass) sym.javaClassName
+ else if (sym.isTopLevel) sym.javaClassName
else flatNameString(sym.owner, separator) + nme.NAME_JOIN_STRING + sym.simpleName
def signature1(etp: Type): String = {
- if (etp.typeSymbol == ArrayClass) "[" + signature1(erasure(etp.normalize.typeArgs.head))
+ if (etp.typeSymbol == ArrayClass) "[" + signature1(erasure(etp.dealiasWiden.typeArgs.head))
else if (isPrimitiveValueClass(etp.typeSymbol)) abbrvTag(etp.typeSymbol).toString()
else "L" + flatNameString(etp.typeSymbol, '/') + ";"
}
@@ -1235,49 +1155,11 @@ trait Definitions extends api.StandardDefinitions {
else flatNameString(etp.typeSymbol, '.')
}
- /** Surgery on the value classes. Without this, AnyVals defined in source
- * files end up with an AnyRef parent. It is likely there is a better way
- * to evade that AnyRef.
- */
- private def setParents(sym: Symbol, parents: List[Type]): Symbol = sym.rawInfo match {
- case ClassInfoType(_, scope, clazz) =>
- sym setInfo ClassInfoType(parents, scope, clazz)
- case _ =>
- sym
- }
-
def init() {
if (isInitialized) return
// force initialization of every symbol that is synthesized or hijacked by the compiler
- val forced = symbolsNotPresentInBytecode
+ val _ = symbolsNotPresentInBytecode
isInitialized = true
} //init
-
- var nbScalaCallers: Int = 0
- def newScalaCaller(delegateType: Type): MethodSymbol = {
- assert(forMSIL, "scalaCallers can only be created if target is .NET")
- // object: reference to object on which to call (scala-)method
- val paramTypes: List[Type] = List(ObjectClass.tpe)
- val name = newTermName("$scalaCaller$$" + nbScalaCallers)
- // tparam => resultType, which is the resultType of PolyType, i.e. the result type after applying the
- // type parameter =-> a MethodType in this case
- // TODO: set type bounds manually (-> MulticastDelegate), see newTypeParam
- val newCaller = enterNewMethod(DelegateClass, name, paramTypes, delegateType, FINAL | STATIC)
- // val newCaller = newPolyMethod(DelegateClass, name,
- // tparam => MethodType(paramTypes, tparam.typeConstructor)) setFlag (FINAL | STATIC)
- Delegate_scalaCallers = Delegate_scalaCallers ::: List(newCaller)
- nbScalaCallers += 1
- newCaller
- }
-
- // def addScalaCallerInfo(scalaCaller: Symbol, methSym: Symbol, delType: Type) {
- // assert(Delegate_scalaCallers contains scalaCaller)
- // Delegate_scalaCallerInfos += (scalaCaller -> (methSym, delType))
- // }
-
- def addScalaCallerInfo(scalaCaller: Symbol, methSym: Symbol) {
- assert(Delegate_scalaCallers contains scalaCaller)
- Delegate_scalaCallerTargets += (scalaCaller -> methSym)
- }
}
}
diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
index 8b24678fd6..281a32caf6 100644
--- a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
+++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
@@ -7,7 +7,6 @@ package scala.reflect
package internal
import scala.collection.{ mutable, immutable }
-import util._
/** The name of this trait defines the eventual intent better than
* it does the initial contents.
diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala
index 86cbba9c50..1987f34474 100644
--- a/src/reflect/scala/reflect/internal/Flags.scala
+++ b/src/reflect/scala/reflect/internal/Flags.scala
@@ -116,6 +116,20 @@ class ModifierFlags {
final val LAZY = 1L << 31 // symbol is a lazy val. can't have MUTABLE unless transformed by typer
final val PRESUPER = 1L << 37 // value is evaluated before super call
final val DEFAULTINIT = 1L << 41 // symbol is initialized to the default value: used by -Xcheckinit
+ final val ARTIFACT = 1L << 46 // symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode
+
+ /** Symbols which are marked ARTIFACT. (Expand this list?)
+ *
+ * - $outer fields and accessors
+ * - super accessors
+ * - protected accessors
+ * - lazy local accessors
+ * - bridge methods
+ * - default argument getters
+ * - evaluation-order preserving locals for right-associative and out-of-order named arguments
+ * - catch-expression storing vals
+ * - anything else which receives a setFlag(ARTIFACT)
+ */
// Overridden.
def flagToString(flag: Long): String = ""
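Bridge methods are one of the symbol classes on that list; at the bytecode level they were already ACC_SYNTHETIC, which plain Java reflection can confirm (Box and IntBox are illustrative):

class Box[A] { def get: A = null.asInstanceOf[A] }
class IntBox extends Box[Int] { override def get: Int = 1 }

object BridgeDemo {
  def main(args: Array[String]): Unit =
    classOf[IntBox].getDeclaredMethods.filter(_.isBridge).foreach { m =>
      println(s"${m.getName}: bridge=${m.isBridge}, synthetic=${m.isSynthetic}")
    }
}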
@@ -161,11 +175,10 @@ class Flags extends ModifierFlags {
final val VBRIDGE = 1L << 42 // symbol is a varargs bridge
final val VARARGS = 1L << 43 // symbol is a Java-style varargs method
- final val TRIEDCOOKING = 1L << 44 // ``Cooking'' has been tried on this symbol
- // A Java method's type is ``cooked'' by transforming raw types to existentials
+ final val TRIEDCOOKING = 1L << 44 // `Cooking` has been tried on this symbol
+ // A Java method's type is `cooked` by transforming raw types to existentials
final val SYNCHRONIZED = 1L << 45 // symbol is a method which should be marked ACC_SYNCHRONIZED
- final val ARTIFACT = 1L << 46 // symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode
// ------- shift definitions -------------------------------------------------------
@@ -248,7 +261,7 @@ class Flags extends ModifierFlags {
/** These modifiers appear in TreePrinter output. */
final val PrintableFlags =
ExplicitFlags | BridgeFlags | LOCAL | SYNTHETIC | STABLE | CASEACCESSOR | MACRO |
- ACCESSOR | SUPERACCESSOR | PARAMACCESSOR | STATIC | SPECIALIZED | SYNCHRONIZED
+ ACCESSOR | SUPERACCESSOR | PARAMACCESSOR | STATIC | SPECIALIZED | SYNCHRONIZED | ARTIFACT
/** When a symbol for a field is created, only these flags survive
* from Modifiers. Others which may be applied at creation time are:
@@ -420,7 +433,7 @@ class Flags extends ModifierFlags {
case VARARGS => "<varargs>" // (1L << 43)
case TRIEDCOOKING => "<triedcooking>" // (1L << 44)
case SYNCHRONIZED => "<synchronized>" // (1L << 45)
- case 0x400000000000L => "" // (1L << 46)
+ case ARTIFACT => "<artifact>" // (1L << 46)
case 0x800000000000L => "" // (1L << 47)
case 0x1000000000000L => "" // (1L << 48)
case 0x2000000000000L => "" // (1L << 49)
@@ -495,4 +508,4 @@ class Flags extends ModifierFlags {
final val rawFlagPickledOrder: Array[Long] = pickledListOrder.toArray
}
-object Flags extends Flags { }
+object Flags extends Flags
diff --git a/src/reflect/scala/reflect/internal/HasFlags.scala b/src/reflect/scala/reflect/internal/HasFlags.scala
index 4a3663b8ea..6f8befd23e 100644
--- a/src/reflect/scala/reflect/internal/HasFlags.scala
+++ b/src/reflect/scala/reflect/internal/HasFlags.scala
@@ -158,13 +158,14 @@ trait HasFlags {
else nonAccess + " " + access
}
+ // Guess this can't be deprecated seeing as it's in the reflect API.
+ def isParameter = hasFlag(PARAM)
+
// Backward compat section
@deprecated( "Use isTrait", "2.10.0")
def hasTraitFlag = hasFlag(TRAIT)
@deprecated("Use hasDefault", "2.10.0")
def hasDefaultFlag = hasFlag(DEFAULTPARAM)
- @deprecated("Use isValueParameter or isTypeParameter", "2.10.0")
- def isParameter = hasFlag(PARAM)
@deprecated("Use flagString", "2.10.0")
def defaultFlagString = flagString
@deprecated("Use flagString(mask)", "2.10.0")
diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala
index 43902c1930..53410b29c5 100644
--- a/src/reflect/scala/reflect/internal/Importers.scala
+++ b/src/reflect/scala/reflect/internal/Importers.scala
@@ -316,7 +316,6 @@ trait Importers extends api.Importers { self: SymbolTable =>
def importName(name: from.Name): Name =
if (name.isTypeName) newTypeName(name.toString) else newTermName(name.toString)
def importTypeName(name: from.TypeName): TypeName = importName(name).toTypeName
- def importTermName(name: from.TermName): TermName = importName(name).toTermName
def importModifiers(mods: from.Modifiers): Modifiers =
new Modifiers(mods.flags, importName(mods.privateWithin), mods.annotations map importTree)
@@ -334,6 +333,8 @@ trait Importers extends api.Importers { self: SymbolTable =>
new ModuleDef(importModifiers(mods), importName(name).toTermName, importTemplate(impl))
case from.emptyValDef =>
emptyValDef
+ case from.pendingSuperCall =>
+ pendingSuperCall
case from.ValDef(mods, name, tpt, rhs) =>
new ValDef(importModifiers(mods), importName(name).toTermName, importTree(tpt), importTree(rhs))
case from.DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
@@ -427,18 +428,18 @@ trait Importers extends api.Importers { self: SymbolTable =>
}
addFixup({
if (mytree != null) {
- val mysym = if (tree.hasSymbol) importSymbol(tree.symbol) else NoSymbol
+ val mysym = if (tree.hasSymbolField) importSymbol(tree.symbol) else NoSymbol
val mytpe = importType(tree.tpe)
mytree match {
case mytt: TypeTree =>
val tt = tree.asInstanceOf[from.TypeTree]
- if (mytree.hasSymbol) mytt.symbol = mysym
+ if (mytree.hasSymbolField) mytt.symbol = mysym
if (tt.wasEmpty) mytt.defineType(mytpe) else mytt.setType(mytpe)
if (tt.original != null) mytt.setOriginal(importTree(tt.original))
case _ =>
- if (mytree.hasSymbol) mytree.symbol = importSymbol(tree.symbol)
- mytree.tpe = importType(tree.tpe)
+ if (mytree.hasSymbolField) mytree.symbol = importSymbol(tree.symbol)
+ mytree setType importType(tree.tpe)
}
}
})
diff --git a/src/reflect/scala/reflect/internal/InfoTransformers.scala b/src/reflect/scala/reflect/internal/InfoTransformers.scala
index 82904b0b68..4e84a29fd0 100644
--- a/src/reflect/scala/reflect/internal/InfoTransformers.scala
+++ b/src/reflect/scala/reflect/internal/InfoTransformers.scala
@@ -43,7 +43,7 @@ trait InfoTransformers {
if (from == this.pid) this
else if (from < this.pid)
if (prev.pid < from) this
- else prev.nextFrom(from);
+ else prev.nextFrom(from)
else if (next.pid == NoPhase.id) next
else next.nextFrom(from)
}
diff --git a/src/reflect/scala/reflect/internal/Kinds.scala b/src/reflect/scala/reflect/internal/Kinds.scala
index 08686832ef..cc8dd16d69 100644
--- a/src/reflect/scala/reflect/internal/Kinds.scala
+++ b/src/reflect/scala/reflect/internal/Kinds.scala
@@ -36,7 +36,7 @@ trait Kinds {
private def varStr(s: Symbol): String =
if (s.isCovariant) "covariant"
else if (s.isContravariant) "contravariant"
- else "invariant";
+ else "invariant"
private def qualify(a0: Symbol, b0: Symbol): String = if (a0.toString != b0.toString) "" else {
if((a0 eq b0) || (a0.owner eq b0.owner)) ""
@@ -86,15 +86,15 @@ trait Kinds {
// plan: split into kind inference and subkinding
// every Type has a (cached) Kind
def kindsConform(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): Boolean =
- checkKindBounds0(tparams, targs, pre, owner, false).isEmpty
+ checkKindBounds0(tparams, targs, pre, owner, explainErrors = false).isEmpty
/** Check whether `sym1`'s variance conforms to `sym2`'s variance.
*
* If `sym2` is invariant, `sym1`'s variance is irrelevant. Otherwise they must be equal.
*/
private def variancesMatch(sym1: Symbol, sym2: Symbol) = (
- sym2.variance==0
- || sym1.variance==sym2.variance
+ sym2.variance.isInvariant
+ || sym1.variance == sym2.variance
)
/** Check well-kindedness of type application (assumes arities are already checked) -- @M
@@ -229,4 +229,4 @@ trait Kinds {
}
}
}
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala
index 0beb8e368f..d9f1d90b62 100644
--- a/src/reflect/scala/reflect/internal/Mirrors.scala
+++ b/src/reflect/scala/reflect/internal/Mirrors.scala
@@ -19,6 +19,8 @@ trait Mirrors extends api.Mirrors {
trait RootSymbol extends Symbol { def mirror: Mirror }
abstract class RootsBase(rootOwner: Symbol) extends scala.reflect.api.Mirror[Mirrors.this.type] { thisMirror =>
+ private[this] var initialized = false
+ def isMirrorInitialized = initialized
protected[scala] def rootLoader: LazyType
@@ -40,7 +42,7 @@ trait Mirrors extends api.Mirrors {
if (point > 0) getModuleOrClass(path.toTermName, point)
else RootClass
val name = path subName (point + 1, len)
- var sym = owner.info member name
+ val sym = owner.info member name
val result = if (path.isTermName) sym.suchThat(_ hasFlag MODULE) else sym
if (result != NoSymbol) result
else {
@@ -76,7 +78,9 @@ trait Mirrors extends api.Mirrors {
protected def universeMissingHook(owner: Symbol, name: Name): Symbol = thisUniverse.missingHook(owner, name)
- private[scala] def missingHook(owner: Symbol, name: Name): Symbol = mirrorMissingHook(owner, name) orElse universeMissingHook(owner, name)
+ private[scala] def missingHook(owner: Symbol, name: Name): Symbol = logResult(s"missingHook($owner, $name)")(
+ mirrorMissingHook(owner, name) orElse universeMissingHook(owner, name)
+ )
// todo: get rid of most the methods here and keep just staticClass/Module/Package
@@ -168,14 +172,15 @@ trait Mirrors extends api.Mirrors {
case _ => MissingRequirementError.notFound("package " + fullname)
}
- def getPackage(fullname: Name): ModuleSymbol =
+ def getPackage(fullname: TermName): ModuleSymbol =
ensurePackageSymbol(fullname.toString, getModuleOrClass(fullname), allowModules = true)
- def getRequiredPackage(fullname: String): ModuleSymbol =
+ @deprecated("Use getPackage", "2.11.0") def getRequiredPackage(fullname: String): ModuleSymbol =
getPackage(newTermNameCached(fullname))
- def getPackageObject(fullname: String): ModuleSymbol =
- (getPackage(newTermName(fullname)).info member nme.PACKAGE) match {
+ def getPackageObject(fullname: String): ModuleSymbol = getPackageObject(newTermName(fullname))
+ def getPackageObject(fullname: TermName): ModuleSymbol =
+ (getPackage(fullname).info member nme.PACKAGE) match {
case x: ModuleSymbol => x
case _ => MissingRequirementError.notFound("package object " + fullname)
}
@@ -183,8 +188,8 @@ trait Mirrors extends api.Mirrors {
def getPackageObjectIfDefined(fullname: String): Symbol =
getPackageObjectIfDefined(newTermNameCached(fullname))
- def getPackageObjectIfDefined(fullname: Name): Symbol =
- wrapMissing(getPackageObject(fullname.toTermName))
+ def getPackageObjectIfDefined(fullname: TermName): Symbol =
+ wrapMissing(getPackageObject(fullname))
override def staticPackage(fullname: String): ModuleSymbol =
ensurePackageSymbol(fullname.toString, getModuleOrClass(newTermNameCached(fullname)), allowModules = false)
@@ -202,7 +207,7 @@ trait Mirrors extends api.Mirrors {
erasureString(classTag[T].runtimeClass)
}
- @inline private def wrapMissing(body: => Symbol): Symbol =
+ @inline final def wrapMissing(body: => Symbol): Symbol =
try body
catch { case _: MissingRequirementError => NoSymbol }
@@ -228,6 +233,7 @@ trait Mirrors extends api.Mirrors {
// }
def init() {
+ if (initialized) return
// Still fiddling with whether it's cleaner to do some of this setup here
// or from constructors. The latter approach tends to invite init order issues.
@@ -239,6 +245,8 @@ trait Mirrors extends api.Mirrors {
RootClass.info.decls enter EmptyPackage
RootClass.info.decls enter RootPackage
+
+ initialized = true
}
}
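The Mirrors hunk guards init() with an initialized flag so repeated calls become no-ops. A minimal sketch of that idempotent-init pattern, under hypothetical names (RootsSketch is illustrative only, not the real RootsBase):

class RootsSketch {
  private[this] var initialized = false
  def isMirrorInitialized = initialized

  def init(): Unit = {
    if (initialized) return    // second and later calls do nothing
    // ... enter root/empty packages here ...
    initialized = true
  }
}

object RootsSketch {
  def main(args: Array[String]): Unit = {
    val m = new RootsSketch
    m.init(); m.init()                 // safe to call repeatedly
    println(m.isMirrorInitialized)     // true
  }
}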
diff --git a/src/compiler/scala/tools/nsc/typechecker/Modes.scala b/src/reflect/scala/reflect/internal/Mode.scala
index d650762ac1..850e3b5669 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Modes.scala
+++ b/src/reflect/scala/reflect/internal/Mode.scala
@@ -3,75 +3,76 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
-package typechecker
+package scala.reflect
+package internal
+
+object Mode {
+ private implicit def liftIntBitsToMode(bits: Int): Mode = apply(bits)
+ def apply(bits: Int): Mode = new Mode(bits)
-/** Mode constants.
- */
-trait Modes {
/** NOmode, EXPRmode and PATTERNmode are mutually exclusive.
*/
- final val NOmode = 0x000
- final val EXPRmode = 0x001
- final val PATTERNmode = 0x002
+ final val NOmode: Mode = 0x000
+ final val EXPRmode: Mode = 0x001
+ final val PATTERNmode: Mode = 0x002
/** TYPEmode needs a comment. <-- XXX.
*/
- final val TYPEmode = 0x004
+ final val TYPEmode: Mode = 0x004
/** SCCmode is orthogonal to above. When set we are
* in the this or super constructor call of a constructor.
*/
- final val SCCmode = 0x008
+ final val SCCmode: Mode = 0x008
/** FUNmode is orthogonal to above.
* When set we are looking for a method or constructor.
*/
- final val FUNmode = 0x010
+ final val FUNmode: Mode = 0x010
/** POLYmode is orthogonal to above.
* When set expression types can be polymorphic.
*/
- final val POLYmode = 0x020
+ final val POLYmode: Mode = 0x020
/** QUALmode is orthogonal to above. When set
* expressions may be packages and Java static modules.
*/
- final val QUALmode = 0x040
+ final val QUALmode: Mode = 0x040
/** TAPPmode is set for the function/type constructor
* part of a type application. When set we do not decompose PolyTypes.
*/
- final val TAPPmode = 0x080
+ final val TAPPmode: Mode = 0x080
/** SUPERCONSTRmode is set for the super
* in a superclass constructor call super.<init>.
*/
- final val SUPERCONSTRmode = 0x100
+ final val SUPERCONSTRmode: Mode = 0x100
/** SNDTRYmode indicates that an application is typed for the 2nd time.
* In that case functions may no longer be coerced with implicit views.
*/
- final val SNDTRYmode = 0x200
+ final val SNDTRYmode: Mode = 0x200
/** LHSmode is set for the left-hand side of an assignment.
*/
- final val LHSmode = 0x400
+ final val LHSmode: Mode = 0x400
/** STARmode is set when star patterns are allowed.
* (This was formerly called REGPATmode.)
*/
- final val STARmode = 0x1000
+ final val STARmode: Mode = 0x1000
/** ALTmode is set when we are under a pattern alternative.
*/
- final val ALTmode = 0x2000
+ final val ALTmode: Mode = 0x2000
/** HKmode is set when we are typing a higher-kinded type.
* adapt should then check kind-arity based on the prototypical type's
* kind arity. Type arguments should not be inferred.
*/
- final val HKmode = 0x4000 // @M: could also use POLYmode | TAPPmode
+ final val HKmode: Mode = 0x4000 // @M: could also use POLYmode | TAPPmode
/** BYVALmode is set when we are typing an expression
* that occurs in a by-value position. An expression e1 is in by-value
@@ -80,38 +81,17 @@ trait Modes {
* arguments or the conditional of an if-then-else clause.
* This mode has been added to support continuations.
*/
- final val BYVALmode = 0x8000
+ final val BYVALmode: Mode = 0x8000
/** TYPEPATmode is set when we are typing a type in a pattern.
*/
- final val TYPEPATmode = 0x10000
+ final val TYPEPATmode: Mode = 0x10000
/** RETmode is set when we are typing a return expression.
*/
- final val RETmode = 0x20000
-
- final private val StickyModes = EXPRmode | PATTERNmode | TYPEmode | ALTmode
-
- final def onlyStickyModes(mode: Int) =
- mode & StickyModes
-
- final def forFunMode(mode: Int) =
- mode & (StickyModes | SCCmode) | FUNmode | POLYmode | BYVALmode
-
- final def forTypeMode(mode: Int) =
- if (inAnyMode(mode, PATTERNmode | TYPEPATmode)) TYPEmode | TYPEPATmode
- else TYPEmode
+ final val RETmode: Mode = 0x20000
- final def inAllModes(mode: Int, required: Int) = (mode & required) == required
- final def inAnyMode(mode: Int, required: Int) = (mode & required) != 0
- final def inNoModes(mode: Int, prohibited: Int) = (mode & prohibited) == 0
- final def inHKMode(mode: Int) = (mode & HKmode) != 0
- final def inFunMode(mode: Int) = (mode & FUNmode) != 0
- final def inPolyMode(mode: Int) = (mode & POLYmode) != 0
- final def inPatternMode(mode: Int) = (mode & PATTERNmode) != 0
- final def inExprModeOr(mode: Int, others: Int) = (mode & (EXPRmode | others)) != 0
- final def inExprModeButNot(mode: Int, prohibited: Int) =
- (mode & (EXPRmode | prohibited)) == EXPRmode
+ final private val StickyModes: Mode = EXPRmode | PATTERNmode | TYPEmode | ALTmode
/** Translates a mask of mode flags into something readable.
*/
@@ -133,8 +113,37 @@ trait Modes {
(1 << 14) -> "HKmode",
(1 << 15) -> "BYVALmode",
(1 << 16) -> "TYPEPATmode"
- )
- def modeString(mode: Int): String =
- if (mode == 0) "NOmode"
- else (modeNameMap filterKeys (bit => inAllModes(mode, bit))).values mkString " "
+ ).map({ case (k, v) => Mode(k) -> v })
+}
+import Mode._
+
+final class Mode private (val bits: Int) extends AnyVal {
+ def &(other: Mode): Mode = new Mode(bits & other.bits)
+ def |(other: Mode): Mode = new Mode(bits | other.bits)
+ def &~(other: Mode): Mode = new Mode(bits & ~(other.bits))
+
+ def onlySticky = this & Mode.StickyModes
+ def forFunMode = this & (Mode.StickyModes | SCCmode) | FUNmode | POLYmode | BYVALmode
+ def forTypeMode =
+ if (inAny(PATTERNmode | TYPEPATmode)) TYPEmode | TYPEPATmode
+ else TYPEmode
+
+ def inAll(required: Mode) = (this & required) == required
+ def inAny(required: Mode) = (this & required) != NOmode
+ def inNone(prohibited: Mode) = (this & prohibited) == NOmode
+ def inHKMode = inAll(HKmode)
+ def inFunMode = inAll(FUNmode)
+ def inPolyMode = inAll(POLYmode)
+ def inPatternMode = inAll(PATTERNmode)
+ def inExprMode = inAll(EXPRmode)
+ def inByValMode = inAll(BYVALmode)
+ def inRetMode = inAll(RETmode)
+
+ def inPatternNotFunMode = inPatternMode && !inFunMode
+ def inExprModeOr(others: Mode) = inAny(EXPRmode | others)
+ def inExprModeButNot(prohibited: Mode) = inAll(EXPRmode) && inNone(prohibited)
+
+ override def toString =
+ if (bits == 0) "NOmode"
+ else (modeNameMap filterKeys inAll).values.toList.sorted mkString " "
}
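The move from trait Modes to the Mode value class wraps the typer's Int bitmask so the bit tests read as methods (inAll, inAny, inFunMode, ...). A self-contained sketch of the same pattern with hypothetical names and only a few of the constants; the real Mode lives in scala.reflect.internal and carries the full set of modes:

object ModeSketch {
  def apply(bits: Int): ModeSketch  = new ModeSketch(bits)
  final val NOmode: ModeSketch      = apply(0x000)
  final val EXPRmode: ModeSketch    = apply(0x001)
  final val PATTERNmode: ModeSketch = apply(0x002)
  final val FUNmode: ModeSketch     = apply(0x010)
}

final class ModeSketch private (val bits: Int) extends AnyVal {
  def |(other: ModeSketch): ModeSketch = new ModeSketch(bits | other.bits)
  def &(other: ModeSketch): ModeSketch = new ModeSketch(bits & other.bits)
  def inAll(required: ModeSketch)      = (this & required) == required
  def inAny(required: ModeSketch)      = (this & required) != ModeSketch.NOmode
  def inFunMode                        = inAll(ModeSketch.FUNmode)
  def inPatternMode                    = inAll(ModeSketch.PATTERNmode)
  override def toString                = "ModeSketch(0x%x)".format(bits)
}

object ModeSketchDemo {
  import ModeSketch._
  def main(args: Array[String]): Unit = {
    val mode = EXPRmode | FUNmode
    println(mode.inFunMode)          // true
    println(mode.inAny(PATTERNmode)) // false
  }
}

As with the real Mode, extending AnyVal keeps the wrapper allocation-free in most positions while giving the bitmask a distinct type.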
diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala
index c78ba72dfb..f8598dca7a 100644
--- a/src/reflect/scala/reflect/internal/Names.scala
+++ b/src/reflect/scala/reflect/internal/Names.scala
@@ -10,22 +10,7 @@ import scala.io.Codec
import java.security.MessageDigest
import scala.language.implicitConversions
-trait LowPriorityNames {
- self: Names =>
-
- implicit def nameToNameOps(name: Name): NameOps[Name] = new NameOps[Name](name)
-}
-
-/** The class Names ...
- *
- * @author Martin Odersky
- * @version 1.0, 05/02/2005
- */
-trait Names extends api.Names with LowPriorityNames {
- implicit def promoteTermNamesAsNecessary(name: Name): TermName = name.toTermName
-
-// Operations -------------------------------------------------------------
-
+trait Names extends api.Names {
private final val HASH_SIZE = 0x8000
private final val HASH_MASK = 0x7FFF
private final val NAME_SIZE = 0x20000
@@ -49,7 +34,7 @@ trait Names extends api.Names with LowPriorityNames {
cs(offset) * (41 * 41) +
cs(offset + len - 1) * 41 +
cs(offset + (len >> 1)))
- else 0;
+ else 0
/** Is (the ASCII representation of) name at given index equal to
* cs[offset..offset+len-1]?
@@ -57,7 +42,7 @@ trait Names extends api.Names with LowPriorityNames {
private def equals(index: Int, cs: Array[Char], offset: Int, len: Int): Boolean = {
var i = 0
while ((i < len) && (chrs(index + i) == cs(offset + i)))
- i += 1;
+ i += 1
i == len
}
@@ -135,9 +120,6 @@ trait Names extends api.Names with LowPriorityNames {
def newTypeName(bs: Array[Byte], offset: Int, len: Int): TypeName =
newTermName(bs, offset, len).toTypeName
- def nameChars: Array[Char] = chrs
- @deprecated("", "2.9.0") def view(s: String): TermName = newTermName(s)
-
// Classes ----------------------------------------------------------------------
/** The name class.
@@ -186,28 +168,20 @@ trait Names extends api.Names with LowPriorityNames {
scala.compat.Platform.arraycopy(chrs, index, cs, offset, len)
/** @return the ascii representation of this name */
- final def toChars: Array[Char] = {
+ final def toChars: Array[Char] = { // used by ide
val cs = new Array[Char](len)
copyChars(cs, 0)
cs
}
- /** Write to UTF8 representation of this name to given character array.
- * Start copying to index `to`. Return index of next free byte in array.
- * Array must have enough remaining space for all bytes
- * (i.e. maximally 3*length bytes).
- */
- final def copyUTF8(bs: Array[Byte], offset: Int): Int = {
- val bytes = Codec.toUTF8(chrs, index, len)
- scala.compat.Platform.arraycopy(bytes, 0, bs, offset, bytes.length)
- offset + bytes.length
- }
-
/** @return the hash value of this name */
final override def hashCode(): Int = index
- // Presently disabled.
- // override def equals(other: Any) = paranoidEquals(other)
+ /****
+ * This has been quite useful to find places where people are comparing
+ * a TermName and a TypeName, or a Name and a String.
+
+ override def equals(other: Any) = paranoidEquals(other)
private def paranoidEquals(other: Any): Boolean = {
val cmp = this eq other.asInstanceOf[AnyRef]
if (cmp || !nameDebug)
@@ -215,7 +189,7 @@ trait Names extends api.Names with LowPriorityNames {
other match {
case x: String =>
- Console.println("Compared " + debugString + " and String '" + x + "'")
+ Console.println(s"Compared $debugString and String '$x'")
case x: Name =>
if (this.isTermName != x.isTermName) {
val panic = this.toTermName == x.toTermName
@@ -228,6 +202,7 @@ trait Names extends api.Names with LowPriorityNames {
}
false
}
+ ****/
/** @return the i'th Char of this name */
final def charAt(i: Int): Char = chrs(index + i)
@@ -242,7 +217,7 @@ trait Names extends api.Names with LowPriorityNames {
* this name from start, length if not found.
*
* @param c the character
- * @param start ...
+ * @param start the index from which to search
* @return the index of the first occurrence of c
*/
final def pos(c: Char, start: Int): Int = {
@@ -255,7 +230,7 @@ trait Names extends api.Names with LowPriorityNames {
* in this name from start, length if not found.
*
* @param s the string
- * @param start ...
+ * @param start the index from which to search
* @return the index of the first occurrence of s
*/
final def pos(s: String, start: Int): Int = {
@@ -279,13 +254,11 @@ trait Names extends api.Names with LowPriorityNames {
*/
final def lastPos(c: Char): Int = lastPos(c, len - 1)
- final def lastPos(s: String): Int = lastPos(s, len - s.length)
-
/** Returns the index of the last occurrence of char c in this
* name from start, -1 if not found.
*
* @param c the character
- * @param start ...
+ * @param start the index from which to search
* @return the index of the last occurrence of c
*/
final def lastPos(c: Char, start: Int): Int = {
@@ -294,26 +267,6 @@ trait Names extends api.Names with LowPriorityNames {
i
}
- /** Returns the index of the last occurrence of string s in this
- * name from start, -1 if not found.
- *
- * @param s the string
- * @param start ...
- * @return the index of the last occurrence of s
- */
- final def lastPos(s: String, start: Int): Int = {
- var i = lastPos(s.charAt(0), start)
- while (i >= 0) {
- var j = 1;
- while (s.charAt(j) == chrs(index + i + j)) {
- j += 1
- if (j == s.length()) return i;
- }
- i = lastPos(s.charAt(0), i - 1)
- }
- -s.length()
- }
-
/** Does this name start with prefix? */
final def startsWith(prefix: Name): Boolean = startsWith(prefix, 0)
@@ -322,7 +275,7 @@ trait Names extends api.Names with LowPriorityNames {
var i = 0
while (i < prefix.length && start + i < len &&
chrs(index + start + i) == chrs(prefix.start + i))
- i += 1;
+ i += 1
i == prefix.length
}
@@ -334,7 +287,7 @@ trait Names extends api.Names with LowPriorityNames {
var i = 1
while (i <= suffix.length && i <= end &&
chrs(index + end - i) == chrs(suffix.start + suffix.length - i))
- i += 1;
+ i += 1
i > suffix.length
}
@@ -375,7 +328,6 @@ trait Names extends api.Names with LowPriorityNames {
if (idx == length) -1 else idx
}
def lastIndexOf(ch: Char) = lastPos(ch)
- def lastIndexOf(ch: Char, fromIndex: Int) = lastPos(ch, fromIndex)
/** Replace all occurrences of `from` by `to` in
* name; result is always a term name.
@@ -424,24 +376,25 @@ trait Names extends api.Names with LowPriorityNames {
def append(ch: Char) = newName("" + this + ch)
def append(suffix: String) = newName("" + this + suffix)
def append(suffix: Name) = newName("" + this + suffix)
- def prepend(ch: Char) = newName("" + ch + this)
def prepend(prefix: String) = newName("" + prefix + this)
- def prepend(prefix: Name) = newName("" + prefix + this)
def decodedName: ThisNameType = newName(decode)
- def isOperatorName: Boolean = decode != toString
+ def isOperatorName: Boolean = decode != toString // used by ide
def longString: String = nameKind + " " + decode
def debugString = { val s = decode ; if (isTypeName) s + "!" else s }
}
+ implicit def AnyNameOps(name: Name): NameOps[Name] = new NameOps(name)
implicit def TermNameOps(name: TermName): NameOps[TermName] = new NameOps(name)
implicit def TypeNameOps(name: TypeName): NameOps[TypeName] = new NameOps(name)
+ /** FIXME: This is a good example of something which is pure "value class" but cannot
+ * reap the benefits because of an (unused) $outer pointer, so it is not single-field.
+ */
final class NameOps[T <: Name](name: T) {
def stripSuffix(suffix: Name): T = if (name endsWith suffix) dropRight(suffix.length) else name
def dropRight(n: Int): T = name.subName(0, name.length - n).asInstanceOf[T]
def drop(n: Int): T = name.subName(n, name.length).asInstanceOf[T]
- def nonEmpty: Boolean = name.length > 0
}
implicit val NameTag = ClassTag[Name](classOf[Name])
@@ -485,7 +438,7 @@ trait Names extends api.Names with LowPriorityNames {
type ThisNameType = TermName
protected[this] def thisName: TermName = this
- var next: TermName = termHashtable(hash)
+ val next: TermName = termHashtable(hash)
termHashtable(hash) = this
def isTermName: Boolean = true
def isTypeName: Boolean = false
@@ -510,11 +463,16 @@ trait Names extends api.Names with LowPriorityNames {
implicit val TermNameTag = ClassTag[TermName](classOf[TermName])
+ object TermName extends TermNameExtractor {
+ def apply(s: String) = newTermName(s)
+ def unapply(name: TermName): Option[String] = Some(name.toString)
+ }
+
sealed abstract class TypeName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) {
type ThisNameType = TypeName
protected[this] def thisName: TypeName = this
- var next: TypeName = typeHashtable(hash)
+ val next: TypeName = typeHashtable(hash)
typeHashtable(hash) = this
def isTermName: Boolean = false
def isTypeName: Boolean = true
@@ -539,4 +497,9 @@ trait Names extends api.Names with LowPriorityNames {
}
implicit val TypeNameTag = ClassTag[TypeName](classOf[TypeName])
+
+ object TypeName extends TypeNameExtractor {
+ def apply(s: String) = newTypeName(s)
+ def unapply(name: TypeName): Option[String] = Some(name.toString)
+ }
}
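The new TermName and TypeName objects pair apply with unapply so names can be built from and matched against strings (the raw tree printer below switches its output to the TermName("...") form accordingly). A standalone sketch of that extractor pairing over a simplified, hypothetical name type, not the interned names of Names.scala:

// Simplified stand-in: the real TermName/TypeName are hash-consed into a shared table.
final class TermNameSketch(val value: String) { override def toString = value }

object TermNameSketch {
  def apply(s: String): TermNameSketch = new TermNameSketch(s)      // plays the role of newTermName(s)
  def unapply(name: TermNameSketch): Option[String] = Some(name.toString)
}

object NameDemo {
  def main(args: Array[String]): Unit = {
    val n = TermNameSketch("apply")
    n match {
      case TermNameSketch(s) => println(s"matched term name: $s")   // matched term name: apply
    }
  }
}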
diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala
index faa161d6b1..cc32a0363f 100644
--- a/src/reflect/scala/reflect/internal/Positions.scala
+++ b/src/reflect/scala/reflect/internal/Positions.scala
@@ -1,34 +1,273 @@
package scala.reflect
package internal
+import util._
+import scala.collection.mutable.ListBuffer
+
+/** Handling range positions
+ * atPos, the main method in this trait, will add positions to a tree,
+ * and will ensure the following properties:
+ *
+ * 1. All nodes between the root of the tree and nodes that already have positions
+ * will be assigned positions.
+ * 2. No node which already has a position will be assigned a different range; however
+ * a RangePosition might become a TransparentPosition.
+ * 3. The position of each assigned node includes the positions of each of its children.
+ * 4. The positions of all solid descendants of children of an assigned node
+ * are mutually non-overlapping.
+ *
+ * Here, the solid descendants of a node are:
+ *
+ * If the node has a TransparentPosition, the solid descendants of all its children
+ * Otherwise, the singleton consisting of the node itself.
+ */
trait Positions extends api.Positions { self: SymbolTable =>
type Position = scala.reflect.internal.util.Position
val NoPosition = scala.reflect.internal.util.NoPosition
implicit val PositionTag = ClassTag[Position](classOf[Position])
+ def inform(msg: String): Unit
+
+ def useOffsetPositions: Boolean = true
+
/** A position that wraps a set of trees.
* The point of the wrapping position is the point of the default position.
* If some of the trees are ranges, returns a range position enclosing all ranges
* Otherwise returns default position that is either focused or not.
*/
- def wrappingPos(default: Position, trees: List[Tree]) = wrappingPos(default, trees, true)
- def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = default
+ def wrappingPos(default: Position, trees: List[Tree]): Position = wrappingPos(default, trees, focus = true)
+ def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = {
+ if (useOffsetPositions) default else {
+ val ranged = trees filter (_.pos.isRange)
+ if (ranged.isEmpty) if (focus) default.focus else default
+ else new RangePosition(default.source, (ranged map (_.pos.start)).min, default.point, (ranged map (_.pos.end)).max)
+ }
+ }
/** A position that wraps the non-empty set of trees.
* The point of the wrapping position is the point of the first tree's position.
* If some of the trees are non-synthetic, returns a range position enclosing the non-synthetic trees
* Otherwise returns a synthetic offset position to point.
*/
- def wrappingPos(trees: List[Tree]): Position = trees.head.pos
+ def wrappingPos(trees: List[Tree]): Position = {
+ val headpos = trees.head.pos
+ if (useOffsetPositions || !headpos.isDefined) headpos
+ else wrappingPos(headpos, trees)
+ }
/** Ensure that given tree has no positions that overlap with
* any of the positions of `others`. This is done by
* shortening the range, assigning TransparentPositions
* to some of the nodes in `tree` or focusing on the position.
*/
- def ensureNonOverlapping(tree: Tree, others: List[Tree]){ ensureNonOverlapping(tree, others, true) }
- def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) {}
+ def ensureNonOverlapping(tree: Tree, others: List[Tree]){ ensureNonOverlapping(tree, others, focus = true) }
+ def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) {
+ if (useOffsetPositions) return
+
+ def isOverlapping(pos: Position) =
+ pos.isRange && (others exists (pos overlaps _.pos))
+
+ if (isOverlapping(tree.pos)) {
+ val children = tree.children
+ children foreach (ensureNonOverlapping(_, others, focus))
+ if (tree.pos.isOpaqueRange) {
+ val wpos = wrappingPos(tree.pos, children, focus)
+ tree setPos (if (isOverlapping(wpos)) tree.pos.makeTransparent else wpos)
+ }
+ }
+ }
+
+ def rangePos(source: SourceFile, start: Int, point: Int, end: Int): Position =
+ if (useOffsetPositions) new OffsetPosition(source, point)
+ else new RangePosition(source, start, point, end)
+
+ def validatePositions(tree: Tree) {
+ if (useOffsetPositions) return
+
+ def reportTree(prefix : String, tree : Tree) {
+ val source = if (tree.pos.isDefined) tree.pos.source else ""
+ inform("== "+prefix+" tree ["+tree.id+"] of type "+tree.productPrefix+" at "+tree.pos.show+source)
+ inform("")
+ inform(treeStatus(tree))
+ inform("")
+ }
+
+ def positionError(msg: String)(body : => Unit) {
+ inform("======= Position error\n" + msg)
+ body
+ inform("\nWhile validating #" + tree.id)
+ inform(treeStatus(tree))
+ inform("\nChildren:")
+ tree.children map (t => " " + treeStatus(t, tree)) foreach inform
+ inform("=======")
+ throw new ValidateException(msg)
+ }
+
+ def validate(tree: Tree, encltree: Tree): Unit = {
+
+ if (!tree.isEmpty && tree.canHaveAttrs) {
+ if (settings.Yposdebug.value && (settings.verbose.value || settings.Yrangepos.value))
+ println("[%10s] %s".format("validate", treeStatus(tree, encltree)))
+
+ if (!tree.pos.isDefined)
+ positionError("Unpositioned tree #"+tree.id) {
+ inform("%15s %s".format("unpositioned", treeStatus(tree, encltree)))
+ inform("%15s %s".format("enclosing", treeStatus(encltree)))
+ encltree.children foreach (t => inform("%15s %s".format("sibling", treeStatus(t, encltree))))
+ }
+ if (tree.pos.isRange) {
+ if (!encltree.pos.isRange)
+ positionError("Synthetic tree ["+encltree.id+"] contains nonsynthetic tree ["+tree.id+"]") {
+ reportTree("Enclosing", encltree)
+ reportTree("Enclosed", tree)
+ }
+ if (!(encltree.pos includes tree.pos))
+ positionError("Enclosing tree ["+encltree.id+"] does not include tree ["+tree.id+"]") {
+ reportTree("Enclosing", encltree)
+ reportTree("Enclosed", tree)
+ }
+
+ findOverlapping(tree.children flatMap solidDescendants) match {
+ case List() => ;
+ case xs => {
+ positionError("Overlapping trees "+xs.map { case (x, y) => (x.id, y.id) }.mkString("", ", ", "")) {
+ reportTree("Ancestor", tree)
+ for((x, y) <- xs) {
+ reportTree("First overlapping", x)
+ reportTree("Second overlapping", y)
+ }
+ }
+ }
+ }
+ }
+ for (ct <- tree.children flatMap solidDescendants) validate(ct, tree)
+ }
+ }
+
+ if (!isPastTyper)
+ validate(tree, tree)
+ }
+
+ def solidDescendants(tree: Tree): List[Tree] =
+ if (tree.pos.isTransparent) tree.children flatMap solidDescendants
+ else List(tree)
+
+ /** A free range from `lo` to `hi` */
+ private def free(lo: Int, hi: Int): Range =
+ Range(new RangePosition(null, lo, lo, hi), EmptyTree)
+
+ /** The maximal free range */
+ private lazy val maxFree: Range = free(0, Int.MaxValue)
+
+ /** A singleton list of a non-empty range from `lo` to `hi`, or else the empty List */
+ private def maybeFree(lo: Int, hi: Int) =
+ if (lo < hi) List(free(lo, hi))
+ else List()
+
+ /** Insert `pos` into ranges `rs` if possible;
+ * otherwise add conflicting trees to `conflicting`.
+ */
+ private def insert(rs: List[Range], t: Tree, conflicting: ListBuffer[Tree]): List[Range] = rs match {
+ case List() =>
+ assert(conflicting.nonEmpty)
+ rs
+ case r :: rs1 =>
+ assert(!t.pos.isTransparent)
+ if (r.isFree && (r.pos includes t.pos)) {
+// println("subdividing "+r+"/"+t.pos)
+ maybeFree(t.pos.end, r.pos.end) ::: List(Range(t.pos, t)) ::: maybeFree(r.pos.start, t.pos.start) ::: rs1
+ } else {
+ if (!r.isFree && (r.pos overlaps t.pos)) conflicting += r.tree
+ r :: insert(rs1, t, conflicting)
+ }
+ }
+
+ /** Replace elem `t` of `ts` by `replacement` list. */
+ private def replace(ts: List[Tree], t: Tree, replacement: List[Tree]): List[Tree] =
+ if (ts.head == t) replacement ::: ts.tail
+ else ts.head :: replace(ts.tail, t, replacement)
+
+ /** Does the given list of trees have mutually non-overlapping positions?
+ * pre: None of the trees is transparent
+ */
+ def findOverlapping(cts: List[Tree]): List[(Tree, Tree)] = {
+ var ranges = List(maxFree)
+ for (ct <- cts) {
+ if (ct.pos.isOpaqueRange) {
+ val conflicting = new ListBuffer[Tree]
+ ranges = insert(ranges, ct, conflicting)
+ if (conflicting.nonEmpty) return conflicting.toList map (t => (t, ct))
+ }
+ }
+ List()
+ }
+
+ /** Set position of all children of a node
+ * @param pos A target position.
+ * Uses the point of the position as the point of all positions it assigns.
+ * Uses the start of this position as an Offset position for unpositioned trees
+ * without children.
+ * @param trees The children to position. All children must be positionable.
+ */
+ private def setChildrenPos(pos: Position, trees: List[Tree]): Unit = try {
+ for (tree <- trees) {
+ if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) {
+ val children = tree.children
+ if (children.isEmpty) {
+ tree setPos pos.focus
+ } else {
+ setChildrenPos(pos, children)
+ tree setPos wrappingPos(pos, children)
+ }
+ }
+ }
+ } catch {
+ case ex: Exception =>
+ println("error while setting children pos "+pos+" of "+trees)
+ throw ex
+ }
+
+
+ class ValidateException(msg : String) extends Exception(msg)
+
+
+ /** A locator for trees with given positions.
+ * Given a position `pos`, locator.apply returns
+ * the smallest tree that encloses `pos`.
+ */
+ class Locator(pos: Position) extends Traverser {
+ var last: Tree = _
+ def locateIn(root: Tree): Tree = {
+ this.last = EmptyTree
+ traverse(root)
+ this.last
+ }
+ protected def isEligible(t: Tree) = !t.pos.isTransparent
+ override def traverse(t: Tree) {
+ t match {
+ case tt : TypeTree if tt.original != null && (tt.pos includes tt.original.pos) =>
+ traverse(tt.original)
+ case _ =>
+ if (t.pos includes pos) {
+ if (isEligible(t)) last = t
+ super.traverse(t)
+ } else t match {
+ case mdef: MemberDef =>
+ traverseTrees(mdef.mods.annotations)
+ case _ =>
+ }
+ }
+ }
+ }
+
+ case class Range(pos: Position, tree: Tree) {
+ def isFree = tree == EmptyTree
+ }
+
+ class TypedLocator(pos: Position) extends Locator(pos) {
+ override protected def isEligible(t: Tree) = super.isEligible(t) && t.tpe != null
+ }
trait PosAssigner extends Traverser {
var pos: Position
@@ -38,7 +277,7 @@ trait Positions extends api.Positions { self: SymbolTable =>
protected class DefaultPosAssigner extends PosAssigner {
var pos: Position = _
override def traverse(t: Tree) {
- if (t eq EmptyTree) ()
+ if (!t.canHaveAttrs) ()
else if (t.pos == NoPosition) {
t.setPos(pos)
super.traverse(t) // TODO: bug? shouldn't the traverse be outside of the if?
@@ -57,9 +296,25 @@ trait Positions extends api.Positions { self: SymbolTable =>
}
}
+ /** Position a tree.
+ * This means: Set position of a node and position all its unpositioned children.
+ */
def atPos[T <: Tree](pos: Position)(tree: T): T = {
- posAssigner.pos = pos
- posAssigner.traverse(tree)
- tree
+ if (useOffsetPositions || !pos.isOpaqueRange) {
+ posAssigner.pos = pos
+ posAssigner.traverse(tree)
+ tree
+ }
+ else {
+ if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) {
+ tree.setPos(pos)
+ val children = tree.children
+ if (children.nonEmpty) {
+ if (children.tail.isEmpty) atPos(pos)(children.head)
+ else setChildrenPos(pos, children)
+ }
+ }
+ tree
+ }
}
-} \ No newline at end of file
+}
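The heart of the new range-position support is wrappingPos, which spans the ranges of all ranged children while keeping the default point. A small numeric sketch of that min/max computation and of includes/overlaps tests, using a hypothetical Range case class rather than the real RangePosition:

object RangeSketch {
  // Hypothetical stand-in for a RangePosition: start <= point <= end.
  final case class Range(start: Int, point: Int, end: Int) {
    def includes(other: Range): Boolean = start <= other.start && other.end <= end
    def overlaps(other: Range): Boolean = other.start < end && start < other.end
  }

  // Like wrappingPos: wrap a default position around a list of child ranges.
  def wrapping(default: Range, children: List[Range]): Range =
    if (children.isEmpty) default
    else Range(children.map(_.start).min, default.point, children.map(_.end).max)

  def main(args: Array[String]): Unit = {
    val default  = Range(10, 12, 14)
    val children = List(Range(2, 3, 8), Range(9, 9, 20))
    val wrapped  = wrapping(default, children)
    println(wrapped)                               // Range(2,12,20)
    println(wrapped includes children.head)        // true
    println(children.head overlaps children(1))    // false: validatePositions would accept these siblings
  }
}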
diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala
index 80d247c0ea..28837c4ae8 100644
--- a/src/reflect/scala/reflect/internal/Printers.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -25,8 +25,8 @@ trait Printers extends api.Printers { self: SymbolTable =>
if (nme.keywords(term) && term != nme.USCOREkw) "`%s`" format s
else s
}
- def quotedName(name: Name): String = quotedName(name, false)
- def quotedName(name: String): String = quotedName(newTermName(name), false)
+ def quotedName(name: Name): String = quotedName(name, decode = false)
+ def quotedName(name: String): String = quotedName(newTermName(name), decode = false)
private def symNameInternal(tree: Tree, name: Name, decoded: Boolean): String = {
val sym = tree.symbol
@@ -43,8 +43,8 @@ trait Printers extends api.Printers { self: SymbolTable =>
}
}
- def decodedSymName(tree: Tree, name: Name) = symNameInternal(tree, name, true)
- def symName(tree: Tree, name: Name) = symNameInternal(tree, name, false)
+ def decodedSymName(tree: Tree, name: Name) = symNameInternal(tree, name, decoded = true)
+ def symName(tree: Tree, name: Name) = symNameInternal(tree, name, decoded = false)
/** Turns a path into a String, introducing backquotes
* as necessary.
@@ -67,12 +67,12 @@ trait Printers extends api.Printers { self: SymbolTable =>
printIds = settings.uniqid.value
printKinds = settings.Yshowsymkinds.value
printMirrors = false // typically there's no point to print mirrors inside the compiler, as there is only one mirror there
- protected def doPrintPositions = settings.Xprintpos.value
+ printPositions = settings.Xprintpos.value
def indent() = indentMargin += indentStep
def undent() = indentMargin -= indentStep
- def printPosition(tree: Tree) = if (doPrintPositions) print(tree.pos.show)
+ def printPosition(tree: Tree) = if (printPositions) print(tree.pos.show)
def println() {
out.println()
@@ -91,8 +91,8 @@ trait Printers extends api.Printers { self: SymbolTable =>
}
def printColumn(ts: List[Tree], start: String, sep: String, end: String) {
- print(start); indent; println()
- printSeq(ts){print(_)}{print(sep); println()}; undent; println(); print(end)
+ print(start); indent(); println()
+ printSeq(ts){print(_)}{print(sep); println()}; undent(); println(); print(end)
}
def printRow(ts: List[Tree], start: String, sep: String, end: String) {
@@ -168,7 +168,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
)
def printFlags(flags: Long, privateWithin: String) {
- var mask: Long = if (settings.debug.value) -1L else PrintableFlags
+ val mask: Long = if (settings.debug.value) -1L else PrintableFlags
val s = flagsToString(flags & mask, privateWithin)
if (s != "") print(s + " ")
}
@@ -208,7 +208,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
case ModuleDef(mods, name, impl) =>
printAnnotations(tree)
- printModifiers(tree, mods);
+ printModifiers(tree, mods)
print("object " + symName(tree, name), " extends ", impl)
case ValDef(mods, name, tp, rhs) =>
@@ -327,10 +327,10 @@ trait Printers extends api.Printers { self: SymbolTable =>
print(lhs, " = ", rhs)
case If(cond, thenp, elsep) =>
- print("if (", cond, ")"); indent; println()
- print(thenp); undent
+ print("if (", cond, ")"); indent(); println()
+ print(thenp); undent()
if (!elsep.isEmpty) {
- println(); print("else"); indent; println(); print(elsep); undent
+ println(); print("else"); indent(); println(); print(elsep); undent()
}
case Return(expr) =>
@@ -389,7 +389,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
print(x.escapedStringValue)
case tt: TypeTree =>
- if ((tree.tpe eq null) || (doPrintPositions && tt.original != null)) {
+ if ((tree.tpe eq null) || (printPositions && tt.original != null)) {
if (tt.original != null) print("<type: ", tt.original, ">")
else print("<type ?>")
} else if ((tree.tpe.typeSymbol ne null) && tree.tpe.typeSymbol.isAnonymousClass) {
@@ -423,7 +423,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
printOpt(" >: ", lo); printOpt(" <: ", hi)
case ExistentialTypeTree(tpt, whereClauses) =>
- print(tpt);
+ print(tpt)
printColumn(whereClauses, " forSome { ", ";", "}")
// SelectFromArray is no longer visible in reflect.internal.
@@ -435,7 +435,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
case tree =>
xprintTree(this, tree)
}
- if (printTypes && tree.isTerm && !tree.isEmpty) {
+ if (printTypes && tree.isTerm && tree.canHaveAttrs) {
print("{", if (tree.tpe eq null) "<null>" else tree.tpe.toString, "}")
}
}
@@ -475,8 +475,6 @@ trait Printers extends api.Printers { self: SymbolTable =>
}
def newRawTreePrinter(writer: PrintWriter): RawTreePrinter = new RawTreePrinter(writer)
- def newRawTreePrinter(stream: OutputStream): RawTreePrinter = newRawTreePrinter(new PrintWriter(stream))
- def newRawTreePrinter(): RawTreePrinter = newRawTreePrinter(new PrintWriter(ConsoleWriter))
// provides footnotes for types and mirrors
import scala.collection.mutable.{Map, WeakHashMap, SortedSet}
@@ -525,7 +523,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
private var depth = 0
private var printTypesInFootnotes = true
private var printingFootnotes = false
- private var footnotes = footnoteIndex.mkFootnotes()
+ private val footnotes = footnoteIndex.mkFootnotes()
def print(args: Any*): Unit = {
// don't print type footnotes if the argument is a mere type
@@ -542,14 +540,17 @@ trait Printers extends api.Printers { self: SymbolTable =>
print(")")
case EmptyTree =>
print("EmptyTree")
- case emptyValDef: AnyRef if emptyValDef eq self.emptyValDef =>
+ case self.emptyValDef =>
print("emptyValDef")
+ case self.pendingSuperCall =>
+ print("pendingSuperCall")
case tree: Tree =>
- val hasSymbol = tree.hasSymbol && tree.symbol != NoSymbol
- val isError = hasSymbol && tree.symbol.name.toString == nme.ERROR.toString
+ val hasSymbolField = tree.hasSymbolField && tree.symbol != NoSymbol
+ val isError = hasSymbolField && tree.symbol.name.toString == nme.ERROR.toString
printProduct(
tree,
preamble = _ => {
+ if (printPositions) print(tree.pos.show)
print(tree.productPrefix)
if (printTypes && tree.tpe != null) print(tree.tpe)
},
@@ -559,7 +560,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
if (isError) print("<")
print(name)
if (isError) print(": error>")
- } else if (hasSymbol) {
+ } else if (hasSymbolField) {
tree match {
case refTree: RefTree =>
if (tree.symbol.name != refTree.name) print("[", tree.symbol, " aka ", refTree.name, "]")
@@ -651,7 +652,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
print("(")
val it = iterable.iterator
while (it.hasNext) {
- body(it.next)
+ body(it.next())
print(if (it.hasNext) ", " else "")
}
print(")")
@@ -672,7 +673,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
case nme.CONSTRUCTOR => "nme.CONSTRUCTOR"
case nme.ROOTPKG => "nme.ROOTPKG"
case _ =>
- val prefix = if (name.isTermName) "newTermName(\"" else "newTypeName(\""
+ val prefix = if (name.isTermName) "TermName(\"" else "TypeName(\""
prefix + name.toString + "\")"
}
diff --git a/src/reflect/scala/reflect/internal/PrivateWithin.scala b/src/reflect/scala/reflect/internal/PrivateWithin.scala
new file mode 100644
index 0000000000..9b99b94b41
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/PrivateWithin.scala
@@ -0,0 +1,23 @@
+package scala.reflect
+package internal
+
+import ClassfileConstants._
+
+trait PrivateWithin {
+ self: SymbolTable =>
+
+ def importPrivateWithinFromJavaFlags(sym: Symbol, jflags: Int): Symbol = {
+ if ((jflags & (JAVA_ACC_PRIVATE | JAVA_ACC_PROTECTED | JAVA_ACC_PUBLIC)) == 0)
+ // See ticket #1687 for an example of when topLevelClass is NoSymbol: it
+ // apparently occurs when processing v45.3 bytecode.
+ if (sym.enclosingTopLevelClass != NoSymbol)
+ sym.privateWithin = sym.enclosingTopLevelClass.owner
+
+ // protected in java means package protected. #3946
+ if ((jflags & JAVA_ACC_PROTECTED) != 0)
+ if (sym.enclosingTopLevelClass != NoSymbol)
+ sym.privateWithin = sym.enclosingTopLevelClass.owner
+
+ sym
+ }
+} \ No newline at end of file
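importPrivateWithinFromJavaFlags widens privateWithin when the classfile flags imply package access: no access bits at all (Java default access), or ACC_PROTECTED, which in Java also grants package access (#3946). A standalone sketch of just those bit tests, with the standard JVM access_flags values written out as hypothetical local constants rather than taken from ClassfileConstants:

object JavaAccessSketch {
  // Standard JVM access_flags bits (JVMS table 4.1-B).
  final val ACC_PUBLIC    = 0x0001
  final val ACC_PRIVATE   = 0x0002
  final val ACC_PROTECTED = 0x0004

  // True when the member is accessible within its Java package.
  def isPackageVisible(jflags: Int): Boolean =
    (jflags & (ACC_PUBLIC | ACC_PRIVATE | ACC_PROTECTED)) == 0 ||  // default (package-private) access
    (jflags & ACC_PROTECTED) != 0                                  // protected also grants package access

  def main(args: Array[String]): Unit = {
    println(isPackageVisible(0x0000))        // true  (package-private)
    println(isPackageVisible(ACC_PROTECTED)) // true  (protected => package access too)
    println(isPackageVisible(ACC_PUBLIC))    // false
  }
}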
diff --git a/src/reflect/scala/reflect/internal/Required.scala b/src/reflect/scala/reflect/internal/Required.scala
index 842491d56d..93383f5376 100644
--- a/src/reflect/scala/reflect/internal/Required.scala
+++ b/src/reflect/scala/reflect/internal/Required.scala
@@ -4,12 +4,9 @@ package internal
import settings.MutableSettings
trait Required { self: SymbolTable =>
-
def picklerPhase: Phase
-
def settings: MutableSettings
- def forInteractive: Boolean
-
- def forScaladoc: Boolean
+ @deprecated("Interactive is implemented with a custom Global; this flag is ignored", "2.11.0") def forInteractive = false
+ @deprecated("Scaladoc is implemented with a custom Global; this flag is ignored", "2.11.0") def forScaladoc = false
}
diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala
index ab3b9b7ed7..850c497d4b 100644
--- a/src/reflect/scala/reflect/internal/Scopes.scala
+++ b/src/reflect/scala/reflect/internal/Scopes.scala
@@ -8,6 +8,14 @@ package internal
trait Scopes extends api.Scopes { self: SymbolTable =>
+ /** An ADT to represent the results of symbol name lookups.
+ */
+ sealed trait NameLookup { def symbol: Symbol ; def isSuccess = false }
+ case class LookupSucceeded(qualifier: Tree, symbol: Symbol) extends NameLookup { override def isSuccess = true }
+ case class LookupAmbiguous(msg: String) extends NameLookup { def symbol = NoSymbol }
+ case class LookupInaccessible(symbol: Symbol, msg: String) extends NameLookup
+ case object LookupNotFound extends NameLookup { def symbol = NoSymbol }
+
class ScopeEntry(val sym: Symbol, val owner: Scope) {
/** the next entry in the hash bucket
*/
@@ -17,15 +25,11 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
*/
var next: ScopeEntry = null
+ def depth = owner.nestingLevel
override def hashCode(): Int = sym.name.start
- override def toString(): String = sym.toString()
+ override def toString() = s"$sym (depth=$depth)"
}
- /**
- * @param sym ...
- * @param owner ...
- * @return ...
- */
private def newScopeEntry(sym: Symbol, owner: Scope): ScopeEntry = {
val e = new ScopeEntry(sym, owner)
e.next = owner.elems
@@ -92,8 +96,6 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
}
/** enter a scope entry
- *
- * @param e ...
*/
protected def enterEntry(e: ScopeEntry) {
elemsCache = null
@@ -110,8 +112,6 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
}
/** enter a symbol
- *
- * @param sym ...
*/
def enter[T <: Symbol](sym: T): T = {
enterEntry(newScopeEntry(sym, this))
@@ -119,8 +119,6 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
}
/** enter a symbol, asserting that no symbol with same name exists in scope
- *
- * @param sym ...
*/
def enterUnique(sym: Symbol) {
assert(lookup(sym.name) == NoSymbol, (sym.fullLocationString, lookup(sym.name).fullLocationString))
@@ -175,8 +173,6 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
}
/** remove entry
- *
- * @param e ...
*/
def unlink(e: ScopeEntry) {
if (elems == e) {
@@ -192,7 +188,7 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
if (e1 == e) {
hashtable(index) = e.tail
} else {
- while (e1.tail != e) e1 = e1.tail;
+ while (e1.tail != e) e1 = e1.tail
e1.tail = e.tail
}
}
@@ -203,19 +199,53 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
def unlink(sym: Symbol) {
var e = lookupEntry(sym.name)
while (e ne null) {
- if (e.sym == sym) unlink(e);
+ if (e.sym == sym) unlink(e)
e = lookupNextEntry(e)
}
}
- /** lookup a symbol
- *
- * @param name ...
- * @return ...
+ /** Lookup a module or a class, filtering out matching names in scope
+ * which do not match that requirement.
+ */
+ def lookupModule(name: Name): Symbol = lookupAll(name.toTermName) find (_.isModule) getOrElse NoSymbol
+ def lookupClass(name: Name): Symbol = lookupAll(name.toTypeName) find (_.isClass) getOrElse NoSymbol
+
+ /** True if the name exists in this scope, false otherwise. */
+ def containsName(name: Name) = lookupEntry(name) != null
+
+ /** Lookup a symbol.
*/
def lookup(name: Name): Symbol = {
val e = lookupEntry(name)
- if (e eq null) NoSymbol else e.sym
+ if (e eq null) NoSymbol
+ else if (lookupNextEntry(e) eq null) e.sym
+ else {
+ // We shouldn't get here: until now this method was picking a random
+ // symbol when there was more than one with the name, so this should
+ // only be called knowing that there are 0-1 symbols of interest. So, we
+ // can safely return an overloaded symbol rather than throwing away the
+ // rest of them. Most likely we still break, but at least we will break
+ // in an understandable fashion (unexpectedly overloaded symbol) rather
+ // than a non-deterministic bizarre one (see any bug involving overloads
+ // in package objects.)
+ val alts = lookupAll(name).toList
+ def alts_s = alts map (s => s.defString) mkString " <and> "
+ devWarning(s"scope lookup of $name found multiple symbols: $alts_s")
+ // FIXME - how is one supposed to create an overloaded symbol without
+ // knowing the correct owner? Using the symbol owner is not correct;
+ // say for instance this is List's scope and the symbols are its three
+ // mkString members. Those symbols are owned by TraversableLike, which
+ // is no more meaningful an owner than NoSymbol given that we're in
+ // List. Maybe it makes no difference who owns the overloaded symbol, in
+ // which case let's establish that and have a canonical creation method.
+ //
+ // FIXME - a similar question for prefix, although there are more
+ // clues from the symbols on that one, as implemented here. In general
+ // the distinct list is one type and lub becomes the identity.
+ // val prefix = lub(alts map (_.info.prefix) distinct)
+ // Now using NoSymbol and NoPrefix always to avoid forcing info (SI-6664)
+ NoSymbol.newOverloaded(NoPrefix, alts)
+ }
}
/** Returns an iterator yielding every symbol with given name in this scope.
@@ -223,7 +253,20 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
def lookupAll(name: Name): Iterator[Symbol] = new Iterator[Symbol] {
var e = lookupEntry(name)
def hasNext: Boolean = e ne null
- def next(): Symbol = { val r = e.sym; e = lookupNextEntry(e); r }
+ def next(): Symbol = try e.sym finally e = lookupNextEntry(e)
+ }
+
+ def lookupAllEntries(name: Name): Iterator[ScopeEntry] = new Iterator[ScopeEntry] {
+ var e = lookupEntry(name)
+ def hasNext: Boolean = e ne null
+ def next(): ScopeEntry = try e finally e = lookupNextEntry(e)
+ }
+
+ def lookupUnshadowedEntries(name: Name): Iterator[ScopeEntry] = {
+ lookupEntry(name) match {
+ case null => Iterator.empty
+ case e => lookupAllEntries(name) filter (e1 => (e eq e1) || (e.depth == e1.depth && e.sym != e1.sym))
+ }
}
/** lookup a symbol entry matching given name.
@@ -257,7 +300,7 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
if (hashtable ne null)
do { e = e.tail } while ((e ne null) && e.sym.name != entry.sym.name)
else
- do { e = e.next } while ((e ne null) && e.sym.name != entry.sym.name);
+ do { e = e.next } while ((e ne null) && e.sym.name != entry.sym.name)
e
}
@@ -287,36 +330,16 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
*/
def iterator: Iterator[Symbol] = toList.iterator
-/*
- /** Does this scope contain an entry for `sym`?
- */
- def contains(sym: Symbol): Boolean = lookupAll(sym.name) contains sym
-
- /** A scope that contains all symbols of this scope and that also contains `sym`.
- */
- def +(sym: Symbol): Scope =
- if (contains(sym)) this
- else {
- val result = cloneScope
- result enter sym
- result
- }
-
- /** A scope that contains all symbols of this scope except `sym`.
- */
- def -(sym: Symbol): Scope =
- if (!contains(sym)) this
- else {
- val result = cloneScope
- result unlink sym
- result
- }
-*/
override def foreach[U](p: Symbol => U): Unit = toList foreach p
- override def filter(p: Symbol => Boolean): Scope =
- if (!(toList forall p)) newScopeWith(toList filter p: _*) else this
-
+ override def filterNot(p: Symbol => Boolean): Scope = (
+ if (toList exists p) newScopeWith(toList filterNot p: _*)
+ else this
+ )
+ override def filter(p: Symbol => Boolean): Scope = (
+ if (toList forall p) this
+ else newScopeWith(toList filter p: _*)
+ )
@deprecated("Use `toList.reverse` instead", "2.10.0")
def reverse: List[Symbol] = toList.reverse
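The NameLookup ADT added at the top of Scopes.scala lets a lookup report success, ambiguity, inaccessibility, or absence instead of returning a bare symbol. A self-contained sketch of the same sealed-trait shape over a plain Map, with hypothetical names and strings standing in for symbols (the inaccessible case is omitted here):

object LookupSketch {
  sealed trait NameLookup { def isSuccess = false }
  final case class LookupSucceeded(symbol: String) extends NameLookup { override def isSuccess = true }
  final case class LookupAmbiguous(msg: String) extends NameLookup
  case object LookupNotFound extends NameLookup

  // Hypothetical scope: a name may be bound to several "symbols", like overloads in a real Scope.
  def lookup(scope: Map[String, List[String]], name: String): NameLookup =
    scope.getOrElse(name, Nil) match {
      case Nil      => LookupNotFound
      case s :: Nil => LookupSucceeded(s)
      case many     => LookupAmbiguous(s"$name is bound ${many.size} times")
    }

  def main(args: Array[String]): Unit = {
    val scope = Map("mkString" -> List("TraversableOnce.mkString", "TraversableOnce.mkString(sep)"))
    println(lookup(scope, "mkString"))   // LookupAmbiguous(mkString is bound 2 times)
    println(lookup(scope, "toList"))     // LookupNotFound
  }
}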
diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala
index b782353ed3..6c5bbc9774 100644
--- a/src/reflect/scala/reflect/internal/StdAttachments.scala
+++ b/src/reflect/scala/reflect/internal/StdAttachments.scala
@@ -29,17 +29,4 @@ trait StdAttachments {
* Therefore we need this hack (see `Reshape.toPreTyperTypeTree` for a detailed explanation).
*/
case class CompoundTypeTreeOriginalAttachment(parents: List[Tree], stats: List[Tree])
-
- /** Is added by the macro engine to the results of macro expansions.
- * Stores the original expandee as it entered the `macroExpand` function.
- */
- case class MacroExpansionAttachment(original: Tree)
-
- /** When present, suppresses macro expansion for the host.
- * This is occasionally necessary, e.g. to prohibit eta-expansion of macros.
- *
- * Does not affect expandability of child nodes, there's context.withMacrosDisabled for that
- * (but think thrice before using that API - see the discussion at https://github.com/scala/scala/pull/1639).
- */
- case object SuppressMacroExpansionAttachment
}
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index bcda2bc1ae..a894bd649c 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -18,8 +18,6 @@ trait StdNames {
def encode(str: String): TermName = newTermNameCached(NameTransformer.encode(str))
- implicit def lowerTermNames(n: TermName): String = n.toString
-
/** Tensions: would like the keywords to be the very first names entered into the names
* storage so their ids count from 0, which simplifies the parser. Switched to abstract
* classes to avoid all the indirection which is generated with implementation-containing
@@ -37,11 +35,7 @@ trait StdNames {
kws = kws + result
result
}
- def result: Set[TermName] = {
- val result = kws
- kws = null
- result
- }
+ def result: Set[TermName] = try kws finally kws = null
}
private final object compactify extends (String => String) {
@@ -104,7 +98,6 @@ trait StdNames {
val IMPORT: NameType = "<import>"
val MODULE_SUFFIX_NAME: NameType = MODULE_SUFFIX_STRING
val MODULE_VAR_SUFFIX: NameType = "$module"
- val NAME_JOIN_NAME: NameType = NAME_JOIN_STRING
val PACKAGE: NameType = "package"
val ROOT: NameType = "<root>"
val SPECIALIZED_SUFFIX: NameType = "$sp"
@@ -121,16 +114,12 @@ trait StdNames {
final val Short: NameType = "Short"
final val Unit: NameType = "Unit"
- final val ScalaValueNames: scala.List[NameType] =
- scala.List(Byte, Char, Short, Int, Long, Float, Double, Boolean, Unit)
-
// some types whose companions we utilize
final val AnyRef: NameType = "AnyRef"
final val Array: NameType = "Array"
final val List: NameType = "List"
final val Seq: NameType = "Seq"
final val Symbol: NameType = "Symbol"
- final val ClassTag: NameType = "ClassTag"
final val WeakTypeTag: NameType = "WeakTypeTag"
final val TypeTag : NameType = "TypeTag"
final val Expr: NameType = "Expr"
@@ -206,10 +195,11 @@ trait StdNames {
}
abstract class TypeNames extends Keywords with TypeNamesApi {
+ override type NameType = TypeName
+
protected implicit def createNameType(name: String): TypeName = newTypeNameCached(name)
final val BYNAME_PARAM_CLASS_NAME: NameType = "<byname>"
- final val EQUALS_PATTERN_NAME: NameType = "<equals>"
final val JAVA_REPEATED_PARAM_CLASS_NAME: NameType = "<repeated...>"
final val LOCAL_CHILD: NameType = "<local child>"
final val REFINE_CLASS_NAME: NameType = "<refinement>"
@@ -220,12 +210,10 @@ trait StdNames {
final val Any: NameType = "Any"
final val AnyVal: NameType = "AnyVal"
- final val ExprApi: NameType = "ExprApi"
final val Mirror: NameType = "Mirror"
final val Nothing: NameType = "Nothing"
final val Null: NameType = "Null"
final val Object: NameType = "Object"
- final val PartialFunction: NameType = "PartialFunction"
final val PrefixType: NameType = "PrefixType"
final val Product: NameType = "Product"
final val Serializable: NameType = "Serializable"
@@ -239,7 +227,6 @@ trait StdNames {
final val Group: NameType = "Group"
final val Tree: NameType = "Tree"
final val Type : NameType = "Type"
- final val TypeTree: NameType = "TypeTree"
// Annotation simple names, used in Namer
final val BeanPropertyAnnot: NameType = "BeanProperty"
@@ -249,13 +236,11 @@ trait StdNames {
// Classfile Attributes
final val AnnotationDefaultATTR: NameType = "AnnotationDefault"
final val BridgeATTR: NameType = "Bridge"
- final val ClassfileAnnotationATTR: NameType = "RuntimeInvisibleAnnotations" // RetentionPolicy.CLASS. Currently not used (Apr 2009).
final val CodeATTR: NameType = "Code"
final val ConstantValueATTR: NameType = "ConstantValue"
final val DeprecatedATTR: NameType = "Deprecated"
final val ExceptionsATTR: NameType = "Exceptions"
final val InnerClassesATTR: NameType = "InnerClasses"
- final val LineNumberTableATTR: NameType = "LineNumberTable"
final val LocalVariableTableATTR: NameType = "LocalVariableTable"
final val RuntimeAnnotationATTR: NameType = "RuntimeVisibleAnnotations" // RetentionPolicy.RUNTIME
final val RuntimeParamAnnotationATTR: NameType = "RuntimeVisibleParameterAnnotations" // RetentionPolicy.RUNTIME (annotations on parameters)
@@ -272,6 +257,8 @@ trait StdNames {
}
abstract class TermNames extends Keywords with TermNamesApi {
+ override type NameType = TermName
+
protected implicit def createNameType(name: String): TermName = newTermNameCached(name)
/** Base strings from which synthetic names are derived. */
@@ -284,9 +271,6 @@ trait StdNames {
val EXCEPTION_RESULT_PREFIX = "exceptionResult"
val EXPAND_SEPARATOR_STRING = "$$"
val INTERPRETER_IMPORT_WRAPPER = "$iw"
- val INTERPRETER_LINE_PREFIX = "line"
- val INTERPRETER_VAR_PREFIX = "res"
- val INTERPRETER_WRAPPER_SUFFIX = "$object"
val LOCALDUMMY_PREFIX = "<local " // owner of local blocks
val PROTECTED_PREFIX = "protected$"
val PROTECTED_SET_PREFIX = PROTECTED_PREFIX + "set"
@@ -305,7 +289,6 @@ trait StdNames {
val LAZY_SLOW_SUFFIX: NameType = "$lzycompute"
val LOCAL_SUFFIX_STRING = " "
val UNIVERSE_BUILD_PREFIX: NameType = "$u.build."
- val UNIVERSE_BUILD: NameType = "$u.build"
val UNIVERSE_PREFIX: NameType = "$u."
val UNIVERSE_SHORT: NameType = "$u"
val MIRROR_PREFIX: NameType = "$m."
@@ -340,7 +323,6 @@ trait StdNames {
def isLocalName(name: Name) = name endsWith LOCAL_SUFFIX_STRING
def isLoopHeaderLabel(name: Name) = (name startsWith WHILE_PREFIX) || (name startsWith DO_WHILE_PREFIX)
def isProtectedAccessorName(name: Name) = name startsWith PROTECTED_PREFIX
- def isSuperAccessorName(name: Name) = name startsWith SUPER_PREFIX_STRING
def isReplWrapperName(name: Name) = name containsName INTERPRETER_IMPORT_WRAPPER
def isSetterName(name: Name) = name endsWith SETTER_SUFFIX
def isTraitSetterName(name: Name) = isSetterName(name) && (name containsName TRAIT_SETTER_SEPARATOR_STRING)
@@ -357,11 +339,6 @@ trait StdNames {
)
}
- def isDeprecatedIdentifierName(name: Name) = name.toTermName match {
- case nme.`then` | nme.`macro` => true
- case _ => false
- }
-
def isOpAssignmentName(name: Name) = name match {
case raw.NE | raw.LE | raw.GE | EMPTY => false
case _ =>
@@ -396,18 +373,6 @@ trait StdNames {
else name
)
- /*
- def anonNumberSuffix(name: Name): Name = {
- ("" + name) lastIndexOf '$' match {
- case -1 => nme.EMPTY
- case idx =>
- val s = name drop idx
- if (s.toString forall (_.isDigit)) s
- else nme.EMPTY
- }
- }
- */
-
/** Return the original name and the types on which this name
* is specialized. For example,
* {{{
@@ -459,18 +424,6 @@ trait StdNames {
} else name.toTermName
}
- // If the name ends with $nn where nn are
- // all digits, strip the $ and the digits.
- // Otherwise return the argument.
- def stripAnonNumberSuffix(name: Name): Name = {
- var pos = name.length
- while (pos > 0 && name.charAt(pos - 1).isDigit)
- pos -= 1
-
- if (pos <= 0 || pos == name.length || name.charAt(pos - 1) != '$') name
- else name.subName(0, pos - 1)
- }
-
def stripModuleSuffix(name: Name): Name = (
if (isModuleName(name)) name dropRight MODULE_SUFFIX_STRING.length else name
)
@@ -485,8 +438,6 @@ trait StdNames {
final val Nil: NameType = "Nil"
final val Predef: NameType = "Predef"
- final val ScalaRunTime: NameType = "ScalaRunTime"
- final val Some: NameType = "Some"
val _1 : NameType = "_1"
val _2 : NameType = "_2"
@@ -582,14 +533,10 @@ trait StdNames {
val Annotation: NameType = "Annotation"
val Any: NameType = "Any"
val AnyVal: NameType = "AnyVal"
- val AppliedTypeTree: NameType = "AppliedTypeTree"
- val Apply: NameType = "Apply"
val ArrayAnnotArg: NameType = "ArrayAnnotArg"
- val Constant: NameType = "Constant"
val ConstantType: NameType = "ConstantType"
val EmptyPackage: NameType = "EmptyPackage"
val EmptyPackageClass: NameType = "EmptyPackageClass"
- val ExistentialTypeTree: NameType = "ExistentialTypeTree"
val Flag : NameType = "Flag"
val Ident: NameType = "Ident"
val Import: NameType = "Import"
@@ -598,10 +545,8 @@ trait StdNames {
val Modifiers: NameType = "Modifiers"
val NestedAnnotArg: NameType = "NestedAnnotArg"
val NoFlags: NameType = "NoFlags"
- val NoPrefix: NameType = "NoPrefix"
val NoSymbol: NameType = "NoSymbol"
val Nothing: NameType = "Nothing"
- val NoType: NameType = "NoType"
val Null: NameType = "Null"
val Object: NameType = "Object"
val RootPackage: NameType = "RootPackage"
@@ -611,17 +556,14 @@ trait StdNames {
val StringContext: NameType = "StringContext"
val This: NameType = "This"
val ThisType: NameType = "ThisType"
- val Tree : NameType = "Tree"
val Tuple2: NameType = "Tuple2"
val TYPE_ : NameType = "TYPE"
- val TypeApply: NameType = "TypeApply"
val TypeRef: NameType = "TypeRef"
val TypeTree: NameType = "TypeTree"
val UNIT : NameType = "UNIT"
val add_ : NameType = "add"
val annotation: NameType = "annotation"
val anyValClass: NameType = "anyValClass"
- val append: NameType = "append"
val apply: NameType = "apply"
val applyDynamic: NameType = "applyDynamic"
val applyDynamicNamed: NameType = "applyDynamicNamed"
@@ -629,34 +571,24 @@ trait StdNames {
val args : NameType = "args"
val argv : NameType = "argv"
val arrayClass: NameType = "arrayClass"
- val arrayElementClass: NameType = "arrayElementClass"
- val arrayValue: NameType = "arrayValue"
val array_apply : NameType = "array_apply"
val array_clone : NameType = "array_clone"
val array_length : NameType = "array_length"
val array_update : NameType = "array_update"
- val arraycopy: NameType = "arraycopy"
- val asTerm: NameType = "asTerm"
val asModule: NameType = "asModule"
- val asMethod: NameType = "asMethod"
val asType: NameType = "asType"
- val asClass: NameType = "asClass"
val asInstanceOf_ : NameType = "asInstanceOf"
val asInstanceOf_Ob : NameType = "$asInstanceOf"
- val assert_ : NameType = "assert"
- val assume_ : NameType = "assume"
val box: NameType = "box"
val build : NameType = "build"
val bytes: NameType = "bytes"
val canEqual_ : NameType = "canEqual"
val checkInitialized: NameType = "checkInitialized"
- val ClassManifestFactory: NameType = "ClassManifestFactory"
val classOf: NameType = "classOf"
- val clone_ : NameType = if (forMSIL) "MemberwiseClone" else "clone" // sn.OClone causes checkinit failure
+ val clone_ : NameType = "clone"
val conforms: NameType = "conforms"
val copy: NameType = "copy"
val currentMirror: NameType = "currentMirror"
- val definitions: NameType = "definitions"
val delayedInit: NameType = "delayedInit"
val delayedInitArg: NameType = "delayedInit$body"
val drop: NameType = "drop"
@@ -667,30 +599,23 @@ trait StdNames {
val equalsNumChar : NameType = "equalsNumChar"
val equalsNumNum : NameType = "equalsNumNum"
val equalsNumObject : NameType = "equalsNumObject"
- val equals_ : NameType = if (forMSIL) "Equals" else "equals"
+ val equals_ : NameType = "equals"
val error: NameType = "error"
- val eval: NameType = "eval"
val ex: NameType = "ex"
val experimental: NameType = "experimental"
val f: NameType = "f"
val false_ : NameType = "false"
val filter: NameType = "filter"
- val finalize_ : NameType = if (forMSIL) "Finalize" else "finalize"
+ val finalize_ : NameType = "finalize"
val find_ : NameType = "find"
val flagsFromBits : NameType = "flagsFromBits"
val flatMap: NameType = "flatMap"
val foreach: NameType = "foreach"
- val genericArrayOps: NameType = "genericArrayOps"
val get: NameType = "get"
- val getOrElse: NameType = "getOrElse"
- val hasNext: NameType = "hasNext"
- val hashCode_ : NameType = if (forMSIL) "GetHashCode" else "hashCode"
+ val hashCode_ : NameType = "hashCode"
val hash_ : NameType = "hash"
- val head: NameType = "head"
- val identity: NameType = "identity"
val implicitly: NameType = "implicitly"
val in: NameType = "in"
- val info: NameType = "info"
val inlinedEquals: NameType = "inlinedEquals"
val isArray: NameType = "isArray"
val isDefinedAt: NameType = "isDefinedAt"
@@ -702,57 +627,41 @@ trait StdNames {
val lang: NameType = "lang"
val length: NameType = "length"
val lengthCompare: NameType = "lengthCompare"
- val liftedTree: NameType = "liftedTree"
- val `macro` : NameType = "macro"
- val macroThis : NameType = "_this"
val macroContext : NameType = "c"
val main: NameType = "main"
- val manifest: NameType = "manifest"
- val ManifestFactory: NameType = "ManifestFactory"
val manifestToTypeTag: NameType = "manifestToTypeTag"
val map: NameType = "map"
val materializeClassTag: NameType = "materializeClassTag"
val materializeWeakTypeTag: NameType = "materializeWeakTypeTag"
val materializeTypeTag: NameType = "materializeTypeTag"
- val mirror : NameType = "mirror"
val moduleClass : NameType = "moduleClass"
- val name: NameType = "name"
val ne: NameType = "ne"
val newArray: NameType = "newArray"
val newFreeTerm: NameType = "newFreeTerm"
val newFreeType: NameType = "newFreeType"
val newNestedSymbol: NameType = "newNestedSymbol"
val newScopeWith: NameType = "newScopeWith"
- val next: NameType = "next"
- val nmeNewTermName: NameType = "newTermName"
- val nmeNewTypeName: NameType = "newTypeName"
- val normalize: NameType = "normalize"
val notifyAll_ : NameType = "notifyAll"
val notify_ : NameType = "notify"
val null_ : NameType = "null"
- val ofDim: NameType = "ofDim"
- val origin: NameType = "origin"
+ val pendingSuperCall: NameType = "pendingSuperCall"
val prefix : NameType = "prefix"
val productArity: NameType = "productArity"
val productElement: NameType = "productElement"
val productIterator: NameType = "productIterator"
val productPrefix: NameType = "productPrefix"
val readResolve: NameType = "readResolve"
- val reflect : NameType = "reflect"
val reify : NameType = "reify"
val rootMirror : NameType = "rootMirror"
- val runOrElse: NameType = "runOrElse"
val runtime: NameType = "runtime"
val runtimeClass: NameType = "runtimeClass"
val runtimeMirror: NameType = "runtimeMirror"
- val sameElements: NameType = "sameElements"
val scala_ : NameType = "scala"
val selectDynamic: NameType = "selectDynamic"
val selectOverloadedMethod: NameType = "selectOverloadedMethod"
val selectTerm: NameType = "selectTerm"
val selectType: NameType = "selectType"
val self: NameType = "self"
- val setAccessible: NameType = "setAccessible"
val setAnnotations: NameType = "setAnnotations"
val setSymbol: NameType = "setSymbol"
val setType: NameType = "setType"
@@ -762,21 +671,18 @@ trait StdNames {
val staticModule : NameType = "staticModule"
val staticPackage : NameType = "staticPackage"
val synchronized_ : NameType = "synchronized"
- val tail: NameType = "tail"
- val `then` : NameType = "then"
+ val TermName: NameType = "TermName"
val this_ : NameType = "this"
val thisPrefix : NameType = "thisPrefix"
- val throw_ : NameType = "throw"
val toArray: NameType = "toArray"
- val toList: NameType = "toList"
val toObjectArray : NameType = "toObjectArray"
- val toSeq: NameType = "toSeq"
- val toString_ : NameType = if (forMSIL) "ToString" else "toString"
+ val toString_ : NameType = "toString"
val toTypeConstructor: NameType = "toTypeConstructor"
val tpe : NameType = "tpe"
val tree : NameType = "tree"
val true_ : NameType = "true"
val typedProductIterator: NameType = "typedProductIterator"
+ val TypeName: NameType = "TypeName"
val typeTagToManifest: NameType = "typeTagToManifest"
val unapply: NameType = "unapply"
val unapplySeq: NameType = "unapplySeq"
@@ -790,14 +696,9 @@ trait StdNames {
val view_ : NameType = "view"
val wait_ : NameType = "wait"
val withFilter: NameType = "withFilter"
- val wrap: NameType = "wrap"
- val zip: NameType = "zip"
-
- val synthSwitch: NameType = "$synthSwitch"
// unencoded operators
object raw {
- final val AMP : NameType = "&"
final val BANG : NameType = "!"
final val BAR : NameType = "|"
final val DOLLAR: NameType = "$"
@@ -806,7 +707,6 @@ trait StdNames {
final val MINUS: NameType = "-"
final val NE: NameType = "!="
final val PLUS : NameType = "+"
- final val SLASH: NameType = "/"
final val STAR : NameType = "*"
final val TILDE: NameType = "~"
@@ -862,14 +762,7 @@ trait StdNames {
// Grouped here so Cleanup knows what tests to perform.
val CommonOpNames = Set[Name](OR, XOR, AND, EQ, NE)
- val ConversionNames = Set[Name](toByte, toChar, toDouble, toFloat, toInt, toLong, toShort)
val BooleanOpNames = Set[Name](ZOR, ZAND, UNARY_!) ++ CommonOpNames
- val NumberOpNames = (
- Set[Name](ADD, SUB, MUL, DIV, MOD, LSL, LSR, ASR, LT, LE, GE, GT)
- ++ Set(UNARY_+, UNARY_-, UNARY_!)
- ++ ConversionNames
- ++ CommonOpNames
- )
val add: NameType = "add"
val complement: NameType = "complement"
@@ -1001,7 +894,6 @@ trait StdNames {
object fulltpnme extends TypeNames {
val RuntimeNothing: NameType = "scala.runtime.Nothing$"
val RuntimeNull: NameType = "scala.runtime.Null$"
- val JavaLangEnum: NameType = "java.lang.Enum"
}
/** Java binary names, like scala/runtime/Nothing$.
@@ -1016,16 +908,11 @@ trait StdNames {
val javanme = nme.javaKeywords
object nme extends TermNames {
-
- def isModuleVarName(name: Name): Boolean =
- stripAnonNumberSuffix(name) endsWith MODULE_VAR_SUFFIX
-
def moduleVarName(name: TermName): TermName =
newTermNameCached("" + name + MODULE_VAR_SUFFIX)
def getCause = sn.GetCause
def getClass_ = sn.GetClass
- def getComponentType = sn.GetComponentType
def getMethod_ = sn.GetMethod
def invoke_ = sn.Invoke
@@ -1038,55 +925,12 @@ trait StdNames {
val reflMethodCacheName: NameType = "reflMethod$Cache"
val reflMethodName: NameType = "reflMethod$Method"
- private val reflectionCacheNames = Set[NameType](
- reflPolyCacheName,
- reflClassCacheName,
- reflParamsCacheName,
- reflMethodCacheName,
- reflMethodName
- )
- def isReflectionCacheName(name: Name) = reflectionCacheNames exists (name startsWith _)
-
@deprecated("Use a method in tpnme", "2.10.0") def dropSingletonName(name: Name): TypeName = tpnme.dropSingletonName(name)
@deprecated("Use a method in tpnme", "2.10.0") def singletonName(name: Name): TypeName = tpnme.singletonName(name)
@deprecated("Use a method in tpnme", "2.10.0") def implClassName(name: Name): TypeName = tpnme.implClassName(name)
@deprecated("Use a method in tpnme", "2.10.0") def interfaceName(implname: Name): TypeName = tpnme.interfaceName(implname)
}
- abstract class SymbolNames {
- protected val stringToTermName = null
- protected val stringToTypeName = null
- protected implicit def createNameType(s: String): TypeName = newTypeNameCached(s)
-
- val BeanProperty : TypeName
- val BooleanBeanProperty : TypeName
- val BoxedBoolean : TypeName
- val BoxedCharacter : TypeName
- val BoxedNumber : TypeName
- val Class : TypeName
- val Delegate : TypeName
- val IOOBException : TypeName // IndexOutOfBoundsException
- val InvTargetException : TypeName // InvocationTargetException
- val JavaSerializable : TypeName
- val MethodAsObject : TypeName
- val NPException : TypeName // NullPointerException
- val Object : TypeName
- val String : TypeName
- val Throwable : TypeName
- val ValueType : TypeName
-
- val ForName : TermName
- val GetCause : TermName
- val GetClass : TermName
- val GetClassLoader : TermName
- val GetComponentType : TermName
- val GetMethod : TermName
- val Invoke : TermName
- val JavaLang : TermName
-
- val Boxed: immutable.Map[TypeName, TypeName]
- }
-
class JavaKeywords {
private val kw = new KeywordSetBuilder
@@ -1144,7 +988,11 @@ trait StdNames {
final val keywords = kw.result
}
- private abstract class JavaNames extends SymbolNames {
+ sealed abstract class SymbolNames {
+ protected val stringToTermName = null
+ protected val stringToTypeName = null
+ protected implicit def createNameType(s: String): TypeName = newTypeNameCached(s)
+
final val BoxedBoolean: TypeName = "java.lang.Boolean"
final val BoxedByte: TypeName = "java.lang.Byte"
final val BoxedCharacter: TypeName = "java.lang.Character"
@@ -1154,22 +1002,16 @@ trait StdNames {
final val BoxedLong: TypeName = "java.lang.Long"
final val BoxedNumber: TypeName = "java.lang.Number"
final val BoxedShort: TypeName = "java.lang.Short"
- final val Class: TypeName = "java.lang.Class"
- final val Delegate: TypeName = tpnme.NO_NAME
final val IOOBException: TypeName = "java.lang.IndexOutOfBoundsException"
final val InvTargetException: TypeName = "java.lang.reflect.InvocationTargetException"
final val MethodAsObject: TypeName = "java.lang.reflect.Method"
final val NPException: TypeName = "java.lang.NullPointerException"
final val Object: TypeName = "java.lang.Object"
- final val String: TypeName = "java.lang.String"
final val Throwable: TypeName = "java.lang.Throwable"
- final val ValueType: TypeName = tpnme.NO_NAME
- final val ForName: TermName = newTermName("forName")
final val GetCause: TermName = newTermName("getCause")
final val GetClass: TermName = newTermName("getClass")
final val GetClassLoader: TermName = newTermName("getClassLoader")
- final val GetComponentType: TermName = newTermName("getComponentType")
final val GetMethod: TermName = newTermName("getMethod")
final val Invoke: TermName = newTermName("invoke")
final val JavaLang: TermName = newTermName("java.lang")
@@ -1186,52 +1028,5 @@ trait StdNames {
)
}
- private class MSILNames extends SymbolNames {
- final val BeanProperty: TypeName = tpnme.NO_NAME
- final val BooleanBeanProperty: TypeName = tpnme.NO_NAME
- final val BoxedBoolean: TypeName = "System.IConvertible"
- final val BoxedCharacter: TypeName = "System.IConvertible"
- final val BoxedNumber: TypeName = "System.IConvertible"
- final val Class: TypeName = "System.Type"
- final val Delegate: TypeName = "System.MulticastDelegate"
- final val IOOBException: TypeName = "System.IndexOutOfRangeException"
- final val InvTargetException: TypeName = "System.Reflection.TargetInvocationException"
- final val JavaSerializable: TypeName = tpnme.NO_NAME
- final val MethodAsObject: TypeName = "System.Reflection.MethodInfo"
- final val NPException: TypeName = "System.NullReferenceException"
- final val Object: TypeName = "System.Object"
- final val String: TypeName = "System.String"
- final val Throwable: TypeName = "System.Exception"
- final val ValueType: TypeName = "System.ValueType"
-
- final val ForName: TermName = newTermName("GetType")
- final val GetCause: TermName = newTermName("InnerException") /* System.Reflection.TargetInvocationException.InnerException */
- final val GetClass: TermName = newTermName("GetType")
- final lazy val GetClassLoader: TermName = throw new UnsupportedOperationException("Scala reflection is not supported on this platform");
- final val GetComponentType: TermName = newTermName("GetElementType")
- final val GetMethod: TermName = newTermName("GetMethod")
- final val Invoke: TermName = newTermName("Invoke")
- final val JavaLang: TermName = newTermName("System")
-
- val Boxed = immutable.Map[TypeName, TypeName](
- tpnme.Boolean -> "System.Boolean",
- tpnme.Byte -> "System.SByte", // a scala.Byte is signed and a System.SByte too (unlike a System.Byte)
- tpnme.Char -> "System.Char",
- tpnme.Short -> "System.Int16",
- tpnme.Int -> "System.Int32",
- tpnme.Long -> "System.Int64",
- tpnme.Float -> "System.Single",
- tpnme.Double -> "System.Double"
- )
- }
-
- private class J2SENames extends JavaNames {
- final val BeanProperty: TypeName = "scala.beans.BeanProperty"
- final val BooleanBeanProperty: TypeName = "scala.beans.BooleanBeanProperty"
- final val JavaSerializable: TypeName = "java.io.Serializable"
- }
-
- lazy val sn: SymbolNames =
- if (forMSIL) new MSILNames
- else new J2SENames
+ lazy val sn: SymbolNames = new SymbolNames { }
}
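
The two "override type NameType" additions in this file pin the abstract NameType member to a concrete name class, so that the implicit createNameType conversion gives every string literal in TermNames and TypeNames the right kind of name. Below is a minimal standalone sketch of that pattern, assuming toy TermName/TypeName case classes in place of the compiler's cached names (newTermNameCached/newTypeNameCached); it illustrates the shape only, not the real Names implementation.

import scala.language.implicitConversions

// Toy model of the NameType / createNameType pattern used in StdNames above.
// TermName and TypeName are plain case classes here, not the compiler's
// interned names; no caching is modelled.
object NamesSketch {
  final case class TermName(value: String)
  final case class TypeName(value: String)

  trait NamesApi {
    type NameType                                           // refined below
    protected implicit def createNameType(name: String): NameType
  }

  abstract class TermNames extends NamesApi {
    override type NameType = TermName
    protected implicit def createNameType(name: String): TermName = TermName(name)
    // String literals on the right are lifted to TermName by createNameType.
    final val clone_ : NameType  = "clone"
    final val equals_ : NameType = "equals"
  }

  abstract class TypeNames extends NamesApi {
    override type NameType = TypeName
    protected implicit def createNameType(name: String): TypeName = TypeName(name)
    final val Any: NameType    = "Any"
    final val Object: NameType = "Object"
  }

  object termNames extends TermNames
  object typeNames extends TypeNames
  // termNames.clone_ == TermName("clone"); typeNames.Any == TypeName("Any")
}
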
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index 5ccf81b4b5..03ec59f0fe 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -15,6 +15,7 @@ abstract class SymbolTable extends macros.Universe
with Names
with Symbols
with Types
+ with Variances
with Kinds
with ExistentialsAndSkolems
with FlagSets
@@ -38,6 +39,7 @@ abstract class SymbolTable extends macros.Universe
with StdAttachments
with StdCreators
with BuildUtils
+ with PrivateWithin
{
val gen = new TreeGen { val global: SymbolTable.this.type = SymbolTable.this }
@@ -49,24 +51,28 @@ abstract class SymbolTable extends macros.Universe
def abort(msg: String): Nothing = throw new FatalError(supplementErrorMessage(msg))
def shouldLogAtThisPhase = false
+ def isPastTyper = false
@deprecated("Give us a reason", "2.10.0")
def abort(): Nothing = abort("unknown error")
+ @deprecated("Use devWarning if this is really a warning; otherwise use log", "2.11.0")
+ def debugwarn(msg: => String): Unit = devWarning(msg)
+
/** Override with final implementation for inlining. */
def debuglog(msg: => String): Unit = if (settings.debug.value) log(msg)
- def debugwarn(msg: => String): Unit = if (settings.debug.value) Console.err.println(msg)
+ def devWarning(msg: => String): Unit = if (settings.debug.value) Console.err.println(msg)
def throwableAsString(t: Throwable): String = "" + t
/** Prints a stack trace if -Ydebug or equivalent was given, otherwise does nothing. */
- def debugStack(t: Throwable): Unit = debugwarn(throwableAsString(t))
+ def debugStack(t: Throwable): Unit = devWarning(throwableAsString(t))
/** Overridden when we know more about what was happening during a failure. */
def supplementErrorMessage(msg: String): String = msg
private[scala] def printCaller[T](msg: String)(result: T) = {
Console.err.println("%s: %s\nCalled from: %s".format(msg, result,
- (new Throwable).getStackTrace.drop(2).take(15).mkString("\n")))
+ (new Throwable).getStackTrace.drop(2).take(50).mkString("\n")))
result
}
@@ -81,6 +87,16 @@ abstract class SymbolTable extends macros.Universe
result
}
@inline
+ final private[scala] def debuglogResult[T](msg: => String)(result: T): T = {
+ debuglog(msg + ": " + result)
+ result
+ }
+ @inline
+ final private[scala] def devWarningResult[T](msg: => String)(result: T): T = {
+ devWarning(msg + ": " + result)
+ result
+ }
+ @inline
final private[scala] def logResultIf[T](msg: => String, cond: T => Boolean)(result: T): T = {
if (cond(result))
log(msg + ": " + result)
@@ -113,12 +129,6 @@ abstract class SymbolTable extends macros.Universe
@elidable(elidable.WARNING)
def assertCorrectThread() {}
- /** Are we compiling for Java SE? */
- // def forJVM: Boolean
-
- /** Are we compiling for .NET? */
- def forMSIL: Boolean = false
-
/** A last effort if symbol in a select <owner>.<name> is not found.
* This is overridden by the reflection compiler to make up a package
* when it makes sense (i.e. <owner> is a package and <name> is a term name).
@@ -139,7 +149,7 @@ abstract class SymbolTable extends macros.Universe
type RunId = Int
final val NoRunId = 0
- // sigh, this has to be public or atPhase doesn't inline.
+ // sigh, this has to be public or enteringPhase doesn't inline.
var phStack: List[Phase] = Nil
private[this] var ph: Phase = NoPhase
private[this] var per = NoPeriod
@@ -182,9 +192,6 @@ abstract class SymbolTable extends macros.Universe
/** The phase identifier of the given period. */
final def phaseId(period: Period): Phase#Id = period & 0xFF
- /** The period at the start of run that includes `period`. */
- final def startRun(period: Period): Period = period & 0xFFFFFF00
-
/** The current period. */
final def currentPeriod: Period = {
//assert(per == (currentRunId << 8) + phase.id)
@@ -202,23 +209,17 @@ abstract class SymbolTable extends macros.Universe
p != NoPhase && phase.id > p.id
/** Perform given operation at given phase. */
- @inline final def atPhase[T](ph: Phase)(op: => T): T = {
+ @inline final def enteringPhase[T](ph: Phase)(op: => T): T = {
val saved = pushPhase(ph)
try op
finally popPhase(saved)
}
+ @inline final def exitingPhase[T](ph: Phase)(op: => T): T = enteringPhase(ph.next)(op)
+ @inline final def enteringPrevPhase[T](op: => T): T = enteringPhase(phase.prev)(op)
- /** Since when it is to be "at" a phase is inherently ambiguous,
- * a couple unambiguously named methods.
- */
- @inline final def beforePhase[T](ph: Phase)(op: => T): T = atPhase(ph)(op)
- @inline final def afterPhase[T](ph: Phase)(op: => T): T = atPhase(ph.next)(op)
- @inline final def afterCurrentPhase[T](op: => T): T = atPhase(phase.next)(op)
- @inline final def beforePrevPhase[T](op: => T): T = atPhase(phase.prev)(op)
-
- @inline final def atPhaseNotLaterThan[T](target: Phase)(op: => T): T =
- if (isAtPhaseAfter(target)) atPhase(target)(op) else op
+ @inline final def enteringPhaseNotLaterThan[T](target: Phase)(op: => T): T =
+ if (isAtPhaseAfter(target)) enteringPhase(target)(op) else op
final def isValid(period: Period): Boolean =
period != 0 && runId(period) == currentRunId && {
@@ -231,7 +232,7 @@ abstract class SymbolTable extends macros.Universe
def noChangeInBaseClasses(it: InfoTransformer, limit: Phase#Id): Boolean = (
it.pid >= limit ||
!it.changesBaseClasses && noChangeInBaseClasses(it.next, limit)
- );
+ )
period != 0 && runId(period) == currentRunId && {
val pid = phaseId(period)
if (phase.id > pid) noChangeInBaseClasses(infoTransformers.nextFrom(pid), phase.id)
@@ -303,7 +304,6 @@ abstract class SymbolTable extends macros.Universe
object perRunCaches {
import java.lang.ref.WeakReference
- import scala.runtime.ScalaRunTime.stringOf
import scala.collection.generic.Clearable
// Weak references so the garbage collector will take care of
@@ -346,26 +346,15 @@ abstract class SymbolTable extends macros.Universe
*/
def isCompilerUniverse = false
+ @deprecated("Use enteringPhase", "2.10.0")
+ @inline final def atPhase[T](ph: Phase)(op: => T): T = enteringPhase(ph)(op)
+ @deprecated("Use enteringPhaseNotLaterThan", "2.10.0")
+ @inline final def atPhaseNotLaterThan[T](target: Phase)(op: => T): T = enteringPhaseNotLaterThan(target)(op)
+
/**
* Adds the `sm` String interpolator to a [[scala.StringContext]].
*/
implicit val StringContextStripMarginOps: StringContext => StringContextStripMarginOps = util.StringContextStripMarginOps
-
- def importPrivateWithinFromJavaFlags(sym: Symbol, jflags: Int): Symbol = {
- import ClassfileConstants._
- if ((jflags & (JAVA_ACC_PRIVATE | JAVA_ACC_PROTECTED | JAVA_ACC_PUBLIC)) == 0)
- // See ticket #1687 for an example of when topLevelClass is NoSymbol: it
- // apparently occurs when processing v45.3 bytecode.
- if (sym.enclosingTopLevelClass != NoSymbol)
- sym.privateWithin = sym.enclosingTopLevelClass.owner
-
- // protected in java means package protected. #3946
- if ((jflags & JAVA_ACC_PROTECTED) != 0)
- if (sym.enclosingTopLevelClass != NoSymbol)
- sym.privateWithin = sym.enclosingTopLevelClass.owner
-
- sym
- }
}
object SymbolTableStats {
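
The atPhase-to-enteringPhase rename above keeps the same push/run/restore discipline: save the current phase, run the operation at the target phase, and restore it in a finally block; exitingPhase(ph) is simply enteringPhase at ph.next. Here is a minimal standalone sketch of that shape, assuming a toy Phase case class and simple stack bookkeeping in place of the compiler's Phase, pushPhase and popPhase.

// Toy model of the enteringPhase / exitingPhase discipline from the
// SymbolTable hunk above. Phase, pushPhase and popPhase are stand-ins,
// not the compiler's implementations.
object PhaseSketch {
  final case class Phase(id: Int, name: String) {
    def next: Phase = Phase(id + 1, name + "+")
  }

  val NoPhase = Phase(0, "<no phase>")

  private var phStack: List[Phase] = Nil
  private var ph: Phase = NoPhase

  def phase: Phase = ph

  private def pushPhase(target: Phase): Phase = {
    val saved = ph
    ph = target
    phStack ::= target
    saved
  }

  private def popPhase(saved: Phase): Unit = {
    phStack = phStack.tail
    ph = saved
  }

  /** Run op with `phase` set to target, restoring the previous phase afterwards. */
  def enteringPhase[T](target: Phase)(op: => T): T = {
    val saved = pushPhase(target)
    try op
    finally popPhase(saved)
  }

  /** Run op as of the phase right after `target`. */
  def exitingPhase[T](target: Phase)(op: => T): T = enteringPhase(target.next)(op)
}

// Usage: PhaseSketch.enteringPhase(PhaseSketch.Phase(7, "erasure")) { PhaseSketch.phase.name }
// evaluates to "erasure" and leaves the current phase unchanged afterwards.
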
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index d9eb48ff2d..6837f37445 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -8,10 +8,11 @@ package internal
import scala.collection.{ mutable, immutable }
import scala.collection.mutable.ListBuffer
-import util.Statistics
+import util.{ Statistics, shortClassOfInstance }
import Flags._
import scala.annotation.tailrec
-import scala.reflect.io.AbstractFile
+import scala.reflect.io.{ AbstractFile, NoAbstractFile }
+import Variance._
trait Symbols extends api.Symbols { self: SymbolTable =>
import definitions._
@@ -19,8 +20,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
protected var ids = 0
- val emptySymbolArray = new Array[Symbol](0)
-
protected def nextId() = { ids += 1; ids }
/** Used for deciding in the IDE whether we can interrupt the compiler */
@@ -70,6 +69,23 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
val originalOwner = perRunCaches.newMap[Symbol, Symbol]()
+ // TODO - don't allow the owner to be changed without checking invariants, at least
+ // when under some flag. Define per-phase invariants for owner/owned relationships,
+ // e.g. after flatten all classes are owned by package classes, there are lots and
+ // lots of these to be declared (or more realistically, discovered.)
+ protected def saveOriginalOwner(sym: Symbol) {
+ if (originalOwner contains sym) ()
+ else originalOwner(sym) = sym.rawowner
+ }
+ protected def originalEnclosingMethod(sym: Symbol): Symbol = {
+ if (sym.isMethod || sym == NoSymbol) sym
+ else {
+ val owner = originalOwner.getOrElse(sym, sym.rawowner)
+ if (sym.isLocalDummy) owner.enclClass.primaryConstructor
+ else originalEnclosingMethod(owner)
+ }
+ }
+
abstract class SymbolContextApiImpl extends SymbolContextApi {
this: Symbol =>
@@ -171,10 +187,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def debugFlagString: String = flagString(AllFlags)
/** String representation of symbol's variance */
- def varianceString: String =
- if (variance == 1) "+"
- else if (variance == -1) "-"
- else ""
+ def varianceString: String = variance.symbolicString
override def flagMask =
if (settings.debug.value && !isAbstractType) AllFlags
@@ -186,7 +199,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (isGADTSkolem) " (this is a GADT skolem)"
else ""
- def shortSymbolClass = getClass.getName.split('.').last.stripPrefix("Symbols$")
+ def shortSymbolClass = shortClassOfInstance(this)
def symbolCreationString: String = (
"%s%25s | %-40s | %s".format(
if (settings.uniqid.value) "%06d | ".format(id) else "",
@@ -249,20 +262,29 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def newImport(pos: Position): TermSymbol =
newTermSymbol(nme.IMPORT, pos)
+ def newModuleVarSymbol(accessor: Symbol): TermSymbol = {
+ val newName = nme.moduleVarName(accessor.name.toTermName)
+ val newFlags = MODULEVAR | ( if (this.isClass) PrivateLocal | SYNTHETIC else 0 )
+ val newInfo = accessor.tpe.finalResultType
+ val mval = newVariable(newName, accessor.pos.focus, newFlags) addAnnotation VolatileAttr
+
+ if (this.isClass)
+ mval setInfoAndEnter newInfo
+ else
+ mval setInfo newInfo
+ }
+
final def newModuleSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol =
newTermSymbol(name, pos, newFlags).asInstanceOf[ModuleSymbol]
final def newModuleAndClassSymbol(name: Name, pos: Position, flags0: FlagSet): (ModuleSymbol, ClassSymbol) = {
val flags = flags0 | MODULE
- val m = newModuleSymbol(name, pos, flags)
+ val m = newModuleSymbol(name.toTermName, pos, flags)
val c = newModuleClass(name.toTypeName, pos, flags & ModuleToClassFlags)
connectModuleToClass(m, c)
(m, c)
}
- final def newPackageSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol =
- newTermSymbol(name, pos, newFlags).asInstanceOf[ModuleSymbol]
-
final def newModuleClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleClassSymbol =
newClassSymbol(name, pos, newFlags).asInstanceOf[ModuleClassSymbol]
@@ -325,11 +347,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
() => { cnt += 1; nme.syntheticParamName(cnt) }
}
- /** Synthetic value parameters when parameter symbols are not available
- */
- final def newSyntheticValueParamss(argtypess: List[List[Type]]): List[List[TermSymbol]] =
- argtypess map (xs => newSyntheticValueParams(xs, freshNamer))
-
/** Synthetic value parameters when parameter symbols are not available.
* Calling this method multiple times will re-use the same parameter names.
*/
@@ -345,7 +362,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def newSyntheticValueParam(argtype: Type, name: TermName = nme.syntheticParamName(1)): TermSymbol =
newValueParameter(name, owner.pos.focus, SYNTHETIC) setInfo argtype
- def newSyntheticTypeParam(): TypeSymbol = newSyntheticTypeParam("T0", 0L)
def newSyntheticTypeParam(name: String, newFlags: Long): TypeSymbol = newTypeParameter(newTypeName(name), NoPosition, newFlags) setInfo TypeBounds.empty
def newSyntheticTypeParams(num: Int): List[TypeSymbol] = (0 until num).toList map (n => newSyntheticTypeParam("T" + n, 0L))
@@ -409,14 +425,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def newRefinementClass(pos: Position): RefinementClassSymbol =
createRefinementClassSymbol(pos, 0L)
- /** Create a new getter for current symbol (which must be a field)
- */
- final def newGetter: MethodSymbol = (
- owner.newMethod(nme.getterName(name.toTermName), NoPosition, getterFlags(flags))
- setPrivateWithin privateWithin
- setInfo MethodType(Nil, tpe)
- )
-
final def newErrorSymbol(name: Name): Symbol = name match {
case x: TypeName => newErrorClass(x)
case x: TermName => newErrorValue(x)
@@ -532,14 +540,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def isContravariant = false
def isCovariant = false
- def isExistentialQuantified = false
def isExistentialSkolem = false
def isExistentiallyBound = false
def isGADTSkolem = false
def isTypeParameter = false
def isTypeParameterOrSkolem = false
def isTypeSkolem = false
- def isTypeMacro = false
def isInvariant = !isCovariant && !isContravariant
/** Qualities of Terms, always false for TypeSymbols.
@@ -589,13 +595,24 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def isEffectiveRoot = false
+ /** Can this symbol only be subclassed by bottom classes? This is assessed
+ * to be the case if it is final, and any type parameters are invariant.
+ */
+ def hasOnlyBottomSubclasses = {
+ def loop(tparams: List[Symbol]): Boolean = tparams match {
+ case Nil => true
+ case x :: xs => x.variance.isInvariant && loop(xs)
+ }
+ isClass && isFinal && loop(typeParams)
+ }
+
final def isLazyAccessor = isLazy && lazyAccessor != NoSymbol
final def isOverridableMember = !(isClass || isEffectivelyFinal) && (this ne NoSymbol) && owner.isClass
/** Does this symbol denote a wrapper created by the repl? */
final def isInterpreterWrapper = (
(this hasFlag MODULE)
- && owner.isPackageClass
+ && isTopLevel
&& nme.isReplWrapperName(name)
)
final def getFlag(mask: Long): Long = {
@@ -646,7 +663,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isStaticModule = isModule && isStatic && !isMethod
final def isThisSym = isTerm && owner.thisSym == this
final def isError = hasFlag(IS_ERROR)
- final def isErroneous = isError || isInitialized && tpe.isErroneous
+ final def isErroneous = isError || isInitialized && tpe_*.isErroneous
def isHigherOrderTypeParameter = owner.isTypeParameterOrSkolem
@@ -658,7 +675,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
info.firstParent.typeSymbol == AnyValClass && !isPrimitiveValueClass
final def isMethodWithExtension =
- isMethod && owner.isDerivedValueClass && !isParamAccessor && !isConstructor && !hasFlag(SUPERACCESSOR) && !isTermMacro
+ isMethod && owner.isDerivedValueClass && !isParamAccessor && !isConstructor && !hasFlag(SUPERACCESSOR) && !isMacro
final def isAnonymousFunction = isSynthetic && (name containsName tpnme.ANON_FUN_NAME)
final def isDefinedInPackage = effectiveOwner.isPackageClass
@@ -713,10 +730,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
def isStrictFP = hasAnnotation(ScalaStrictFPAttr) || (enclClass hasAnnotation ScalaStrictFPAttr)
- def isSerializable = (
- info.baseClasses.exists(p => p == SerializableClass || p == JavaSerializableClass)
- || hasAnnotation(SerializableAttr) // last part can be removed, @serializable annotation is deprecated
- )
+ def isSerializable = info.baseClasses.exists(p => p == SerializableClass || p == JavaSerializableClass)
def hasBridgeAnnotation = hasAnnotation(BridgeClass)
def isDeprecated = hasAnnotation(DeprecatedAttr)
def deprecationMessage = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 0)
@@ -726,14 +740,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
= hasAnnotation(DeprecatedInheritanceAttr)
def deprecatedInheritanceMessage
= getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 0)
- def deprecatedInheritanceVersion
- = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 1)
def hasDeprecatedOverridingAnnotation
= hasAnnotation(DeprecatedOverridingAttr)
def deprecatedOverridingMessage
= getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 0)
- def deprecatedOverridingVersion
- = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 1)
// !!! when annotation arguments are not literal strings, but any sort of
// assembly of strings, there is a fair chance they will turn up here not as
@@ -813,19 +823,28 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isStaticOwner: Boolean =
isPackageClass || isModuleClass && isStatic
- def isTopLevelModule = hasFlag(MODULE) && owner.isPackageClass
+ /** A helper function for isEffectivelyFinal. */
+ private def isNotOverridden = (
+ owner.isClass && (
+ owner.isEffectivelyFinal
+ || owner.isSealed && owner.children.forall(c => c.isEffectivelyFinal && (overridingSymbol(c) == NoSymbol))
+ )
+ )
/** Is this symbol effectively final? I.e, it cannot be overridden */
final def isEffectivelyFinal: Boolean = (
(this hasFlag FINAL | PACKAGE)
- || isModuleOrModuleClass && (owner.isPackageClass || !settings.overrideObjects.value)
+ || isModuleOrModuleClass && (isTopLevel || !settings.overrideObjects.value)
|| isTerm && (
isPrivate
|| isLocal
- || owner.isClass && owner.isEffectivelyFinal
- )
+ || isNotOverridden
+ )
)
+ /** Is this symbol owned by a package? */
+ final def isTopLevel = owner.isPackageClass
+
/** Is this symbol locally defined? I.e. not accessed from outside `this` instance */
final def isLocal: Boolean = owner.isTerm
@@ -843,41 +862,29 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def isLocalClass = false
- def isStableClass = false
-
-/* code for fixing nested objects
- override final def isModuleClass: Boolean =
- super.isModuleClass && !isExpandedModuleClass
-*/
/** Is this class or type defined as a structural refinement type?
*/
final def isStructuralRefinement: Boolean =
- (isClass || isType || isModule) && info.normalize/*.underlying*/.isStructuralRefinement
+ (isClass || isType || isModule) && info.dealiasWiden/*.underlying*/.isStructuralRefinement
/** Is this a term symbol only defined in a refinement (so that it needs
* to be accessed by reflection)?
*/
- def isOnlyRefinementMember: Boolean =
+ def isOnlyRefinementMember: Boolean = (
isTerm && // type members are not affected
owner.isRefinementClass && // owner must be a refinement class
(owner.info decl name) == this && // symbol must be explicitly declared in the refinement (not synthesized from glb)
- allOverriddenSymbols.isEmpty && // symbol must not override a symbol in a base class
+ !isOverridingSymbol && // symbol must not override a symbol in a base class
!isConstant // symbol must not be a constant. Question: Can we exclude @inline methods as well?
+ )
final def isStructuralRefinementMember = owner.isStructuralRefinement && isPossibleInRefinement && isPublic
final def isPossibleInRefinement = !isConstructor && !isOverridingSymbol
- /** Is this symbol a member of class `clazz`? */
- def isMemberOf(clazz: Symbol) =
- clazz.info.member(name).alternatives contains this
-
/** A member of class `base` is incomplete if
* (1) it is declared deferred or
* (2) it is abstract override and its super symbol in `base` is
* nonexistent or incomplete.
- *
- * @param base ...
- * @return ...
*/
final def isIncompleteIn(base: Symbol): Boolean =
this.isDeferred ||
@@ -888,7 +895,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// Does not always work if the rawInfo is a SourcefileLoader, see comment
// in "def coreClassesFirst" in Global.
- def exists = !owner.isPackageClass || { rawInfo.load(this); rawInfo != NoType }
+ def exists = !isTopLevel || { rawInfo.load(this); rawInfo != NoType }
final def isInitialized: Boolean =
validTo != NoPeriod
@@ -914,14 +921,14 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (isAliasType) return true
if (isType && isNonClassType) return false
if (isRefinementClass) return false
- return true
+ true
}
- /** The variance of this symbol as an integer */
- final def variance: Int =
- if (isCovariant) 1
- else if (isContravariant) -1
- else 0
+ /** The variance of this symbol. */
+ def variance: Variance =
+ if (isCovariant) Covariant
+ else if (isContravariant) Contravariant
+ else Invariant
/** The sequence number of this parameter symbol among all type
* and value parameters of symbol's owner. -1 if symbol does not
@@ -950,18 +957,15 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
rawowner
}
+ // Like owner, but NoSymbol.owner == NoSymbol instead of throwing an exception.
+ final def safeOwner: Symbol = if (this eq NoSymbol) NoSymbol else owner
+
// TODO - don't allow the owner to be changed without checking invariants, at least
// when under some flag. Define per-phase invariants for owner/owned relationships,
// e.g. after flatten all classes are owned by package classes, there are lots and
// lots of these to be declared (or more realistically, discovered.)
def owner_=(owner: Symbol) {
- // don't keep the original owner in presentation compiler runs
- // (the map will grow indefinitely, and the only use case is the
- // backend).
- if (!forInteractive) {
- if (originalOwner contains this) ()
- else originalOwner(this) = rawowner
- }
+ saveOriginalOwner(this)
assert(isCompilerUniverse, "owner_= is not thread-safe; cannot be run in reflexive code")
if (traceSymbolActivity)
traceSymbols.recordNewSymbolOwner(this, owner)
@@ -1080,9 +1084,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
protected def createImplClassSymbol(name: TypeName, pos: Position, newFlags: Long): ClassSymbol =
new ClassSymbol(this, pos, name) with ImplClassSymbol initFlags newFlags
- protected def createTermSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol =
- new TermSymbol(this, pos, name) initFlags newFlags
-
protected def createMethodSymbol(name: TermName, pos: Position, newFlags: Long): MethodSymbol =
new MethodSymbol(this, pos, name) initFlags newFlags
@@ -1185,20 +1186,61 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
}
- /** Get type. The type of a symbol is:
- * for a type symbol, the type corresponding to the symbol itself,
- * @M you should use tpeHK for a type symbol with type parameters if
- * the kind of the type need not be *, as tpe introduces dummy arguments
- * to generate a type of kind *
- * for a term symbol, its usual type.
- * See the tpe/tpeHK overrides in TypeSymbol for more.
+ /** The "type" of this symbol. The type of a term symbol is its usual
+ * type. A TypeSymbol is more complicated; see that class for elaboration.
+ * Since tpe forwards to tpe_*, if you call it on a type symbol with unapplied
+ * type parameters, the type returned will contain dummies types. These will
+ * hide legitimate errors or create spurious ones if used as normal types.
*
* For type symbols, `tpe` is different than `info`. `tpe` returns a typeRef
* to the type symbol, `info` returns the type information of the type symbol,
* e.g. a ClassInfoType for classes or a TypeBounds for abstract types.
*/
- def tpe: Type = info
- def tpeHK: Type = tpe
+ final def tpe: Type = tpe_*
+
+ /** typeConstructor throws an exception when called on term
+ * symbols; this is a more forgiving alternative. Calls
+ * typeConstructor on TypeSymbols, returns info otherwise.
+ */
+ def tpeHK: Type = info
+
+ /** Only applicable to TypeSymbols, it is the type corresponding
+ * to the symbol itself. For instance, the type of a List might
+ * be List[Int] - the same symbol's typeConstructor is simply List.
+ * One might be tempted to write that as List[_], and in some
+ * contexts this is possible, but it is discouraged because it is
+ * syntactically indistinguishable from and easily confused with the
+ * type List[T] forSome { type T; }, which can also be written List[_].
+ */
+ def typeConstructor: Type = (
+ // Avoiding a third override in NoSymbol to preserve bimorphism
+ if (this eq NoSymbol)
+ abort("no-symbol does not have a type constructor (this may indicate scalac cannot find fundamental classes)")
+ else
+ abort("typeConstructor inapplicable for " + this)
+ )
+
+ /** The type of this symbol, guaranteed to be of kind *.
+ * If there are unapplied type parameters, they will be
+ * substituted with dummy type arguments derived from the
+ * type parameters. Such types are not valid in a general
+ * sense and will cause difficult-to-find bugs if allowed
+ * to roam free.
+ *
+ * If you call tpe_* explicitly to obtain these types,
+ * you are responsible for them as if they were your own
+ * minor children.
+ */
+ def tpe_* : Type = info
+
+ // Alternate implementation of def tpe for warning about misuse,
+ // disabled to keep the method maximally hotspot-friendly:
+ // def tpe: Type = {
+ // val result = tpe_*
+ // if (settings.debug.value && result.typeArgs.nonEmpty)
+ // printCaller(s"""Call to ${this.tpe} created $result: call tpe_* or tpeHK""")("")
+ // result
+ // }
/** Get type info associated with symbol at current phase, after
* ensuring that symbol is initialized (i.e. type is completed).
@@ -1234,13 +1276,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
cnt += 1
// allow for two completions:
// one: sourceCompleter to LazyType, two: LazyType to completed type
- if (cnt == 3) abort("no progress in completing " + this + ":" + tp)
+ if (cnt == 3) abort(s"no progress in completing $this: $tp")
}
rawInfo
}
catch {
case ex: CyclicReference =>
- debugwarn("... hit cycle trying to complete " + this.fullLocationString)
+ devWarning("... hit cycle trying to complete " + this.fullLocationString)
throw ex
}
@@ -1252,9 +1294,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
/** Set initial info. */
- def setInfo(info: Type): this.type = { info_=(info); this }
+ def setInfo(info: Type): this.type = { info_=(info); this }
/** Modifies this symbol's info in place. */
- def modifyInfo(f: Type => Type): this.type = setInfo(f(info))
+ def modifyInfo(f: Type => Type): this.type = setInfo(f(info))
/** Substitute second list of symbols for first in current info. */
def substInfo(syms0: List[Symbol], syms1: List[Symbol]): this.type =
if (syms0.isEmpty) this
@@ -1365,6 +1407,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (!isInitialized) info
this
}
+ def maybeInitialize = {
+ try { initialize ; true }
+ catch { case _: CyclicReference => debuglog(s"Hit cycle in maybeInitialize of $this") ; false }
+ }
/** Called when the programmer requests information that might require initialization of the underlying symbol.
*
@@ -1407,14 +1453,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
!isInitialized && (flags & LOCKED) == 0 && shouldTriggerCompleter(this, if (infos ne null) infos.info else null, isFlagRelated, mask)
/** Was symbol's type updated during given phase? */
- final def isUpdatedAt(pid: Phase#Id): Boolean = {
- assert(isCompilerUniverse)
- var infos = this.infos
- while ((infos ne null) && phaseId(infos.validFrom) != pid + 1) infos = infos.prev
- infos ne null
- }
-
- /** Was symbol's type updated during given phase? */
final def hasTypeAt(pid: Phase#Id): Boolean = {
assert(isCompilerUniverse)
var infos = this.infos
@@ -1427,21 +1465,18 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* This is done in checkAccessible and overriding checks in refchecks
* We can't do this on class loading because it would result in infinite cycles.
*/
- final def cookJavaRawInfo() {
- if (hasFlag(TRIEDCOOKING)) return else setFlag(TRIEDCOOKING) // only try once...
- val oldInfo = info
- doCookJavaRawInfo()
- }
-
- protected def doCookJavaRawInfo(): Unit
+ def cookJavaRawInfo(): Unit = {
+ // only try once...
+ if (this hasFlag TRIEDCOOKING)
+ return
- /** The type constructor of a symbol is:
- * For a type symbol, the type corresponding to the symbol itself,
- * excluding parameters.
- * Not applicable for term symbols.
- */
- def typeConstructor: Type =
- abort("typeConstructor inapplicable for " + this)
+ this setFlag TRIEDCOOKING
+ info // force the current info
+ if (isJavaDefined || isType && owner.isJavaDefined)
+ this modifyInfo rawToExistential
+ else if (isOverloaded)
+ alternatives withFilter (_.isJavaDefined) foreach (_ modifyInfo rawToExistential)
+ }
/** The logic approximately boils down to finding the most recent phase
* which immediately follows any of parser, namer, typer, or erasure.
@@ -1465,7 +1500,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def unsafeTypeParams: List[Symbol] =
if (isMonomorphicType) Nil
- else atPhase(unsafeTypeParamPhase)(rawInfo.typeParams)
+ else enteringPhase(unsafeTypeParamPhase)(rawInfo.typeParams)
/** The type parameters of this symbol.
* assumption: if a type starts out as monomorphic, it will not acquire
@@ -1477,9 +1512,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// analogously to the "info" getter, here we allow for two completions:
// one: sourceCompleter to LazyType, two: LazyType to completed type
if (validTo == NoPeriod)
- atPhase(phaseOf(infos.validFrom))(rawInfo load this)
+ enteringPhase(phaseOf(infos.validFrom))(rawInfo load this)
if (validTo == NoPeriod)
- atPhase(phaseOf(infos.validFrom))(rawInfo load this)
+ enteringPhase(phaseOf(infos.validFrom))(rawInfo load this)
rawInfo.typeParams
}
@@ -1654,12 +1689,23 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def filter(cond: Symbol => Boolean): Symbol =
if (isOverloaded) {
- val alts = alternatives
- val alts1 = alts filter cond
- if (alts1 eq alts) this
+ var changed = false
+ var alts0: List[Symbol] = alternatives
+ var alts1: List[Symbol] = Nil
+
+ while (alts0.nonEmpty) {
+ if (cond(alts0.head))
+ alts1 ::= alts0.head
+ else
+ changed = true
+
+ alts0 = alts0.tail
+ }
+
+ if (!changed) this
else if (alts1.isEmpty) NoSymbol
else if (alts1.tail.isEmpty) alts1.head
- else owner.newOverloaded(info.prefix, alts1)
+ else owner.newOverloaded(info.prefix, alts1.reverse)
}
else if (cond(this)) this
else NoSymbol
@@ -1740,10 +1786,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def thisSym: Symbol = this
/** The type of `this` in a class, or else the type of the symbol itself. */
- def typeOfThis = thisSym.tpe
+ def typeOfThis = thisSym.tpe_*
- /** If symbol is a class, the type <code>this.type</code> in this class,
- * otherwise <code>NoPrefix</code>.
+ /** If symbol is a class, the type `this.type` in this class,
+ * otherwise `NoPrefix`.
* We always have: thisType <:< typeOfThis
*/
def thisType: Type = NoPrefix
@@ -1877,15 +1923,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* originalOwner map is not populated for memory considerations (the symbol
* may hang on to lazy types and in turn to whole (outdated) compilation units).
*/
- def originalEnclosingMethod: Symbol = {
- assert(!forInteractive, "originalOwner is not kept in presentation compiler runs.")
- if (isMethod) this
- else {
- val owner = originalOwner.getOrElse(this, rawowner)
- if (isLocalDummy) owner.enclClass.primaryConstructor
- else owner.originalEnclosingMethod
- }
- }
+ def originalEnclosingMethod: Symbol = Symbols.this.originalEnclosingMethod(this)
/** The method or class which logically encloses the current symbol.
* If the symbol is defined in the initialization part of a template
@@ -1922,7 +1960,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** The top-level class containing this symbol. */
def enclosingTopLevelClass: Symbol =
- if (owner.isPackageClass) {
+ if (isTopLevel) {
if (isClass) this else moduleClass
} else owner.enclosingTopLevelClass
@@ -1931,11 +1969,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
(this.rawInfo ne NoType)
&& (this.effectiveOwner == that.effectiveOwner)
&& ( !this.effectiveOwner.isPackageClass
- || (this.sourceFile eq null)
- || (that.sourceFile eq null)
- || (this.sourceFile.path == that.sourceFile.path) // Cheap possibly wrong check, then expensive normalization
- || (this.sourceFile.canonicalPath == that.sourceFile.canonicalPath)
- )
+ || (this.associatedFile eq NoAbstractFile)
+ || (that.associatedFile eq NoAbstractFile)
+ || (this.associatedFile.path == that.associatedFile.path) // Cheap possibly wrong check, then expensive normalization
+ || (this.associatedFile.canonicalPath == that.associatedFile.canonicalPath)
+ )
)
/** The internal representation of classes and objects:
@@ -2031,70 +2069,111 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* @param ofclazz The class containing the symbol's definition
* @param site The base type from which member types are computed
*/
- final def matchingSymbol(ofclazz: Symbol, site: Type): Symbol = {
- //OPT cut down on #closures by special casing non-overloaded case
- // was: ofclazz.info.nonPrivateDecl(name) filter (sym =>
- // !sym.isTerm || (site.memberType(this) matches site.memberType(sym)))
- val result = ofclazz.info.nonPrivateDecl(name)
- def qualifies(sym: Symbol) = !sym.isTerm || (site.memberType(this) matches site.memberType(sym))
- if ((result eq NoSymbol) || !result.isOverloaded && qualifies(result)) result
- else result filter qualifies
- }
+ final def matchingSymbol(ofclazz: Symbol, site: Type): Symbol =
+ matchingSymbolInternal(site, ofclazz.info nonPrivateDecl name)
/** The non-private member of `site` whose type and name match the type of this symbol. */
final def matchingSymbol(site: Type, admit: Long = 0L): Symbol =
- site.nonPrivateMemberAdmitting(name, admit).filter(sym =>
- !sym.isTerm || (site.memberType(this) matches site.memberType(sym)))
+ matchingSymbolInternal(site, site.nonPrivateMemberAdmitting(name, admit))
+
+ private def matchingSymbolInternal(site: Type, candidate: Symbol): Symbol = {
+ def qualifies(sym: Symbol) = !sym.isTerm || ((site memberType this) matches (site memberType sym))
+ //OPT cut down on #closures by special casing non-overloaded case
+ if (candidate.isOverloaded) candidate filter qualifies
+ else if (qualifies(candidate)) candidate
+ else NoSymbol
+ }
- /** The symbol, in class `ofclazz`, that is overridden by this symbol.
+ /** The symbol, in class `baseClass`, that is overridden by this symbol.
*
- * @param ofclazz is a base class of this symbol's owner.
+ * @param baseClass is a base class of this symbol's owner.
*/
- final def overriddenSymbol(ofclazz: Symbol): Symbol =
- if (isClassConstructor) NoSymbol else matchingSymbol(ofclazz, owner.thisType)
+ final def overriddenSymbol(baseClass: Symbol): Symbol = (
+ // concrete always overrides abstract, so don't let an abstract definition
+ // claim to be overriding an inherited concrete one.
+ matchingInheritedSymbolIn(baseClass) filter (res => res.isDeferred || !this.isDeferred)
+ )
+
+ private def matchingInheritedSymbolIn(baseClass: Symbol): Symbol =
+ if (canMatchInheritedSymbols) matchingSymbol(baseClass, owner.thisType) else NoSymbol
/** The symbol overriding this symbol in given subclass `ofclazz`.
*
* @param ofclazz is a subclass of this symbol's owner
*/
- final def overridingSymbol(ofclazz: Symbol): Symbol =
- if (isClassConstructor) NoSymbol else matchingSymbol(ofclazz, ofclazz.thisType)
+ final def overridingSymbol(ofclazz: Symbol): Symbol = (
+ if (canMatchInheritedSymbols)
+ matchingSymbol(ofclazz, ofclazz.thisType)
+ else
+ NoSymbol
+ )
+
+ /** If false, this symbol cannot possibly participate in an override,
+ * either as overrider or overridee. For internal use; you should consult
+ * with isOverridingSymbol. This is used by isOverridingSymbol to escape
+ * the recursive knot.
+ */
+ private def canMatchInheritedSymbols = (
+ (this ne NoSymbol)
+ && owner.isClass
+ && !this.isClass
+ && !this.isConstructor
+ )
+
+ // All the symbols overridden by this symbol and this symbol at the head,
+ // or Nil if this is NoSymbol.
+ def overrideChain = (
+ if (this eq NoSymbol) Nil
+ else if (isOverridingSymbol) this :: allOverriddenSymbols
+ else this :: Nil
+ )
- /** Returns all symbols overriden by this symbol. */
- final def allOverriddenSymbols: List[Symbol] =
- if (!owner.isClass) Nil
- else owner.ancestors map overriddenSymbol filter (_ != NoSymbol)
+ /** Returns all symbols overridden by this symbol. */
+ final def allOverriddenSymbols: List[Symbol] = {
+ def loop(xs: List[Symbol]): List[Symbol] = xs match {
+ case Nil => Nil
+ case x :: xs =>
+ overriddenSymbol(x) match {
+ case NoSymbol => loop(xs)
+ case sym => sym :: loop(xs)
+ }
+ }
+ if (isOverridingSymbol) loop(owner.ancestors) else Nil
+ }
/** Equivalent to allOverriddenSymbols.nonEmpty, but more efficient. */
- // !!! When if ever will this answer differ from .isOverride?
- // How/where is the OVERRIDE flag managed, as compared to how checks
- // based on type membership will evaluate?
- def isOverridingSymbol = owner.isClass && (
- owner.ancestors exists (cls => matchingSymbol(cls, owner.thisType) != NoSymbol)
+ lazy val isOverridingSymbol = (
+ canMatchInheritedSymbols
+ && owner.ancestors.exists(base => overriddenSymbol(base) != NoSymbol)
)
+
/** Equivalent to allOverriddenSymbols.head (or NoSymbol if no overrides) but more efficient. */
def nextOverriddenSymbol: Symbol = {
- if (owner.isClass) owner.ancestors foreach { base =>
- val sym = overriddenSymbol(base)
- if (sym != NoSymbol)
- return sym
+ @tailrec def loop(bases: List[Symbol]): Symbol = bases match {
+ case Nil => NoSymbol
+ case base :: rest =>
+ val sym = overriddenSymbol(base)
+ if (sym == NoSymbol) loop(rest) else sym
}
- NoSymbol
+ if (isOverridingSymbol) loop(owner.ancestors) else NoSymbol
}
/** Returns all symbols overridden by this symbol, plus all matching symbols
* defined in parents of the selftype.
*/
- final def extendedOverriddenSymbols: List[Symbol] =
- if (!owner.isClass) Nil
- else owner.thisSym.ancestors map overriddenSymbol filter (_ != NoSymbol)
+ final def extendedOverriddenSymbols: List[Symbol] = (
+ if (canMatchInheritedSymbols)
+ owner.thisSym.ancestors map overriddenSymbol filter (_ != NoSymbol)
+ else
+ Nil
+ )
/** The symbol accessed by a super in the definition of this symbol when
* seen from class `base`. This symbol is always concrete.
* pre: `this.owner` is in the base class sequence of `base`.
*/
final def superSymbol(base: Symbol): Symbol = {
- var bcs = base.info.baseClasses.dropWhile(owner != _).tail
+ var bcs = base.info.baseClasses dropWhile (owner != _) drop 1
var sym: Symbol = NoSymbol
while (!bcs.isEmpty && sym == NoSymbol) {
if (!bcs.head.isImplClass)
@@ -2116,7 +2195,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
)
/** The setter of this value or getter definition, or NoSymbol if none exists */
- final def setter(base: Symbol): Symbol = setter(base, false)
+ final def setter(base: Symbol): Symbol = setter(base, hasExpandedName = false)
final def setter(base: Symbol, hasExpandedName: Boolean): Symbol = {
var sname = nme.getterToSetter(nme.getterName(name.toTermName))
@@ -2179,29 +2258,17 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
case p :: _ => p
case _ => NoSymbol
}
-/* code for fixing nested objects
- def expandModuleClassName() {
- name = newTypeName(name.toString + "$")
- }
- def isExpandedModuleClass: Boolean = name(name.length - 1) == '$'
-*/
+ // We want to re-use the field in ClassSymbol which stores the source
+ // file to also store the classfile, but without changing the behavior
+ // of sourceFile (which, at least in the IDE, is expected to return
+ // actual source code only). So sourceFile has classfiles filtered out.
+ final def sourceFile: AbstractFile =
+ if ((associatedFile eq NoAbstractFile) || (associatedFile.path endsWith ".class")) null else associatedFile
- /** Desire to re-use the field in ClassSymbol which stores the source
- * file to also store the classfile, but without changing the behavior
- * of sourceFile (which is expected at least in the IDE only to
- * return actual source code.) So sourceFile has classfiles filtered out.
+ /** Overridden in ModuleSymbols to delegate to the module class.
+ * Never null; if there is no associated file, returns NoAbstractFile.
*/
- private def sourceFileOnly(file: AbstractFile): AbstractFile =
- if ((file eq null) || (file.path endsWith ".class")) null else file
-
- private def binaryFileOnly(file: AbstractFile): AbstractFile =
- if ((file eq null) || !(file.path endsWith ".class")) null else file
-
- final def binaryFile: AbstractFile = binaryFileOnly(associatedFile)
- final def sourceFile: AbstractFile = sourceFileOnly(associatedFile)
-
- /** Overridden in ModuleSymbols to delegate to the module class. */
def associatedFile: AbstractFile = enclosingTopLevelClass.associatedFile
def associatedFile_=(f: AbstractFile) { abort("associatedFile_= inapplicable for " + this) }
@@ -2222,9 +2289,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// ------ toString -------------------------------------------------------------------
- /** A tag which (in the ideal case) uniquely identifies class symbols */
- final def tag: Int = fullName.##
-
/** The simple name of this Symbol */
final def simpleName: Name = name
@@ -2251,7 +2315,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private case class SymbolKind(accurate: String, sanitized: String, abbreviation: String)
private def symbolKind: SymbolKind = {
var kind =
- if (isTermMacro) ("macro method", "macro method", "MAC")
+ if (isTermMacro) ("term macro", "macro method", "MACM")
else if (isInstanceOf[FreeTermSymbol]) ("free term", "free term", "FTE")
else if (isInstanceOf[FreeTypeSymbol]) ("free type", "free type", "FTY")
else if (isPackage) ("package", "package", "PK")
@@ -2455,6 +2519,14 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def isMethod = this hasFlag METHOD
override def isModule = this hasFlag MODULE
override def isOverloaded = this hasFlag OVERLOADED
+ /*** !!! TODO: shouldn't we do something like the following:
+ override def isOverloaded = (
+ if (this.isInitialized)
+ this hasFlag OVERLOADED
+ else
+ (infos ne null) && infos.info.isInstanceOf[OverloadedType]
+ )
+ ***/
override def isPackage = this hasFlag PACKAGE
override def isValueParameter = this hasFlag PARAM
@@ -2553,36 +2625,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
name = nme.expandedName(name.toTermName, base)
}
}
-
- protected def doCookJavaRawInfo() {
- def cook(sym: Symbol) {
- require(sym.isJavaDefined, sym)
- // @M: I think this is more desirable, but Martin prefers to leave raw-types as-is as much as possible
- // object rawToExistentialInJava extends TypeMap {
- // def apply(tp: Type): Type = tp match {
- // // any symbol that occurs in a java sig, not just java symbols
- // // see http://lampsvn.epfl.ch/trac/scala/ticket/2454#comment:14
- // case TypeRef(pre, sym, List()) if !sym.typeParams.isEmpty =>
- // val eparams = typeParamsToExistentials(sym, sym.typeParams)
- // existentialAbstraction(eparams, TypeRef(pre, sym, eparams map (_.tpe)))
- // case _ =>
- // mapOver(tp)
- // }
- // }
- val tpe1 = rawToExistential(sym.tpe)
- // println("cooking: "+ sym +": "+ sym.tpe +" to "+ tpe1)
- if (tpe1 ne sym.tpe) {
- sym.setInfo(tpe1)
- }
- }
-
- if (isJavaDefined)
- cook(this)
- else if (isOverloaded)
- for (sym2 <- alternatives)
- if (sym2.isJavaDefined)
- cook(sym2)
- }
}
implicit val TermSymbolTag = ClassTag[TermSymbol](classOf[TermSymbol])
@@ -2669,11 +2711,28 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
class AliasTypeSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName)
extends TypeSymbol(initOwner, initPos, initName) {
type TypeOfClonedSymbol = TypeSymbol
+ override def variance = if (hasLocalFlag) Bivariant else info.typeSymbol.variance
+ override def isContravariant = variance.isContravariant
+ override def isCovariant = variance.isCovariant
final override def isAliasType = true
override def cloneSymbolImpl(owner: Symbol, newFlags: Long): TypeSymbol =
owner.newNonClassSymbol(name, pos, newFlags)
}
+ /** Let's say you have a type definition
+ *
+ * {{{
+ * type T <: Number
+ * }}}
+ *
+ * and tsym is the symbol corresponding to T. Then
+ *
+ * {{{
+ * tsym is an instance of AbstractTypeSymbol
+ * tsym.info == TypeBounds(Nothing, Number)
+ * tsym.tpe == TypeRef(NoPrefix, T, List())
+ * }}}
+ */
class AbstractTypeSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName)
extends TypeSymbol(initOwner, initPos, initName) {
type TypeOfClonedSymbol = TypeSymbol
@@ -2702,7 +2761,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def asNameType(n: Name) = n.toTypeName
override def isNonClassType = true
- override def isTypeMacro = hasFlag(MACRO)
override def resolveOverloadedFlag(flag: Long) = flag match {
case TRAIT => "<trait>" // DEFAULTPARAM
@@ -2720,7 +2778,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def isAbstractType = this hasFlag DEFERRED
override def isContravariant = this hasFlag CONTRAVARIANT
override def isCovariant = this hasFlag COVARIANT
- override def isExistentialQuantified = isExistentiallyBound && !isSkolem
override def isExistentiallyBound = this hasFlag EXISTENTIAL
override def isTypeParameter = isTypeParameterOrSkolem && !isSkolem
override def isTypeParameterOrSkolem = this hasFlag PARAM
@@ -2742,63 +2799,57 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private def newPrefix = if (this hasFlag EXISTENTIAL | PARAM) NoPrefix else owner.thisType
private def newTypeRef(targs: List[Type]) = typeRef(newPrefix, this, targs)
- /** Let's say you have a type definition
+ /** A polymorphic type symbol has two distinct "types":
*
- * {{{
- * type T <: Number
- * }}}
+ * tpe_* a TypeRef with: dummy type args, no unapplied type parameters, and kind *
+ * tpeHK a TypeRef with: no type args, unapplied type parameters, and
+ * kind (*,*,...,*) => * depending on the number of tparams.
*
- * and tsym is the symbol corresponding to T. Then
- *
- * {{{
- * tsym.info = TypeBounds(Nothing, Number)
- * tsym.tpe = TypeRef(NoPrefix, T, List())
- * }}}
- */
- override def tpe: Type = {
- if (tpeCache eq NoType) throw CyclicReference(this, typeConstructor)
- if (tpePeriod != currentPeriod) {
- if (isValid(tpePeriod)) {
- tpePeriod = currentPeriod
- } else {
- if (isInitialized) tpePeriod = currentPeriod
- tpeCache = NoType
- val targs =
- if (phase.erasedTypes && this != ArrayClass) List()
- else unsafeTypeParams map (_.typeConstructor)
- //@M! use typeConstructor to generate dummy type arguments,
- // sym.tpe should not be called on a symbol that's supposed to be a higher-kinded type
- // memberType should be used instead, that's why it uses tpeHK and not tpe
- tpeCache = newTypeRef(targs)
- }
- }
- assert(tpeCache ne null/*, "" + this + " " + phase*/)//debug
+ * The dummy type args in tpe_* are created by wrapping a TypeRef
+ * around the type parameter symbols. Types containing dummies will
+ * hide errors or introduce spurious ones if they are passed around
+ * as if they were normal types. They should only be used in local operations
+ * where they will either be discarded immediately after, or will
+ * undergo substitution in which the dummies are replaced by actual
+ * type arguments.
+ */
+ override def tpe_* : Type = {
+ maybeUpdateTypeCache()
tpeCache
}
-
- /** @M -- tpe vs tpeHK:
- *
- * tpe: creates a TypeRef with dummy type arguments and kind *
- * tpeHK: creates a TypeRef with no type arguments but with type parameters
- *
- * If typeParams is nonEmpty, calling tpe may hide errors or
- * introduce spurious ones. (For example, when deriving a type from
- * the symbol of a type argument that may be higher-kinded.) As far
- * as I can tell, it only makes sense to call tpe in conjunction
- * with a substitution that replaces the generated dummy type
- * arguments by their actual types.
- *
- * TODO: the above conditions desperately need to be enforced by code.
- */
- override def tpeHK = typeConstructor // @M! used in memberType
-
override def typeConstructor: Type = {
+ maybeUpdateTyconCache()
+ tyconCache
+ }
+ override def tpeHK: Type = typeConstructor
+
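A rough public-API illustration, not from this patch, of the applied-vs-unapplied distinction described above (Type.typeConstructor plays the role of tpeHK; the object name is invented):

import scala.reflect.runtime.universe._

object TypeShapesSketch extends App {
  val applied = typeOf[List[Int]]
  println(applied)                  // List[Int] -- a TypeRef carrying real type args (kind *)
  println(applied.typeConstructor)  // List -- unapplied, with its type parameter still free
}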
+ private def maybeUpdateTyconCache() {
if ((tyconCache eq null) || tyconRunId != currentRunId) {
tyconCache = newTypeRef(Nil)
tyconRunId = currentRunId
}
assert(tyconCache ne null)
- tyconCache
+ }
+ private def maybeUpdateTypeCache() {
+ if (tpePeriod != currentPeriod) {
+ if (isValid(tpePeriod))
+ tpePeriod = currentPeriod
+ else
+ updateTypeCache() // perform the actual update
+ }
+ }
+ private def updateTypeCache() {
+ if (tpeCache eq NoType)
+ throw CyclicReference(this, typeConstructor)
+
+ if (isInitialized)
+ tpePeriod = currentPeriod
+
+ tpeCache = NoType // cycle marker
+ tpeCache = newTypeRef(
+ if (phase.erasedTypes && this != ArrayClass || unsafeTypeParams.isEmpty) Nil
+ else unsafeTypeParams map (_.typeConstructor)
+ )
}
override def info_=(tp: Type) {
@@ -2824,15 +2875,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* public class Test1<T extends Test3> {}
* info for T in Test1 should be >: Nothing <: Test3[_]
*/
- protected def doCookJavaRawInfo() {
- if (isJavaDefined || owner.isJavaDefined) {
- val tpe1 = rawToExistential(info)
- // println("cooking type: "+ this +": "+ info +" to "+ tpe1)
- if (tpe1 ne info) {
- setInfo(tpe1)
- }
- }
- }
if (Statistics.hotEnabled) Statistics.incCounter(typeSymbolCount)
}
@@ -2866,7 +2908,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def isTypeSkolem = this hasFlag PARAM
override def isAbstractType = this hasFlag DEFERRED
- override def isExistentialQuantified = false
override def existentialBound = if (isAbstractType) this.info else super.existentialBound
/** If typeskolem comes from a type parameter, that parameter, otherwise skolem itself */
@@ -2925,7 +2966,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def isAnonymousClass = name containsName tpnme.ANON_CLASS_NAME
override def isConcreteClass = !(this hasFlag ABSTRACT | TRAIT)
override def isJavaInterface = hasAllFlags(JAVA | TRAIT)
- override def isNestedClass = !owner.isPackageClass
+ override def isNestedClass = !isTopLevel
override def isNumericValueClass = definitions.isNumericValueClass(this)
override def isNumeric = isNumericValueClass
override def isPackageObjectClass = isModuleClass && (name == tpnme.PACKAGE)
@@ -2951,23 +2992,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def isLocalClass = (
isAnonOrRefinementClass
|| isLocal
- || !owner.isPackageClass && owner.isLocalClass
+ || !isTopLevel && owner.isLocalClass
)
- override def isStableClass = (this hasFlag STABLE) || checkStable()
-
- private def checkStable() = {
- def hasNoAbstractTypeMember(clazz: Symbol): Boolean =
- (clazz hasFlag STABLE) || {
- var e = clazz.info.decls.elems
- while ((e ne null) && !(e.sym.isAbstractType && info.member(e.sym.name) == e.sym))
- e = e.next
- e == null
- }
- (info.baseClasses forall hasNoAbstractTypeMember) && {
- setFlag(STABLE)
- true
- }
- }
override def enclClassChain = this :: owner.enclClassChain
@@ -2994,7 +3020,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (c.isOverloaded) c.alternatives.head else c
}
- override def associatedFile = if (owner.isPackageClass) _associatedFile else super.associatedFile
+ override def associatedFile = (
+ if (!isTopLevel) super.associatedFile
+ else if (_associatedFile eq null) NoAbstractFile // guarantee not null, but save cost of initializing the var
+ else _associatedFile
+ )
override def associatedFile_=(f: AbstractFile) { _associatedFile = f }
override def reset(completer: Type): this.type = {
@@ -3043,9 +3073,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
clone.typeOfThis = typeOfThis
clone.thisSym setName thisSym.name
}
- if (_associatedFile ne null)
- clone.associatedFile = _associatedFile
-
+ clone.associatedFile = _associatedFile
clone
}
@@ -3153,6 +3181,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
)
}
trait StubSymbol extends Symbol {
+ devWarning("creating stub symbol to defer error: " + missingMessage)
+
protected def missingMessage: String
/** Fail the stub by throwing a [[scala.reflect.internal.MissingRequirementError]]. */
@@ -3180,8 +3210,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def info = fail(NoType)
override def rawInfo = fail(NoType)
override def companionSymbol = fail(NoSymbol)
-
- debugwarn("creating stub symbol to defer error: " + missingMessage)
}
class StubClassSymbol(owner0: Symbol, name0: TypeName, protected val missingMessage: String) extends ClassSymbol(owner0, owner0.pos, name0) with StubSymbol
class StubTermSymbol(owner0: Symbol, name0: TermName, protected val missingMessage: String) extends TermSymbol(owner0, owner0.pos, name0) with StubSymbol
@@ -3231,7 +3259,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def enclosingTopLevelClass: Symbol = this
override def enclosingPackageClass: Symbol = this
override def enclMethod: Symbol = this
- override def associatedFile = null
+ override def associatedFile = NoAbstractFile
override def ownerChain: List[Symbol] = List()
override def ownersIterator: Iterator[Symbol] = Iterator.empty
override def alternatives: List[Symbol] = List()
@@ -3239,15 +3267,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def info: Type = NoType
override def existentialBound: Type = NoType
override def rawInfo: Type = NoType
- protected def doCookJavaRawInfo() {}
override def accessBoundary(base: Symbol): Symbol = enclosingRootClass
def cloneSymbolImpl(owner: Symbol, newFlags: Long) = abort("NoSymbol.clone()")
override def originalEnclosingMethod = this
override def owner: Symbol =
abort("no-symbol does not have an owner")
- override def typeConstructor: Type =
- abort("no-symbol does not have a type constructor (this may indicate scalac cannot find fundamental classes)")
}
protected def makeNoSymbol: NoSymbol = new NoSymbol
@@ -3352,7 +3377,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// ----- Hoisted closures and convenience methods, for compile time reductions -------
private[scala] final val symbolIsPossibleInRefinement = (sym: Symbol) => sym.isPossibleInRefinement
- private[scala] final val symbolIsNonVariant = (sym: Symbol) => sym.variance == 0
@tailrec private[scala] final
def allSymbolsHaveOwner(syms: List[Symbol], owner: Symbol): Boolean = syms match {
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index c1753fc5a1..b2269e476f 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -11,10 +11,7 @@ abstract class TreeGen extends macros.TreeBuilder {
def rootScalaDot(name: Name) = Select(rootId(nme.scala_) setSymbol ScalaPackage, name)
def scalaDot(name: Name) = Select(Ident(nme.scala_) setSymbol ScalaPackage, name)
def scalaAnnotationDot(name: Name) = Select(scalaDot(nme.annotation), name)
- def scalaAnyRefConstr = scalaDot(tpnme.AnyRef) setSymbol AnyRefClass
- def scalaUnitConstr = scalaDot(tpnme.Unit) setSymbol UnitClass
- def productConstr = scalaDot(tpnme.Product) setSymbol ProductRootClass
- def serializableConstr = scalaDot(tpnme.Serializable) setSymbol SerializableClass
+ def scalaAnyRefConstr = scalaDot(tpnme.AnyRef) setSymbol AnyRefClass // used in ide
def scalaFunctionConstr(argtpes: List[Tree], restpe: Tree, abstractFun: Boolean = false): Tree = {
val cls = if (abstractFun)
@@ -116,7 +113,7 @@ abstract class TreeGen extends macros.TreeBuilder {
}
/** Builds a reference to given symbol with given stable prefix. */
- def mkAttributedRef(pre: Type, sym: Symbol): Tree = {
+ def mkAttributedRef(pre: Type, sym: Symbol): RefTree = {
val qual = mkAttributedQualifier(pre)
qual match {
case EmptyTree => mkAttributedIdent(sym)
@@ -126,19 +123,21 @@ abstract class TreeGen extends macros.TreeBuilder {
}
/** Builds a reference to given symbol. */
- def mkAttributedRef(sym: Symbol): Tree =
+ def mkAttributedRef(sym: Symbol): RefTree =
if (sym.owner.isClass) mkAttributedRef(sym.owner.thisType, sym)
else mkAttributedIdent(sym)
- /** Builds an untyped reference to given symbol. */
- def mkUnattributedRef(sym: Symbol): Tree =
- if (sym.owner.isClass) Select(This(sym.owner), sym)
- else Ident(sym)
+ def mkUnattributedRef(sym: Symbol): RefTree = mkUnattributedRef(sym.fullNameAsName('.'))
+
+ def mkUnattributedRef(fullName: Name): RefTree = {
+ val hd :: tl = nme.segments(fullName.toString, assumeTerm = fullName.isTermName)
+ tl.foldLeft(Ident(hd): RefTree)(Select(_,_))
+ }
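The fold in mkUnattributedRef(fullName) turns dotted segments into a nested Select chain; here is a self-contained sketch of the same idea using only public tree constructors (RefChainSketch and refTo are illustrative names, not part of the patch):

import scala.reflect.runtime.universe._

object RefChainSketch extends App {
  // "a.b.c" => Select(Select(Ident(a), b), c)
  def refTo(fullName: String): Tree = {
    val hd :: tl = fullName.split('.').toList
    tl.foldLeft(Ident(newTermName(hd)): Tree)((qual, name) => Select(qual, newTermName(name)))
  }
  println(showRaw(refTo("scala.collection.immutable")))
}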
/** Replaces tree type with a stable type if possible */
- def stabilize(tree: Tree): Tree = {
- for(tp <- stableTypeFor(tree)) tree.tpe = tp
- tree
+ def stabilize(tree: Tree): Tree = stableTypeFor(tree) match {
+ case Some(tp) => tree setType tp
+ case _ => tree
}
/** Computes stable type for a tree if possible */
@@ -161,13 +160,13 @@ abstract class TreeGen extends macros.TreeBuilder {
def mkAttributedStableRef(sym: Symbol): Tree =
stabilize(mkAttributedRef(sym))
- def mkAttributedThis(sym: Symbol): Tree =
+ def mkAttributedThis(sym: Symbol): This =
This(sym.name.toTypeName) setSymbol sym setType sym.thisType
- def mkAttributedIdent(sym: Symbol): Tree =
- Ident(sym.name) setSymbol sym setType sym.tpe
+ def mkAttributedIdent(sym: Symbol): RefTree =
+ Ident(sym.name) setSymbol sym setType sym.tpeHK
- def mkAttributedSelect(qual: Tree, sym: Symbol): Tree = {
+ def mkAttributedSelect(qual: Tree, sym: Symbol): RefTree = {
// Tests involving the repl fail without the .isEmptyPackage condition.
if (qual.symbol != null && (qual.symbol.isEffectiveRoot || qual.symbol.isEmptyPackage))
mkAttributedIdent(sym)
@@ -213,7 +212,7 @@ abstract class TreeGen extends macros.TreeBuilder {
mkTypeApply(mkAttributedSelect(target, method), targs map TypeTree)
private def mkSingleTypeApply(value: Tree, tpe: Type, what: Symbol, wrapInApply: Boolean) = {
- val tapp = mkAttributedTypeApply(value, what, tpe.normalize :: Nil)
+ val tapp = mkAttributedTypeApply(value, what, tpe.dealias :: Nil)
if (wrapInApply) Apply(tapp, Nil) else tapp
}
private def typeTestSymbol(any: Boolean) = if (any) Any_isInstanceOf else Object_isInstanceOf
@@ -248,10 +247,6 @@ abstract class TreeGen extends macros.TreeBuilder {
Literal(Constant(tp)) setType ConstantType(Constant(tp))
/** Builds a list with given head and tail. */
- def mkNewCons(head: Tree, tail: Tree): Tree =
- New(Apply(mkAttributedRef(ConsClass), List(head, tail)))
-
- /** Builds a list with given head and tail. */
def mkNil: Tree = mkAttributedRef(NilModule)
/** Builds a tree representing an undefined local, as in
@@ -276,6 +271,10 @@ abstract class TreeGen extends macros.TreeBuilder {
case _ => Constant(null)
}
+ /** Wrap an expression in a named argument. */
+ def mkNamedArg(name: Name, tree: Tree): Tree = mkNamedArg(Ident(name), tree)
+ def mkNamedArg(lhs: Tree, rhs: Tree): Tree = atPos(rhs.pos)(AssignOrNamedArg(lhs, rhs))
+
/** Builds a tuple */
def mkTuple(elems: List[Tree]): Tree =
if (elems.isEmpty) Literal(Constant())
@@ -295,4 +294,8 @@ abstract class TreeGen extends macros.TreeBuilder {
assert(ReflectRuntimeUniverse != NoSymbol)
mkAttributedRef(ReflectRuntimeUniverse) setType singleType(ReflectRuntimeUniverse.owner.thisPrefix, ReflectRuntimeUniverse)
}
+
+ def mkPackageDef(packageName: String, stats: List[Tree]): PackageDef = {
+ PackageDef(mkUnattributedRef(newTermName(packageName)), stats)
+ }
}
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index 3e74b5d22d..e96fcc90df 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -65,6 +65,9 @@ abstract class TreeInfo {
false
}
+ // TODO SI-5304 tighten this up so we don't elide side effect in module loads
+ def isQualifierSafeToElide(tree: Tree): Boolean = isExprSafeToInline(tree)
+
/** Is tree an expression which can be inlined without affecting program semantics?
*
* Note that this is not called "isExprPure" since purity (lack of side-effects)
@@ -108,80 +111,69 @@ abstract class TreeInfo {
false
}
- @deprecated("Use isExprSafeToInline instead", "2.10.0")
- def isPureExpr(tree: Tree) = isExprSafeToInline(tree)
+ /** As if the name of the method didn't give it away,
+ * this logic is designed around issuing helpful
+ * warnings and minimizing spurious ones. That means
+ * don't reuse it for important matters like inlining
+ * decisions.
+ */
+ def isPureExprForWarningPurposes(tree: Tree) = tree match {
+ case EmptyTree | Literal(Constant(())) => false
+ case _ =>
+ def isWarnableRefTree = tree match {
+ case t: RefTree => isExprSafeToInline(t.qualifier) && t.symbol != null && t.symbol.isAccessor
+ case _ => false
+ }
+ def isWarnableSymbol = {
+ val sym = tree.symbol
+ (sym == null) || !(sym.isModule || sym.isLazy) || {
+ debuglog("'Pure' but side-effecting expression in statement position: " + tree)
+ false
+ }
+ }
- def zipMethodParamsAndArgs(params: List[Symbol], args: List[Tree]): List[(Symbol, Tree)] =
- mapMethodParamsAndArgs(params, args)((param, arg) => ((param, arg)))
+ ( !tree.isErrorTyped
+ && (isExprSafeToInline(tree) || isWarnableRefTree)
+ && isWarnableSymbol
+ )
+ }
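For context, a hedged example of the kind of statement this predicate is meant to flag; the exact warning text and settings are version dependent, so treat it as illustrative only:

object PureExprWarningSketch {
  def f(): Int = {
    "this string is discarded"  // pure expression in statement position: a warning candidate
    42
  }
}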
def mapMethodParamsAndArgs[R](params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => R): List[R] = {
val b = List.newBuilder[R]
foreachMethodParamAndArg(params, args)((param, arg) => b += f(param, arg))
- b.result
+ b.result()
}
def foreachMethodParamAndArg(params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => Unit): Boolean = {
val plen = params.length
val alen = args.length
def fail() = {
- global.debugwarn(
- "Mismatch trying to zip method parameters and argument list:\n" +
- " params = " + params + "\n" +
- " args = " + args + "\n"
- )
+ global.devWarning(
+ s"""|Mismatch trying to zip method parameters and argument list:
+ | params = $params
+ | args = $args""".stripMargin)
false
}
if (plen == alen) foreach2(params, args)(f)
- else if (params.isEmpty) return fail
+ else if (params.isEmpty) return fail()
else if (isVarArgsList(params)) {
val plenInit = plen - 1
if (alen == plenInit) {
if (alen == 0) Nil // avoid calling mismatched zip
else foreach2(params.init, args)(f)
}
- else if (alen < plenInit) return fail
+ else if (alen < plenInit) return fail()
else {
foreach2(params.init, args take plenInit)(f)
val remainingArgs = args drop plenInit
foreach2(List.fill(remainingArgs.size)(params.last), remainingArgs)(f)
}
}
- else return fail
+ else return fail()
true
}
- /**
- * Selects the correct parameter list when there are nested applications.
- * Given Apply(fn, args), args might correspond to any of fn.symbol's parameter
- * lists. To choose the correct one before uncurry, we have to unwrap any
- * applies: for instance Apply(fn @ Apply(Apply(_, _), _), args) implies args
- * correspond to the third parameter list.
- *
- * The argument fn is the function part of the apply node being considered.
- *
- * Also accounts for varargs.
- */
- private def applyMethodParameters(fn: Tree): List[Symbol] = {
- val depth = dissectApplied(fn).applyDepth
- // There could be applies which go beyond the parameter list(s),
- // being applied to the result of the method call.
- // !!! Note that this still doesn't seem correct, although it should
- // be closer than what it replaced.
- if (depth < fn.symbol.paramss.size) fn.symbol.paramss(depth)
- else if (fn.symbol.paramss.isEmpty) Nil
- else fn.symbol.paramss.last
- }
-
- def zipMethodParamsAndArgs(t: Tree): List[(Symbol, Tree)] = t match {
- case Apply(fn, args) => zipMethodParamsAndArgs(applyMethodParameters(fn), args)
- case _ => Nil
- }
- def foreachMethodParamAndArg(t: Tree)(f: (Symbol, Tree) => Unit): Unit = t match {
- case Apply(fn, args) => foreachMethodParamAndArg(applyMethodParameters(fn), args)(f)
- case _ =>
- }
-
/** Is symbol potentially a getter of a variable?
*/
def mayBeVarGetter(sym: Symbol): Boolean = sym.info match {
@@ -325,10 +317,6 @@ abstract class TreeInfo {
case x: Ident => !x.isBackquoted && nme.isVariableName(x.name)
case _ => false
}
- def isDeprecatedIdentifier(tree: Tree): Boolean = tree match {
- case x: Ident => !x.isBackquoted && nme.isDeprecatedIdentifierName(x.name)
- case _ => false
- }
/** The first constructor definitions in `stats` */
def firstConstructor(stats: List[Tree]): Tree = stats find {
@@ -346,6 +334,9 @@ abstract class TreeInfo {
def preSuperFields(stats: List[Tree]): List[ValDef] =
stats collect { case vd: ValDef if isEarlyValDef(vd) => vd }
+ def hasUntypedPreSuperFields(stats: List[Tree]): Boolean =
+ preSuperFields(stats) exists (_.tpt.isEmpty)
+
def isEarlyDef(tree: Tree) = tree match {
case TypeDef(mods, _, _, _) => mods hasFlag PRESUPER
case ValDef(mods, _, _, _) => mods hasFlag PRESUPER
@@ -384,15 +375,17 @@ abstract class TreeInfo {
case _ => false
}
+ /** Translates an Assign(_, _) node to AssignOrNamedArg(_, _) if
+ * the lhs is a simple ident. Otherwise returns unchanged.
+ */
+ def assignmentToMaybeNamedArg(tree: Tree) = tree match {
+ case t @ Assign(id: Ident, rhs) => atPos(t.pos)(AssignOrNamedArg(id, rhs))
+ case t => t
+ }
+
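A small sketch of the rewrite assignmentToMaybeNamedArg performs, using the public AssignOrNamedArg node (positions are omitted and the object name is invented):

import scala.reflect.runtime.universe._

object NamedArgSketch extends App {
  def toMaybeNamedArg(t: Tree): Tree = t match {
    case Assign(id: Ident, rhs) => AssignOrNamedArg(id, rhs)  // `x = rhs` with a simple ident lhs
    case other                  => other                      // anything else is left unchanged
  }
  val assign = Assign(Ident(newTermName("x")), Literal(Constant(42)))
  println(showRaw(toMaybeNamedArg(assign)))
}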
/** Is name a left-associative operator? */
def isLeftAssoc(operator: Name) = operator.nonEmpty && (operator.endChar != ':')
- /** Is tree a `this` node which belongs to `enclClass`? */
- def isSelf(tree: Tree, enclClass: Symbol): Boolean = tree match {
- case This(_) => tree.symbol == enclClass
- case _ => false
- }
-
/** a Match(Typed(_, tpt), _) must be translated into a switch if isSwitchAnnotation(tpt.tpe) */
def isSwitchAnnotation(tpe: Type) = tpe hasAnnotation definitions.SwitchClass
@@ -445,6 +438,23 @@ abstract class TreeInfo {
case _ => false
}
+ private def hasNoSymbol(t: Tree) = t.symbol == null || t.symbol == NoSymbol
+
+ /** If this CaseDef assigns a name to its top-level pattern,
+ * in the form 'name @ pattern' or as a bare variable pattern 'name',
+ * returns the name. Otherwise, nme.NO_NAME.
+ *
+ * Note: in the case of Constant patterns such as 'case x @ "" =>',
+ * the pattern matcher eliminates the binding and inlines the constant,
+ * so by the time this method can inspect the tree, the binding is
+ * gone and NO_NAME is returned.
+ */
+ def assignedNameOfPattern(cdef: CaseDef): Name = cdef.pat match {
+ case Bind(name, _) => name
+ case Ident(name) => name
+ case _ => nme.NO_NAME
+ }
+
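The same top-of-pattern inspection as assignedNameOfPattern, sketched against public tree constructors (only Bind and Ident carry a binder, as above; names are illustrative):

import scala.reflect.runtime.universe._

object PatternNameSketch extends App {
  def topLevelName(pat: Tree): Option[Name] = pat match {
    case Bind(name, _) => Some(name)  // case x @ <pattern> =>
    case Ident(name)   => Some(name)  // case x =>
    case _             => None        // no binder at the top
  }
  println(topLevelName(Bind(newTermName("x"), Literal(Constant(42)))))
}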
 /** Is this pattern node a synthetic catch-all case, added during PartialFunction synthesis before we know
 * whether the user-provided cases are exhaustive. */
def isSyntheticDefaultCase(cdef: CaseDef) = cdef match {
@@ -453,14 +463,13 @@ abstract class TreeInfo {
}
/** Does this CaseDef catch Throwable? */
- def catchesThrowable(cdef: CaseDef) = catchesAllOf(cdef, ThrowableClass.tpe)
-
- /** Does this CaseDef catch everything of a certain Type? */
- def catchesAllOf(cdef: CaseDef, threshold: Type) =
- isDefaultCase(cdef) || (cdef.guard.isEmpty && (unbind(cdef.pat) match {
- case Typed(Ident(nme.WILDCARD), tpt) => (tpt.tpe != null) && (threshold <:< tpt.tpe)
- case _ => false
- }))
+ def catchesThrowable(cdef: CaseDef) = (
+ cdef.guard.isEmpty && (unbind(cdef.pat) match {
+ case Ident(nme.WILDCARD) => true
+ case i@Ident(name) => hasNoSymbol(i)
+ case _ => false
+ })
+ )
/** Is this pattern node a catch-all or type-test pattern? */
def isCatchCase(cdef: CaseDef) = cdef match {
@@ -485,7 +494,7 @@ abstract class TreeInfo {
tp match {
case TypeRef(pre, sym, args) =>
- args.isEmpty && (sym.owner.isPackageClass || isSimple(pre))
+ args.isEmpty && (sym.isTopLevel || isSimple(pre))
case NoPrefix =>
true
case _ =>
@@ -524,6 +533,10 @@ abstract class TreeInfo {
def isSynthCaseSymbol(sym: Symbol) = sym hasAllFlags SYNTH_CASE_FLAGS
def hasSynthCaseSymbol(t: Tree) = t.symbol != null && isSynthCaseSymbol(t.symbol)
+ def isTraitRef(tree: Tree): Boolean = {
+ val sym = if (tree.tpe != null) tree.tpe.typeSymbol else null
+ ((sym ne null) && sym.initialize.isTrait)
+ }
/** Applications in Scala can have one of the following shapes:
*
@@ -619,6 +632,12 @@ abstract class TreeInfo {
}
loop(tree)
}
+
+ override def toString = {
+ val tstr = if (targs.isEmpty) "" else targs.mkString("[", ", ", "]")
+ val astr = argss map (args => args.mkString("(", ", ", ")")) mkString ""
+ s"$core$tstr$astr"
+ }
}
/** Returns a wrapper that knows how to destructure and analyze applications.
@@ -635,6 +654,8 @@ abstract class TreeInfo {
* For advanced use, call `dissectApplied` explicitly and use its methods instead of pattern matching.
*/
object Applied {
+ def apply(tree: Tree): Applied = new Applied(tree)
+
def unapply(applied: Applied): Option[(Tree, List[Tree], List[List[Tree]])] =
Some((applied.core, applied.targs, applied.argss))
@@ -747,4 +768,15 @@ abstract class TreeInfo {
case tree: RefTree => true
case _ => false
})
+
+ def isMacroApplication(tree: Tree): Boolean =
+ !tree.isDef && tree.symbol != null && tree.symbol.isMacro && !tree.symbol.isErroneous
+
+ def isMacroApplicationOrBlock(tree: Tree): Boolean = tree match {
+ case Block(_, expr) => isMacroApplicationOrBlock(expr)
+ case tree => isMacroApplication(tree)
+ }
+
+ def isNonTrivialMacroApplication(tree: Tree): Boolean =
+ isMacroApplication(tree) && dissectApplied(tree).core != tree
}
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 754adcb80d..c00337e578 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -14,6 +14,23 @@ trait Trees extends api.Trees { self: SymbolTable =>
private[scala] var nodeCount = 0
+ protected def treeLine(t: Tree): String =
+ if (t.pos.isDefined && t.pos.isRange) t.pos.lineContent.drop(t.pos.column - 1).take(t.pos.end - t.pos.start + 1)
+ else t.summaryString
+
+ protected def treeStatus(t: Tree, enclosingTree: Tree = null) = {
+ val parent = if (enclosingTree eq null) " " else " P#%5s".format(enclosingTree.id)
+
+ "[L%4s%8s] #%-6s %-15s %-10s // %s".format(t.pos.safeLine, parent, t.id, t.pos.show, t.shortClass, treeLine(t))
+ }
+ protected def treeSymStatus(t: Tree) = {
+ val line = if (t.pos.isDefined) "line %-4s".format(t.pos.safeLine) else " "
+ "#%-5s %s %-10s // %s".format(t.id, line, t.shortClass,
+ if (t.symbol ne NoSymbol) "(" + t.symbol.fullLocationString + ")"
+ else treeLine(t)
+ )
+ }
+
abstract class Tree extends TreeContextApiImpl with Attachable with Product {
val id = nodeCount // TODO: add to attachment?
nodeCount += 1
@@ -24,18 +41,24 @@ trait Trees extends api.Trees { self: SymbolTable =>
private[this] var rawtpe: Type = _
final def tpe = rawtpe
- def tpe_=(t: Type) = rawtpe = t
+ @deprecated("Use setType", "2.11.0") def tpe_=(t: Type): Unit = setType(t)
+
+ def clearType(): this.type = this setType null
def setType(tp: Type): this.type = { rawtpe = tp; this }
def defineType(tp: Type): this.type = setType(tp)
def symbol: Symbol = null //!!!OPT!!! symbol is about 3% of hot compile times -- megamorphic dispatch?
def symbol_=(sym: Symbol) { throw new UnsupportedOperationException("symbol_= inapplicable for " + this) }
def setSymbol(sym: Symbol): this.type = { symbol = sym; this }
- def hasSymbol = false
+ def hasSymbolField = false
+ @deprecated("Use hasSymbolField", "2.11.0") def hasSymbol = hasSymbolField
def isDef = false
def isEmpty = false
+ def nonEmpty = !isEmpty
+
+ def canHaveAttrs = true
/** The canonical way to test if a Tree represents a term.
*/
@@ -62,7 +85,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
private[scala] def copyAttrs(tree: Tree): this.type = {
rawatt = tree.rawatt
tpe = tree.tpe
- if (hasSymbol) symbol = tree.symbol
+ if (hasSymbolField) symbol = tree.symbol
this
}
@@ -158,6 +181,9 @@ trait Trees extends api.Trees { self: SymbolTable =>
override def substituteThis(clazz: Symbol, to: Tree): Tree =
new ThisSubstituter(clazz, to) transform this
+ def replace(from: Tree, to: Tree): Tree =
+ new TreeReplacer(from, to, positionAware = false) transform this
+
def hasSymbolWhich(f: Symbol => Boolean) =
(symbol ne null) && (symbol ne NoSymbol) && f(symbol)
@@ -210,7 +236,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
trait TypTree extends Tree with TypTreeApi
abstract class SymTree extends Tree with SymTreeContextApi {
- override def hasSymbol = true
+ override def hasSymbolField = true
override var symbol: Symbol = NoSymbol
}
@@ -228,14 +254,6 @@ trait Trees extends api.Trees { self: SymbolTable =>
override def isDef = true
}
- case object EmptyTree extends TermTree {
- val asList = List(this)
- super.tpe_=(NoType)
- override def tpe_=(t: Type) =
- if (t != NoType) throw new UnsupportedOperationException("tpe_=("+t+") inapplicable for <empty>")
- override def isEmpty = true
- }
-
abstract class MemberDef extends DefTree with MemberDefApi {
def mods: Modifiers
def keyword: String = this match {
@@ -416,6 +434,16 @@ trait Trees extends api.Trees { self: SymbolTable =>
def ApplyConstructor(tpt: Tree, args: List[Tree]) = Apply(Select(New(tpt), nme.CONSTRUCTOR), args)
+ // Creates a constructor call from the constructor symbol. This is
+ // to avoid winding up with an OverloadedType for the constructor call.
+ def NewFromConstructor(constructor: Symbol, args: Tree*) = {
+ assert(constructor.isConstructor, constructor)
+ val instance = New(TypeTree(constructor.owner.tpe))
+ val init = Select(instance, nme.CONSTRUCTOR) setSymbol constructor
+
+ Apply(init, args.toList)
+ }
+
case class ApplyDynamic(qual: Tree, args: List[Tree]) extends SymTree with TermTree
case class Super(qual: Tree, mix: TypeName) extends TermTree with SuperApi {
@@ -511,7 +539,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
case t => t
}
- orig = followOriginal(tree); setPos(tree.pos);
+ orig = followOriginal(tree); setPos(tree.pos)
this
}
@@ -603,6 +631,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
case _: ApplyToImplicitArgs => new ApplyToImplicitArgs(fun, args)
case _: ApplyImplicitView => new ApplyImplicitView(fun, args)
// TODO: ApplyConstructor ???
+ case self.pendingSuperCall => self.pendingSuperCall
case _ => new Apply(fun, args)
}).copyAttrs(tree)
def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]) =
@@ -866,7 +895,6 @@ trait Trees extends api.Trees { self: SymbolTable =>
/** Is the tree Predef, scala.Predef, or _root_.scala.Predef?
*/
def isReferenceToPredef(t: Tree) = isReferenceToScalaMember(t, nme.Predef)
- def isReferenceToAnyVal(t: Tree) = isReferenceToScalaMember(t, tpnme.AnyVal)
// --- modifiers implementation ---------------------------------------
@@ -924,13 +952,16 @@ trait Trees extends api.Trees { self: SymbolTable =>
def withPosition(flag: Long, position: Position) =
copy() setPositions positions + (flag -> position)
- override def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers =
- Modifiers(flags, privateWithin, f(annotations)) setPositions positions
+ override def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers = {
+ val newAnns = f(annotations)
+ if (annotations == newAnns) this
+ else Modifiers(flags, privateWithin, newAnns) setPositions positions
+ }
override def toString = "Modifiers(%s, %s, %s)".format(flagString, annotations mkString ", ", positions)
}
- object Modifiers extends ModifiersCreator
+ object Modifiers extends ModifiersExtractor
implicit val ModifiersTag = ClassTag[Modifiers](classOf[Modifiers])
@@ -965,12 +996,23 @@ trait Trees extends api.Trees { self: SymbolTable =>
def ValDef(sym: Symbol): ValDef = ValDef(sym, EmptyTree)
- object emptyValDef extends ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(NoType), EmptyTree) {
- override def isEmpty = true
+ trait CannotHaveAttrs extends Tree {
+ override def canHaveAttrs = false
+
+ private def unsupported(what: String, args: Any*) =
+ throw new UnsupportedOperationException(s"$what($args) inapplicable for "+self.toString)
+
super.setPos(NoPosition)
- override def setPos(pos: Position) = { assert(false); this }
+ override def setPos(pos: Position) = unsupported("setPos", pos)
+
+ super.setType(NoType)
+ override def tpe_=(t: Type) = if (t != NoType) unsupported("tpe_=", t)
}
+ case object EmptyTree extends TermTree with CannotHaveAttrs { override def isEmpty = true; val asList = List(this) }
+ object emptyValDef extends ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(NoType), EmptyTree) with CannotHaveAttrs
+ object pendingSuperCall extends Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List()) with CannotHaveAttrs
+
def DefDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef =
atPos(sym.pos) {
assert(sym != NoSymbol)
@@ -1050,6 +1092,9 @@ trait Trees extends api.Trees { self: SymbolTable =>
def New(tpe: Type, args: Tree*): Tree =
ApplyConstructor(TypeTree(tpe), args.toList)
+ def New(tpe: Type, argss: List[List[Tree]]): Tree =
+ New(TypeTree(tpe), argss)
+
def New(sym: Symbol, args: Tree*): Tree =
New(sym.tpe, args: _*)
@@ -1130,7 +1175,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
traverse(annot); traverse(arg)
case Template(parents, self, body) =>
traverseTrees(parents)
- if (!self.isEmpty) traverse(self)
+ if (self ne emptyValDef) traverse(self)
traverseStats(body, tree.symbol)
case Block(stats, expr) =>
traverseTrees(stats); traverse(expr)
@@ -1372,6 +1417,16 @@ trait Trees extends api.Trees { self: SymbolTable =>
if (tree eq orig) super.transform(tree)
else tree
}
+
+ /** A transformer that replaces tree `from` with tree `to` in a given tree */
+ class TreeReplacer(from: Tree, to: Tree, positionAware: Boolean) extends Transformer {
+ override def transform(t: Tree): Tree = {
+ if (t == from) to
+ else if (!positionAware || (t.pos includes from.pos) || t.pos.isTransparent) super.transform(t)
+ else t
+ }
+ }
+
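A runnable analogue of the TreeReplacer idea using the public Transformer; strict reference equality is relaxed to a name check purely for the demo, and the object names are invented:

import scala.reflect.runtime.universe._

object ReplaceSketch extends App {
  val tree = reify { val x = 0; x + 2 }.tree
  object replaceX extends Transformer {
    override def transform(t: Tree): Tree = t match {
      case Ident(name) if name.toString == "x" => Literal(Constant(1))
      case _                                   => super.transform(t)
    }
  }
  println(replaceX.transform(tree))
}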
// Create a readable string describing a substitution.
private def substituterString(fromStr: String, toStr: String, from: List[Any], to: List[Any]): String = {
"subst[%s, %s](%s)".format(fromStr, toStr, (from, to).zipped map (_ + " -> " + _) mkString ", ")
@@ -1387,7 +1442,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
def subst(from: List[Symbol], to: List[Tree]): Tree =
if (from.isEmpty) tree
else if (tree.symbol == from.head) to.head.shallowDuplicate // TODO: does it ever make sense *not* to perform a shallowDuplicate on `to.head`?
- else subst(from.tail, to.tail);
+ else subst(from.tail, to.tail)
subst(from, to)
case _ =>
super.transform(tree)
@@ -1400,7 +1455,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
class ThisSubstituter(clazz: Symbol, to: => Tree) extends Transformer {
val newtpe = to.tpe
override def transform(tree: Tree) = {
- if (tree.tpe ne null) tree.tpe = tree.tpe.substThis(clazz, newtpe)
+ tree modifyType (_.substThis(clazz, newtpe))
tree match {
case This(_) if tree.symbol == clazz => to
case _ => super.transform(tree)
@@ -1410,8 +1465,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
class TypeMapTreeSubstituter(val typeMap: TypeMap) extends Traverser {
override def traverse(tree: Tree) {
- if (tree.tpe ne null)
- tree.tpe = typeMap(tree.tpe)
+ tree modifyType typeMap
if (tree.isDef)
tree.symbol modifyInfo typeMap
@@ -1443,9 +1497,9 @@ trait Trees extends api.Trees { self: SymbolTable =>
if (tree.symbol == from.head) tree setSymbol to.head
else subst(from.tail, to.tail)
}
+ tree modifyType symSubst
- if (tree.tpe ne null) tree.tpe = symSubst(tree.tpe)
- if (tree.hasSymbol) {
+ if (tree.hasSymbolField) {
subst(from, to)
tree match {
case _: DefTree =>
@@ -1512,6 +1566,15 @@ trait Trees extends api.Trees { self: SymbolTable =>
}
}
+ private lazy val duplicator = new Duplicator(focusPositions = true)
+ private class Duplicator(focusPositions: Boolean) extends Transformer {
+ override val treeCopy = newStrictTreeCopier
+ override def transform(t: Tree) = {
+ val t1 = super.transform(t)
+ if ((t1 ne t) && t1.pos.isRange && focusPositions) t1 setPos t.pos.focus
+ t1
+ }
+ }
trait TreeStackTraverser extends Traverser {
import collection.mutable
val path: mutable.Stack[Tree] = mutable.Stack()
@@ -1521,14 +1584,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
}
}
- private lazy val duplicator = new Transformer {
- override val treeCopy = newStrictTreeCopier
- override def transform(t: Tree) = {
- val t1 = super.transform(t)
- if ((t1 ne t) && t1.pos.isRange) t1 setPos t.pos.focus
- t1
- }
- }
+ def duplicateAndKeepPositions(tree: Tree) = new Duplicator(focusPositions = false) transform tree
// ------ copiers -------------------------------------------
diff --git a/src/reflect/scala/reflect/internal/TypeDebugging.scala b/src/reflect/scala/reflect/internal/TypeDebugging.scala
index 68b4fa69a1..d437b1b058 100644
--- a/src/reflect/scala/reflect/internal/TypeDebugging.scala
+++ b/src/reflect/scala/reflect/internal/TypeDebugging.scala
@@ -9,8 +9,6 @@ package internal
trait TypeDebugging {
self: SymbolTable =>
- import definitions._
-
// @M toString that is safe during debugging (does not normalize, ...)
object typeDebug {
private def to_s(x: Any): String = x match {
@@ -20,7 +18,6 @@ trait TypeDebugging {
case x: Product => x.productIterator mkString ("(", ", ", ")")
case _ => "" + x
}
- def ptIndent(x: Any) = ("" + x).replaceAll("\\n", " ")
def ptBlock(label: String, pairs: (String, Any)*): String = {
if (pairs.isEmpty) label + "{ }"
else {
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index a27b37dae5..e1433d1893 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -14,8 +14,8 @@ import Flags._
import scala.util.control.ControlThrowable
import scala.annotation.tailrec
import util.Statistics
-import scala.runtime.ObjectRef
import util.ThreeValues._
+import Variance._
/* A standard type pattern match:
case ErrorType =>
@@ -68,33 +68,37 @@ import util.ThreeValues._
// a type variable
// Replace occurrences of type parameters with type vars, where
// inst is the instantiation and constr is a list of bounds.
- case DeBruijnIndex(level, index, args)
- // for dependent method types: a type referring to a method parameter.
- case ErasedValueType(tref)
+ case ErasedValueType(clazz, underlying)
// only used during erasure of derived value classes.
*/
-trait Types extends api.Types { self: SymbolTable =>
+trait Types
+ extends api.Types
+ with tpe.TypeComparers
+ with tpe.TypeToStrings
+ with tpe.CommonOwners
+ with tpe.GlbLubs
+ with tpe.TypeMaps
+ with tpe.TypeConstraints { self: SymbolTable =>
+
import definitions._
import TypesStats._
private var explainSwitch = false
private final val emptySymbolSet = immutable.Set.empty[Symbol]
- private final val LogPendingSubTypesThreshold = 50
- private final val LogPendingBaseTypesThreshold = 50
- private final val LogVolatileThreshold = 50
+ protected[internal] final val DefaultLogThreshhold = 50
+ private final val LogPendingBaseTypesThreshold = DefaultLogThreshhold
+ private final val LogVolatileThreshold = DefaultLogThreshhold
/** A don't care value for the depth parameter in lubs/glbs and related operations. */
- private final val AnyDepth = -3
+ protected[internal] final val AnyDepth = -3
/** Decrement depth unless it is a don't care. */
- private final def decr(depth: Int) = if (depth == AnyDepth) AnyDepth else depth - 1
+ protected[internal] final def decr(depth: Int) = if (depth == AnyDepth) AnyDepth else depth - 1
- private final val printLubs = sys.props contains "scalac.debug.lub"
private final val traceTypeVars = sys.props contains "scalac.debug.tvar"
- /** In case anyone wants to turn off lub verification without reverting anything. */
- private final val verifyLubs = true
+ private final val breakCycles = settings.breakCycles.value
/** In case anyone wants to turn off type parameter bounds being used
* to seed type constraints.
*/
@@ -102,107 +106,11 @@ trait Types extends api.Types { self: SymbolTable =>
protected val enableTypeVarExperimentals = settings.Xexperimental.value
- /** Empty immutable maps to avoid allocations. */
- private val emptySymMap = immutable.Map[Symbol, Symbol]()
- private val emptySymCount = immutable.Map[Symbol, Int]()
-
/** The current skolemization level, needed for the algorithms
* in isSameType, isSubType that do constraint solving under a prefix.
*/
var skolemizationLevel = 0
- /** A log of type variable with their original constraints. Used in order
- * to undo constraints in the case of isSubType/isSameType failure.
- */
- lazy val undoLog = newUndoLog
-
- protected def newUndoLog = new UndoLog
-
- class UndoLog extends Clearable {
- private type UndoPairs = List[(TypeVar, TypeConstraint)]
- //OPT this method is public so we can do `manual inlining`
- var log: UndoPairs = List()
-
- /*
- * These two methods provide explicit locking mechanism that is overridden in SynchronizedUndoLog.
- *
- * The idea behind explicit locking mechanism is that all public methods that access mutable state
- * will have to obtain the lock for their entire execution so both reads and writes can be kept in
- * right order. Originally, that was achieved by overriding those public methods in
- * `SynchronizedUndoLog` which was fine but expensive. The reason is that those public methods take
- * thunk as argument and if we keep them non-final there's no way to make them inlined so thunks
- * can go away.
- *
- * By using explicit locking we can achieve inlining.
- *
- * NOTE: They are made public for now so we can apply 'manual inlining' (copy&pasting into hot
- * places implementation of `undo` or `undoUnless`). This should be changed back to protected
- * once inliner is fixed.
- */
- def lock(): Unit = ()
- def unlock(): Unit = ()
-
- // register with the auto-clearing cache manager
- perRunCaches.recordCache(this)
-
- /** Undo all changes to constraints to type variables upto `limit`. */
- //OPT this method is public so we can do `manual inlining`
- def undoTo(limit: UndoPairs) {
- assertCorrectThread()
- while ((log ne limit) && log.nonEmpty) {
- val (tv, constr) = log.head
- tv.constr = constr
- log = log.tail
- }
- }
-
- /** No sync necessary, because record should only
- * be called from within a undo or undoUnless block,
- * which is already synchronized.
- */
- private[reflect] def record(tv: TypeVar) = {
- log ::= ((tv, tv.constr.cloneInternal))
- }
-
- def clear() {
- lock()
- try {
- if (settings.debug.value)
- self.log("Clearing " + log.size + " entries from the undoLog.")
- log = Nil
- } finally unlock()
- }
- def size = {
- lock()
- try log.size finally unlock()
- }
-
- // `block` should not affect constraints on typevars
- def undo[T](block: => T): T = {
- lock()
- try {
- val before = log
-
- try block
- finally undoTo(before)
- } finally unlock()
- }
-
- // if `block` evaluates to false, it should not affect constraints on typevars
- def undoUnless(block: => Boolean): Boolean = {
- lock()
- try {
- val before = log
- var result = false
-
- try result = block
- finally if (!result) undoTo(before)
-
- result
- } finally unlock()
- }
- }
-
/** A map from lists to compound types that have the given list as parents.
* This is used to avoid duplication in the computation of base type sequences and baseClasses.
* It makes use of the fact that these two operations depend only on the parents,
@@ -256,7 +164,14 @@ trait Types extends api.Types { self: SymbolTable =>
* forwarded here. Some operations are rewrapped again.
*/
trait RewrappingTypeProxy extends SimpleTypeProxy {
- protected def maybeRewrap(newtp: Type) = if (newtp eq underlying) this else rewrap(newtp)
+ protected def maybeRewrap(newtp: Type) = (
+ if (newtp eq underlying) this
+ // BoundedWildcardTypes reach here during erroneous compilation: neg/t6258
+ // Higher-kinded exclusion is because [x]CC[x] compares =:= to CC: pos/t3800
+ // Otherwise, if newtp =:= underlying, don't rewrap it.
+ else if (!newtp.isWildcard && !newtp.isHigherKinded && (newtp =:= underlying)) this
+ else rewrap(newtp)
+ )
protected def rewrap(newtp: Type): Type
// the following are all operations in class Type that are overridden in some subclass
@@ -297,7 +212,6 @@ trait Types extends api.Types { self: SymbolTable =>
abstract class TypeApiImpl extends TypeApi { this: Type =>
def declaration(name: Name): Symbol = decl(name)
- def nonPrivateDeclaration(name: Name): Symbol = nonPrivateDecl(name)
def declarations = decls
def typeArguments = typeArgs
def erasure = this match {
@@ -384,15 +298,11 @@ trait Types extends api.Types { self: SymbolTable =>
/** Is this type produced as a repair for an error? */
def isErroneous: Boolean = ErroneousCollector.collect(this)
- /** Does this type denote a reference type which can be null? */
- // def isNullable: Boolean = false
-
/** Can this type only be subtyped by bottom types?
* This is assessed to be the case if the class is final,
* and all type parameters (if any) are invariant.
*/
- def isFinalType: Boolean =
- typeSymbol.isFinal && (typeSymbol.typeParams forall symbolIsNonVariant) && prefix.isStable
+ def isFinalType = typeSymbol.hasOnlyBottomSubclasses && prefix.isStable
/** Is this type completed (i.e. not a lazy type)? */
def isComplete: Boolean = true
@@ -524,11 +434,6 @@ trait Types extends api.Types { self: SymbolTable =>
/** Only used for dependent method types. */
def resultApprox: Type = ApproximateDependentMap(resultType)
- /** If this is a TypeRef `clazz`[`T`], return the argument `T`
- * otherwise return this type
- */
- def remove(clazz: Symbol): Type = this
-
/** For a curried/nullary method or poly type its non-method result type,
* the type itself for all other types */
def finalResultType: Type = this
@@ -589,6 +494,26 @@ trait Types extends api.Types { self: SymbolTable =>
* Example: (in the below, `<List>` is the type constructor of List)
* TypeRef(pre, `<List>`, List()) is replaced by
* PolyType(X, TypeRef(pre, `<List>`, List(X)))
+ *
+ * Discussion: normalize is NOT usually what you want to be calling.
+ * The (very real) danger with normalize is that it will force types
+ * which would not otherwise have been forced, leading to mysterious
+ * behavioral differences, cycles, and other elements of mystery.
+ * Under most conditions the method you should be calling is `dealiasWiden`
+ * (see that method for more info.)
+ *
+ * Here are a few of the side-effect-trail-leaving methods called
+ * by various implementations of normalize:
+ *
+ * - sym.info
+ * - tpe.etaExpand
+ * - tpe.betaReduce
+ * - tpe.memberType
+ * - sym.nextOverriddenSymbol
+ * - constraint.inst
+ *
+ * If you've been around the compiler a while, that list must fill
+ * your heart with fear.
*/
def normalize = this // @MAT
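A tiny illustration of dealias, the operation the comment above steers callers toward instead of normalize; it assumes a reflection API that exposes dealias on Type (as newer public APIs do), and the object and alias names are invented:

import scala.reflect.runtime.universe._

object DealiasSketch extends App {
  type Names = List[String]
  val tpe = typeOf[Names]
  println(tpe)          // the alias itself (roughly DealiasSketch.Names)
  println(tpe.dealias)  // List[String] -- the alias unwrapped, nothing else forced
}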
@@ -598,6 +523,8 @@ trait Types extends api.Types { self: SymbolTable =>
/** Repeatedly apply widen and dealias until they have no effect.
* This compensates for the fact that type aliases can hide beneath
* singleton types and singleton types can hide inside type aliases.
+ * !!! - and yet it is still inadequate, because aliases and singletons
+ * might lurk in the upper bounds of an abstract type. See SI-7051.
*/
def dealiasWiden: Type = (
if (this ne widen) widen.dealiasWiden
@@ -686,16 +613,6 @@ trait Types extends api.Types { self: SymbolTable =>
def nonPrivateMember(name: Name): Symbol =
memberBasedOnName(name, BridgeAndPrivateFlags)
- /** All members with the given flags, excluding bridges.
- */
- def membersWithFlags(requiredFlags: Long): Scope =
- membersBasedOnFlags(BridgeFlags, requiredFlags)
-
- /** All non-private members with the given flags, excluding bridges.
- */
- def nonPrivateMembersWithFlags(requiredFlags: Long): Scope =
- membersBasedOnFlags(BridgeAndPrivateFlags, requiredFlags)
-
/** The non-private member with given name, admitting members with given flags `admit`.
* "Admitting" refers to the fact that members with a PRIVATE, BRIDGE, or VBRIDGE
* flag are usually excluded from findMember results, but supplying any of those flags
@@ -716,10 +633,9 @@ trait Types extends api.Types { self: SymbolTable =>
*/
def membersBasedOnFlags(excludedFlags: Long, requiredFlags: Long): Scope =
findMembers(excludedFlags, requiredFlags)
-// findMember(nme.ANYNAME, excludedFlags, requiredFlags, false).alternatives
def memberBasedOnName(name: Name, excludedFlags: Long): Symbol =
- findMember(name, excludedFlags, 0, false)
+ findMember(name, excludedFlags, 0, stableOnly = false)
/** The least type instance of given class which is a supertype
* of this type. Example:
@@ -750,7 +666,7 @@ trait Types extends api.Types { self: SymbolTable =>
)
if (trivial) this
else {
- val m = new AsSeenFromMap(pre.normalize, clazz)
+ val m = newAsSeenFromMap(pre.normalize, clazz)
val tp = m(this)
val tp1 = existentialAbstraction(m.capturedParams, tp)
@@ -770,6 +686,7 @@ trait Types extends api.Types { self: SymbolTable =>
* }}}
*/
def memberInfo(sym: Symbol): Type = {
+ require(sym ne NoSymbol, this)
sym.info.asSeenFrom(this, sym.owner)
}
@@ -830,7 +747,6 @@ trait Types extends api.Types { self: SymbolTable =>
else substThis(from, to).substSym(symsFrom, symsTo)
/** Returns all parts of this type which satisfy predicate `p` */
- def filter(p: Type => Boolean): List[Type] = new FilterTypeCollector(p) collect this
def withFilter(p: Type => Boolean) = new FilterMapForeach(p)
class FilterMapForeach(p: Type => Boolean) extends FilterTypeCollector(p){
@@ -860,9 +776,6 @@ trait Types extends api.Types { self: SymbolTable =>
/** Does this type contain a reference to this symbol? */
def contains(sym: Symbol): Boolean = new ContainsCollector(sym).collect(this)
- /** Does this type contain a reference to this type */
- def containsTp(tp: Type): Boolean = new ContainsTypeCollector(tp).collect(this)
-
/** Is this type a subtype of that type? */
def <:<(that: Type): Boolean = {
if (Statistics.canEnable) stat_<:<(that)
@@ -874,23 +787,27 @@ trait Types extends api.Types { self: SymbolTable =>
}
/** Is this type a subtype of that type in a pattern context?
- * Any type arguments on the right hand side are replaced with
+ * Dummy type arguments on the right hand side are replaced with
* fresh existentials, except for Arrays.
*
* See bug1434.scala for an example of code which would fail
* if only a <:< test were applied.
*/
- def matchesPattern(that: Type): Boolean = {
- (this <:< that) || ((this, that) match {
- case (TypeRef(_, ArrayClass, List(arg1)), TypeRef(_, ArrayClass, List(arg2))) if arg2.typeSymbol.typeParams.nonEmpty =>
- arg1 matchesPattern arg2
- case (_, TypeRef(_, _, args)) =>
- val newtp = existentialAbstraction(args map (_.typeSymbol), that)
- !(that =:= newtp) && (this <:< newtp)
- case _ =>
- false
- })
- }
+ def matchesPattern(that: Type): Boolean = (this <:< that) || (that match {
+ case ArrayTypeRef(elem2) if elem2.typeConstructor.isHigherKinded =>
+ this match {
+ case ArrayTypeRef(elem1) => elem1 matchesPattern elem2
+ case _ => false
+ }
+ case TypeRef(_, sym, args) =>
+ val that1 = existentialAbstraction(args map (_.typeSymbol), that)
+ (that ne that1) && (this <:< that1) && {
+ log(s"$this.matchesPattern($that) depended on discarding args and testing <:< $that1")
+ true
+ }
+ case _ =>
+ false
+ })
def stat_<:<(that: Type): Boolean = {
if (Statistics.canEnable) Statistics.incCounter(subtypeCount)
@@ -921,12 +838,7 @@ trait Types extends api.Types { self: SymbolTable =>
(this eq that) ||
(if (explainSwitch) explain("=", isSameType, this, that)
else isSameType(this, that))
- );
-
- /** Does this type implement symbol `sym` with same or stronger type? */
- def specializes(sym: Symbol): Boolean =
- if (explainSwitch) explain("specializes", specializesSym, this, sym)
- else specializesSym(this, sym)
+ )
/** Is this type close enough to that type so that members
* with the two type would override each other?
@@ -943,7 +855,7 @@ trait Types extends api.Types { self: SymbolTable =>
def matches(that: Type): Boolean = matchesType(this, that, !phase.erasedTypes)
/** Same as matches, except that non-method types are always assumed to match. */
- def looselyMatches(that: Type): Boolean = matchesType(this, that, true)
+ def looselyMatches(that: Type): Boolean = matchesType(this, that, alwaysMatchSimple = true)
/** The shortest sorted upwards closed array of types that contains
* this type as first element.
@@ -1072,69 +984,66 @@ trait Types extends api.Types { self: SymbolTable =>
}
def findMembers(excludedFlags: Long, requiredFlags: Long): Scope = {
- // if this type contains type variables, put them to sleep for a while -- don't just wipe them out by
- // replacing them by the corresponding type parameter, as that messes up (e.g.) type variables in type refinements
- // without this, the matchesType call would lead to type variables on both sides
- // of a subtyping/equality judgement, which can lead to recursive types being constructed.
- // See (t0851) for a situation where this happens.
- val suspension: List[TypeVar] = if (this.isGround) null else suspendTypeVarsInType(this)
-
- if (Statistics.canEnable) Statistics.incCounter(findMembersCount)
- val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, findMembersNanos) else null
-
- //Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
- var members: Scope = null
- var required = requiredFlags
- var excluded = excludedFlags | DEFERRED
- var continue = true
- var self: Type = null
- while (continue) {
- continue = false
- val bcs0 = baseClasses
- var bcs = bcs0
- while (!bcs.isEmpty) {
- val decls = bcs.head.info.decls
- var entry = decls.elems
- while (entry ne null) {
- val sym = entry.sym
- val flags = sym.flags
- if ((flags & required) == required) {
- val excl = flags & excluded
- if (excl == 0L &&
- (// omit PRIVATE LOCALS unless selector class is contained in class owning the def.
- (bcs eq bcs0) ||
- (flags & PrivateLocal) != PrivateLocal ||
- (bcs0.head.hasTransOwner(bcs.head)))) {
- if (members eq null) members = newFindMemberScope
- var others: ScopeEntry = members.lookupEntry(sym.name)
- var symtpe: Type = null
- while ((others ne null) && {
- val other = others.sym
- (other ne sym) &&
- ((other.owner eq sym.owner) ||
- (flags & PRIVATE) != 0 || {
- if (self eq null) self = narrowForFindMember(this)
- if (symtpe eq null) symtpe = self.memberType(sym)
- !(self.memberType(other) matches symtpe)
- })}) {
- others = members lookupNextEntry others
+ def findMembersInternal: Scope = {
+ var members: Scope = null
+ if (Statistics.canEnable) Statistics.incCounter(findMembersCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, findMembersNanos) else null
+
+ //Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
+ var required = requiredFlags
+ var excluded = excludedFlags | DEFERRED
+ var continue = true
+ var self: Type = null
+ while (continue) {
+ continue = false
+ val bcs0 = baseClasses
+ var bcs = bcs0
+ while (!bcs.isEmpty) {
+ val decls = bcs.head.info.decls
+ var entry = decls.elems
+ while (entry ne null) {
+ val sym = entry.sym
+ val flags = sym.flags
+ if ((flags & required) == required) {
+ val excl = flags & excluded
+ if (excl == 0L &&
+ (// omit PRIVATE LOCALS unless selector class is contained in class owning the def.
+ (bcs eq bcs0) ||
+ (flags & PrivateLocal) != PrivateLocal ||
+ (bcs0.head.hasTransOwner(bcs.head)))) {
+ if (members eq null) members = newFindMemberScope
+ var others: ScopeEntry = members.lookupEntry(sym.name)
+ var symtpe: Type = null
+ while ((others ne null) && {
+ val other = others.sym
+ (other ne sym) &&
+ ((other.owner eq sym.owner) ||
+ (flags & PRIVATE) != 0 || {
+ if (self eq null) self = narrowForFindMember(this)
+ if (symtpe eq null) symtpe = self.memberType(sym)
+ !(self.memberType(other) matches symtpe)
+ })}) {
+ others = members lookupNextEntry others
+ }
+ if (others eq null) members enter sym
+ } else if (excl == DEFERRED) {
+ continue = true
}
- if (others eq null) members enter sym
- } else if (excl == DEFERRED) {
- continue = true
}
- }
- entry = entry.next
- } // while (entry ne null)
- // excluded = excluded | LOCAL
- bcs = bcs.tail
- } // while (!bcs.isEmpty)
- required |= DEFERRED
- excluded &= ~(DEFERRED.toLong)
- } // while (continue)
- if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
- if (suspension ne null) suspension foreach (_.suspended = false)
- if (members eq null) EmptyScope else members
+ entry = entry.next
+ } // while (entry ne null)
+ // excluded = excluded | LOCAL
+ bcs = bcs.tail
+ } // while (!bcs.isEmpty)
+ required |= DEFERRED
+ excluded &= ~(DEFERRED.toLong)
+ } // while (continue)
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ if (members eq null) EmptyScope else members
+ }
+
+ if (this.isGround) findMembersInternal
+ else suspendingTypeVars(typeVarsInType(this))(findMembersInternal)
}
/**
@@ -1148,102 +1057,98 @@ trait Types extends api.Types { self: SymbolTable =>
*/
//TODO: use narrow only for modules? (correct? efficiency gain?)
def findMember(name: Name, excludedFlags: Long, requiredFlags: Long, stableOnly: Boolean): Symbol = {
- // if this type contains type variables, put them to sleep for a while -- don't just wipe them out by
- // replacing them by the corresponding type parameter, as that messes up (e.g.) type variables in type refinements
- // without this, the matchesType call would lead to type variables on both sides
- // of a subtyping/equality judgement, which can lead to recursive types being constructed.
- // See (t0851) for a situation where this happens.
- val suspension: List[TypeVar] = if (this.isGround) null else suspendTypeVarsInType(this)
-
- if (Statistics.canEnable) Statistics.incCounter(findMemberCount)
- val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, findMemberNanos) else null
-
- //Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
- var member: Symbol = NoSymbol
- var members: List[Symbol] = null
- var lastM: ::[Symbol] = null
- var membertpe: Type = null
- var required = requiredFlags
- var excluded = excludedFlags | DEFERRED
- var continue = true
- var self: Type = null
-
- while (continue) {
- continue = false
- val bcs0 = baseClasses
- var bcs = bcs0
- while (!bcs.isEmpty) {
- val decls = bcs.head.info.decls
- var entry = decls.lookupEntry(name)
- while (entry ne null) {
- val sym = entry.sym
- val flags = sym.flags
- if ((flags & required) == required) {
- val excl = flags & excluded
- if (excl == 0L &&
- (// omit PRIVATE LOCALS unless selector class is contained in class owning the def.
- (bcs eq bcs0) ||
- (flags & PrivateLocal) != PrivateLocal ||
- (bcs0.head.hasTransOwner(bcs.head)))) {
- if (name.isTypeName || stableOnly && sym.isStable) {
- if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
- if (suspension ne null) suspension foreach (_.suspended = false)
- return sym
- } else if (member eq NoSymbol) {
- member = sym
- } else if (members eq null) {
- if ((member ne sym) &&
- ((member.owner eq sym.owner) ||
- (flags & PRIVATE) != 0 || {
- if (self eq null) self = narrowForFindMember(this)
- if (membertpe eq null) membertpe = self.memberType(member)
- !(membertpe matches self.memberType(sym))
- })) {
- lastM = new ::(sym, null)
- members = member :: lastM
- }
- } else {
- var others: List[Symbol] = members
- var symtpe: Type = null
- while ((others ne null) && {
- val other = others.head
- (other ne sym) &&
- ((other.owner eq sym.owner) ||
+ def findMemberInternal: Symbol = {
+ var member: Symbol = NoSymbol
+ var members: List[Symbol] = null
+ var lastM: ::[Symbol] = null
+ if (Statistics.canEnable) Statistics.incCounter(findMemberCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, findMemberNanos) else null
+
+ //Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
+ var membertpe: Type = null
+ var required = requiredFlags
+ var excluded = excludedFlags | DEFERRED
+ var continue = true
+ var self: Type = null
+
+ while (continue) {
+ continue = false
+ val bcs0 = baseClasses
+ var bcs = bcs0
+ while (!bcs.isEmpty) {
+ val decls = bcs.head.info.decls
+ var entry = decls.lookupEntry(name)
+ while (entry ne null) {
+ val sym = entry.sym
+ val flags = sym.flags
+ if ((flags & required) == required) {
+ val excl = flags & excluded
+ if (excl == 0L &&
+ (// omit PRIVATE LOCALS unless selector class is contained in class owning the def.
+ (bcs eq bcs0) ||
+ (flags & PrivateLocal) != PrivateLocal ||
+ (bcs0.head.hasTransOwner(bcs.head)))) {
+ if (name.isTypeName || stableOnly && sym.isStable) {
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ return sym
+ } else if (member eq NoSymbol) {
+ member = sym
+ } else if (members eq null) {
+ if ((member ne sym) &&
+ ((member.owner eq sym.owner) ||
(flags & PRIVATE) != 0 || {
if (self eq null) self = narrowForFindMember(this)
- if (symtpe eq null) symtpe = self.memberType(sym)
- !(self.memberType(other) matches symtpe)
- })}) {
- others = others.tail
- }
- if (others eq null) {
- val lastM1 = new ::(sym, null)
- lastM.tl = lastM1
- lastM = lastM1
+ if (membertpe eq null) membertpe = self.memberType(member)
+ !(membertpe matches self.memberType(sym))
+ })) {
+ lastM = new ::(sym, null)
+ members = member :: lastM
+ }
+ } else {
+ var others: List[Symbol] = members
+ var symtpe: Type = null
+ while ((others ne null) && {
+ val other = others.head
+ (other ne sym) &&
+ ((other.owner eq sym.owner) ||
+ (flags & PRIVATE) != 0 || {
+ if (self eq null) self = narrowForFindMember(this)
+ if (symtpe eq null) symtpe = self.memberType(sym)
+ !(self.memberType(other) matches symtpe)
+ })}) {
+ others = others.tail
+ }
+ if (others eq null) {
+ val lastM1 = new ::(sym, null)
+ lastM.tl = lastM1
+ lastM = lastM1
+ }
}
+ } else if (excl == DEFERRED) {
+ continue = true
}
- } else if (excl == DEFERRED) {
- continue = true
}
- }
- entry = decls lookupNextEntry entry
- } // while (entry ne null)
- // excluded = excluded | LOCAL
- bcs = if (name == nme.CONSTRUCTOR) Nil else bcs.tail
- } // while (!bcs.isEmpty)
- required |= DEFERRED
- excluded &= ~(DEFERRED.toLong)
- } // while (continue)
- if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
- if (suspension ne null) suspension foreach (_.suspended = false)
- if (members eq null) {
- if (member == NoSymbol) if (Statistics.canEnable) Statistics.incCounter(noMemberCount)
- member
- } else {
- if (Statistics.canEnable) Statistics.incCounter(multMemberCount)
- lastM.tl = Nil
- baseClasses.head.newOverloaded(this, members)
+ entry = decls lookupNextEntry entry
+ } // while (entry ne null)
+ // excluded = excluded | LOCAL
+ bcs = if (name == nme.CONSTRUCTOR) Nil else bcs.tail
+ } // while (!bcs.isEmpty)
+ required |= DEFERRED
+ excluded &= ~(DEFERRED.toLong)
+ } // while (continue)
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ if (members eq null) {
+ if (member == NoSymbol) if (Statistics.canEnable) Statistics.incCounter(noMemberCount)
+ member
+ } else {
+ if (Statistics.canEnable) Statistics.incCounter(multMemberCount)
+ lastM.tl = Nil
+ baseClasses.head.newOverloaded(this, members)
+ }
}
+
+ if (this.isGround) findMemberInternal
+ else suspendingTypeVars(typeVarsInType(this))(findMemberInternal)
}
/** The (existential or otherwise) skolems and existentially quantified variables which are free in this type */
@@ -1272,10 +1177,6 @@ trait Types extends api.Types { self: SymbolTable =>
def setAnnotations(annots: List[AnnotationInfo]): Type = annotatedType(annots, this)
def withAnnotations(annots: List[AnnotationInfo]): Type = annotatedType(annots, this)
- /** Remove any annotations from this type and from any
- * types embedded in this type. */
- def stripAnnotations = StripAnnotationsMap(this)
-
/** Set the self symbol of an annotated type, or do nothing
* otherwise. */
def withSelfsym(sym: Symbol) = this
@@ -1368,7 +1269,6 @@ trait Types extends api.Types { self: SymbolTable =>
override def baseType(clazz: Symbol): Type = this
override def safeToString: String = "<error>"
override def narrow: Type = this
- // override def isNullable: Boolean = true
override def kind = "ErrorType"
}
@@ -1378,7 +1278,6 @@ trait Types extends api.Types { self: SymbolTable =>
case object WildcardType extends Type {
override def isWildcard = true
override def safeToString: String = "?"
- // override def isNullable: Boolean = true
override def kind = "WildcardType"
}
/** BoundedWildcardTypes, used only during type inference, are created in
@@ -1403,7 +1302,6 @@ trait Types extends api.Types { self: SymbolTable =>
case object NoType extends Type {
override def isTrivial: Boolean = true
override def safeToString: String = "<notype>"
- // override def isNullable: Boolean = true
override def kind = "NoType"
}
@@ -1413,7 +1311,6 @@ trait Types extends api.Types { self: SymbolTable =>
override def isStable: Boolean = true
override def prefixString = ""
override def safeToString: String = "<noprefix>"
- // override def isNullable: Boolean = true
override def kind = "NoPrefixType"
}
@@ -1426,7 +1323,6 @@ trait Types extends api.Types { self: SymbolTable =>
abort(s"ThisType($sym) for sym which is not a class")
}
- //assert(sym.isClass && !sym.isModuleClass || sym.isRoot, sym)
override def isTrivial: Boolean = sym.isPackageClass
override def isNotNull = true
override def typeSymbol = sym
@@ -1452,7 +1348,7 @@ trait Types extends api.Types { self: SymbolTable =>
def apply(sym: Symbol): Type = (
if (!phase.erasedTypes) unique(new UniqueThisType(sym))
else if (sym.isImplClass) sym.typeOfThis
- else sym.tpe
+ else sym.tpe_*
)
}
@@ -1467,7 +1363,6 @@ trait Types extends api.Types { self: SymbolTable =>
}
override def isGround = sym.isPackageClass || pre.isGround
- // override def isNullable = underlying.isNullable
override def isNotNull = underlying.isNotNull
private[reflect] var underlyingCache: Type = NoType
private[reflect] var underlyingPeriod = NoPeriod
@@ -1523,7 +1418,7 @@ trait Types extends api.Types { self: SymbolTable =>
tpe.underlyingPeriod = currentPeriod
if (!isValid(period)) {
// [Eugene to Paul] needs review
- tpe.underlyingCache = if (tpe.sym == NoSymbol) ThisType(rootMirror.RootClass) else tpe.pre.memberType(tpe.sym).resultType;
+ tpe.underlyingCache = if (tpe.sym == NoSymbol) ThisType(rootMirror.RootClass) else tpe.pre.memberType(tpe.sym).resultType
assert(tpe.underlyingCache ne tpe, tpe)
}
}
@@ -1535,7 +1430,8 @@ trait Types extends api.Types { self: SymbolTable =>
if (trivial == UNKNOWN) trivial = fromBoolean(thistpe.isTrivial && supertpe.isTrivial)
toBoolean(trivial)
}
- override def isNotNull = true;
+ override def isNotNull = true
+
override def typeSymbol = thistpe.typeSymbol
override def underlying = supertpe
override def prefix: Type = supertpe.prefix
@@ -1565,11 +1461,10 @@ trait Types extends api.Types { self: SymbolTable =>
}
private def lowerString = if (emptyLowerBound) "" else " >: " + lo
private def upperString = if (emptyUpperBound) "" else " <: " + hi
- private def emptyLowerBound = typeIsNothing(lo)
- private def emptyUpperBound = typeIsAny(hi)
+ private def emptyLowerBound = typeIsNothing(lo) || lo.isWildcard
+ private def emptyUpperBound = typeIsAny(hi) || hi.isWildcard
def isEmptyBounds = emptyLowerBound && emptyUpperBound
- // override def isNullable: Boolean = NullClass.tpe <:< lo;
override def safeToString = lowerString + upperString
override def kind = "TypeBoundsType"
}
@@ -1654,15 +1549,44 @@ trait Types extends api.Types { self: SymbolTable =>
override def isStructuralRefinement: Boolean =
typeSymbol.isAnonOrRefinementClass && (decls exists symbolIsPossibleInRefinement)
- // override def isNullable: Boolean =
- // parents forall (p => p.isNullable && !p.typeSymbol.isAbstractType);
-
override def safeToString: String = parentsString(parents) + (
(if (settings.debug.value || parents.isEmpty || (decls.elems ne null))
fullyInitializeScope(decls).mkString("{", "; ", "}") else "")
)
}
+ protected def computeBaseClasses(tpe: Type): List[Symbol] = {
+ val parents = tpe.parents // adriaan says tpe.parents does work sometimes, so call it only once
+ val baseTail = (
+ if (parents.isEmpty || parents.head.isInstanceOf[PackageTypeRef]) Nil
+ else {
+ //Console.println("computing base classes of " + typeSymbol + " at phase " + phase);//DEBUG
+ // optimized, since this seems to be performance critical
+ val superclazz = parents.head // parents.isEmpty was already excluded
+ var mixins = parents.tail
+ val sbcs = superclazz.baseClasses
+ var bcs = sbcs
+ def isNew(clazz: Symbol): Boolean = (
+ superclazz.baseTypeIndex(clazz) < 0 &&
+ { var p = bcs
+ while ((p ne sbcs) && (p.head != clazz)) p = p.tail
+ p eq sbcs
+ }
+ )
+ while (!mixins.isEmpty) {
+ def addMixinBaseClasses(mbcs: List[Symbol]): List[Symbol] =
+ if (mbcs.isEmpty) bcs
+ else if (isNew(mbcs.head)) mbcs.head :: addMixinBaseClasses(mbcs.tail)
+ else addMixinBaseClasses(mbcs.tail)
+ bcs = addMixinBaseClasses(mixins.head.baseClasses)
+ mixins = mixins.tail
+ }
+ bcs
+ }
+ )
+ tpe.typeSymbol :: baseTail
+ }
+
protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) = {
val period = tpe.baseTypeSeqPeriod
if (period != currentPeriod) {
@@ -1682,7 +1606,7 @@ trait Types extends api.Types { self: SymbolTable =>
val paramToVarMap = varToParamMap map (_.swap)
val varToParam = new TypeMap {
def apply(tp: Type) = varToParamMap get tp match {
- case Some(sym) => sym.tpe
+ case Some(sym) => sym.tpe_*
case _ => mapOver(tp)
}
}
@@ -1701,7 +1625,7 @@ trait Types extends api.Types { self: SymbolTable =>
tpe.baseTypeSeqCache = undetBaseTypeSeq
tpe.baseTypeSeqCache =
if (tpe.typeSymbol.isRefinementClass)
- tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe)
+ tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe_*)
else
compoundBaseTypeSeq(tpe)
} finally {
@@ -1723,41 +1647,61 @@ trait Types extends api.Types { self: SymbolTable =>
throw new TypeError("illegal cyclic inheritance involving " + tpe.typeSymbol)
}
- protected def defineBaseClassesOfCompoundType(tpe: CompoundType) = {
- def computeBaseClasses: List[Symbol] =
- if (tpe.parents.isEmpty) List(tpe.typeSymbol)
- else {
- //Console.println("computing base classes of " + typeSymbol + " at phase " + phase);//DEBUG
- // optimized, since this seems to be performance critical
- val superclazz = tpe.firstParent
- var mixins = tpe.parents.tail
- val sbcs = superclazz.baseClasses
- var bcs = sbcs
- def isNew(clazz: Symbol): Boolean =
- superclazz.baseTypeIndex(clazz) < 0 &&
- { var p = bcs;
- while ((p ne sbcs) && (p.head != clazz)) p = p.tail;
- p eq sbcs
- }
- while (!mixins.isEmpty) {
- def addMixinBaseClasses(mbcs: List[Symbol]): List[Symbol] =
- if (mbcs.isEmpty) bcs
- else if (isNew(mbcs.head)) mbcs.head :: addMixinBaseClasses(mbcs.tail)
- else addMixinBaseClasses(mbcs.tail)
- bcs = addMixinBaseClasses(mixins.head.baseClasses)
- mixins = mixins.tail
+ object baseClassesCycleMonitor {
+ private var open: List[Symbol] = Nil
+ @inline private def cycleLog(msg: => String) {
+ if (settings.debug.value)
+ Console.err.println(msg)
+ }
+ def size = open.size
+ def push(clazz: Symbol) {
+ cycleLog("+ " + (" " * size) + clazz.fullNameString)
+ open ::= clazz
+ }
+ def pop(clazz: Symbol) {
+ assert(open.head eq clazz, (clazz, open))
+ open = open.tail
+ }
+ def isOpen(clazz: Symbol) = open contains clazz
+ }
+
+ protected def defineBaseClassesOfCompoundType(tpe: CompoundType) {
+ def define() = defineBaseClassesOfCompoundType(tpe, force = false)
+ if (!breakCycles || isPastTyper) define()
+ else tpe match {
+ // non-empty parents helpfully excludes all package classes
+ case tpe @ ClassInfoType(_ :: _, _, clazz) if !clazz.isAnonOrRefinementClass =>
+ // Cycle: force update
+ if (baseClassesCycleMonitor isOpen clazz)
+ defineBaseClassesOfCompoundType(tpe, force = true)
+ else {
+ baseClassesCycleMonitor push clazz
+ try define()
+ finally baseClassesCycleMonitor pop clazz
}
- tpe.typeSymbol :: bcs
- }
+ case _ =>
+ define()
+ }
+ }
+ private def defineBaseClassesOfCompoundType(tpe: CompoundType, force: Boolean) {
val period = tpe.baseClassesPeriod
- if (period != currentPeriod) {
+ if (period == currentPeriod) {
+ if (force && breakCycles) {
+ def what = tpe.typeSymbol + " in " + tpe.typeSymbol.owner.fullNameString
+ val bcs = computeBaseClasses(tpe)
+ tpe.baseClassesCache = bcs
+ warning(s"Breaking cycle in base class computation of $what ($bcs)")
+ }
+ }
+ else {
tpe.baseClassesPeriod = currentPeriod
if (!isValidForBaseClasses(period)) {
val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, baseClassesNanos) else null
try {
tpe.baseClassesCache = null
- tpe.baseClassesCache = tpe.memo(computeBaseClasses)(tpe.typeSymbol :: _.baseClasses.tail)
- } finally {
+ tpe.baseClassesCache = tpe.memo(computeBaseClasses(tpe))(tpe.typeSymbol :: _.baseClasses.tail)
+ }
+ finally {
if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
}
}
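
computeBaseClasses prepends the type's own symbol to the superclass linearization and then folds in each mixin's new base classes; the cycle monitor only forces a result for genuinely cyclic computations. A sketch of the resulting linearization through the public reflection API (A, B, C, D are illustrative):

    import scala.reflect.runtime.universe._

    trait A
    trait B extends A
    trait C extends A
    class D extends B with C

    object BaseClassesSketch extends App {
      println(typeOf[D].baseClasses map (_.name.toString))
      // expected shape: List(D, C, B, A, Object, Any)
    }
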
@@ -1953,7 +1897,7 @@ trait Types extends api.Types { self: SymbolTable =>
case tr @ TypeRef(_, sym, args) if args.nonEmpty =>
val tparams = tr.initializedTypeParams
if (settings.debug.value && !sameLength(tparams, args))
- debugwarn("Mismatched zip in computeRefs(): " + sym.info.typeParams + ", " + args)
+ devWarning(s"Mismatched zip in computeRefs(): ${sym.info.typeParams}, $args")
foreach2(tparams, args) { (tparam1, arg) =>
if (arg contains tparam) {
@@ -1996,7 +1940,7 @@ trait Types extends api.Types { self: SymbolTable =>
var change = false
for ((from, targets) <- refs(NonExpansive).iterator)
for (target <- targets) {
- var thatInfo = classInfo(target)
+ val thatInfo = classInfo(target)
if (thatInfo.state != Initialized)
change = change | thatInfo.propagate()
addRefs(NonExpansive, from, thatInfo.getRefs(NonExpansive, target))
@@ -2004,7 +1948,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
for ((from, targets) <- refs(Expansive).iterator)
for (target <- targets) {
- var thatInfo = classInfo(target)
+ val thatInfo = classInfo(target)
if (thatInfo.state != Initialized)
change = change | thatInfo.propagate()
addRefs(Expansive, from, thatInfo.getRefs(NonExpansive, target))
@@ -2015,11 +1959,6 @@ trait Types extends api.Types { self: SymbolTable =>
change
}
- // override def isNullable: Boolean =
- // symbol == AnyClass ||
- // symbol != NothingClass && (symbol isSubClass ObjectClass) && !(symbol isSubClass NonNullClass);
-
- // override def isNonNull: Boolean = symbol == NonNullClass || super.isNonNull;
override def kind = "ClassInfoType"
override def safeToString =
@@ -2044,8 +1983,6 @@ trait Types extends api.Types { self: SymbolTable =>
extends ClassInfoType(List(), decls, clazz)
/** A class representing a constant type.
- *
- * @param value ...
*/
abstract case class ConstantType(value: Constant) extends SingletonType with ConstantTypeApi {
override def underlying: Type = value.tpe
@@ -2055,8 +1992,6 @@ trait Types extends api.Types { self: SymbolTable =>
override def deconst: Type = underlying
override def safeToString: String =
underlying.toString + "(" + value.escapedStringValue + ")"
- // override def isNullable: Boolean = value.value eq null
- // override def isNonNull: Boolean = value.value ne null
override def kind = "ConstantType"
}
@@ -2086,7 +2021,7 @@ trait Types extends api.Types { self: SymbolTable =>
// it later turns out not to have kind *. See SI-4070. Only
// logging it for now.
if (sym.typeParams.size != args.size)
- log("!!! %s.transform(%s), but tparams.isEmpty and args=".format(this, tp, args))
+ devWarning(s"$this.transform($tp), but tparams.isEmpty and args=$args")
asSeenFromOwner(tp).instantiateTypeParams(sym.typeParams, args)
}
@@ -2100,7 +2035,7 @@ trait Types extends api.Types { self: SymbolTable =>
class ModuleTypeRef(pre0: Type, sym0: Symbol) extends NoArgsTypeRef(pre0, sym0) with ClassTypeRef {
require(sym.isModuleClass, sym)
private[this] var narrowedCache: Type = _
- override def isStable = true
+ override def isStable = pre.isStable
override def narrow = {
if (narrowedCache eq null)
narrowedCache = singleType(pre, sym.sourceModule)
@@ -2116,6 +2051,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
class PackageTypeRef(pre0: Type, sym0: Symbol) extends ModuleTypeRef(pre0, sym0) {
require(sym.isPackageClass, sym)
+ override def isStable = true
override protected def finishPrefix(rest: String) = packagePrefix + rest
}
class RefinementTypeRef(pre0: Type, sym0: Symbol) extends NoArgsTypeRef(pre0, sym0) with ClassTypeRef {
@@ -2244,7 +2180,7 @@ trait Types extends api.Types { self: SymbolTable =>
else ErrorType
}
- // isHKSubType0 introduces synthetic type params so that
+ // isHKSubType introduces synthetic type params so that
// betaReduce can first apply sym.info to typeArgs before calling
// asSeenFrom. asSeenFrom then skips synthetic type params, which
// are used to reduce HO subtyping to first-order subtyping, but
@@ -2318,7 +2254,6 @@ trait Types extends api.Types { self: SymbolTable =>
}
override def isStable = bounds.hi.typeSymbol isSubClass SingletonClass
override def bounds = thisInfo.bounds
- // def transformInfo(tp: Type): Type = appliedType(tp.asSeenFrom(pre, sym.owner), typeArgsOrDummies)
override protected[Types] def baseTypeSeqImpl: BaseTypeSeq = transform(bounds.hi).baseTypeSeq prepend this
override def kind = "AbstractTypeRef"
}
@@ -2351,7 +2286,7 @@ trait Types extends api.Types { self: SymbolTable =>
h = mix(h, pre.hashCode)
h = mix(h, sym.hashCode)
if (hasArgs)
- finalizeHash(mix(h, args.hashCode), 3)
+ finalizeHash(mix(h, args.hashCode()), 3)
else
finalizeHash(h, 2)
}
@@ -2475,7 +2410,7 @@ trait Types extends api.Types { self: SymbolTable =>
case RepeatedParamClass => args.head + "*"
case ByNameParamClass => "=> " + args.head
case _ =>
- def targs = normalize.typeArgs
+ def targs = dealiasWiden.typeArgs
if (isFunctionType(this)) {
// Aesthetics: printing Function1 as T => R rather than (T) => R
@@ -2483,7 +2418,7 @@ trait Types extends api.Types { self: SymbolTable =>
// from (T1, T2) => R.
targs match {
case in :: out :: Nil if !isTupleType(in) =>
- // A => B => C should be (A => B) => C or A => (B => C).
+ // A => B => C should be (A => B) => C or A => (B => C)
// Also if A is byname, then we want (=> A) => B because => is right associative and => A => B
// would mean => (A => B) which is a different type
val in_s = if (isFunctionType(in) || isByNameParamType(in)) "(" + in + ")" else "" + in
@@ -2839,7 +2774,7 @@ trait Types extends api.Types { self: SymbolTable =>
val tvars = quantifiedFresh map (tparam => TypeVar(tparam))
val underlying1 = underlying.instantiateTypeParams(quantified, tvars) // fuse subst quantified -> quantifiedFresh -> tvars
op(underlying1) && {
- solve(tvars, quantifiedFresh, quantifiedFresh map (x => 0), false, depth) &&
+ solve(tvars, quantifiedFresh, quantifiedFresh map (_ => Invariant), upper = false, depth) &&
isWithinBounds(NoPrefix, NoSymbol, quantifiedFresh, tvars map (_.constr.inst))
}
}
@@ -2857,9 +2792,13 @@ trait Types extends api.Types { self: SymbolTable =>
override def kind = "OverloadedType"
}
- def overloadedType(pre: Type, alternatives: List[Symbol]): Type =
- if (alternatives.tail.isEmpty) pre memberType alternatives.head
- else OverloadedType(pre, alternatives)
+ /** The canonical creator for OverloadedTypes.
+ */
+ def overloadedType(pre: Type, alternatives: List[Symbol]): Type = alternatives match {
+ case Nil => NoType
+ case alt :: Nil => pre memberType alt
+ case _ => OverloadedType(pre, alternatives)
+ }
/** A class remembering a type instantiation for a set of overloaded

* polymorphic symbols.
@@ -2867,23 +2806,12 @@ trait Types extends api.Types { self: SymbolTable =>
*/
case class AntiPolyType(pre: Type, targs: List[Type]) extends Type {
override def safeToString =
- pre.toString + targs.mkString("(with type arguments ", ", ", ")");
+ pre.toString + targs.mkString("(with type arguments ", ", ", ")")
+
override def memberType(sym: Symbol) = appliedType(pre.memberType(sym), targs)
-// override def memberType(sym: Symbol) = pre.memberType(sym) match {
-// case PolyType(tparams, restp) =>
-// restp.subst(tparams, targs)
-// /* I don't think this is needed, as existential types close only over value types
-// case ExistentialType(tparams, qtpe) =>
-// existentialAbstraction(tparams, qtpe.memberType(sym))
-// */
-// case ErrorType =>
-// ErrorType
-// }
override def kind = "AntiPolyType"
}
- //private var tidCount = 0 //DEBUG
-
object HasTypeMember {
def apply(name: TypeName, tp: Type): Type = {
val bound = refinedType(List(WildcardType), NoSymbol)
@@ -2898,13 +2826,10 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
- // Not used yet.
- object HasTypeParams {
- def unapply(tp: Type): Option[(List[Symbol], Type)] = tp match {
- case AnnotatedType(_, tp, _) => unapply(tp)
- case ExistentialType(tparams, qtpe) => Some((tparams, qtpe))
- case PolyType(tparams, restpe) => Some((tparams, restpe))
- case _ => None
+ object ArrayTypeRef {
+ def unapply(tp: Type) = tp match {
+ case TypeRef(_, ArrayClass, arg :: Nil) => Some(arg)
+ case _ => None
}
}
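
ArrayTypeRef above is a convenience extractor for a TypeRef whose symbol is ArrayClass and which carries exactly one argument. A user-level sketch of the same extractor shape against the public reflection API (ArrayElem is a hypothetical name):

    import scala.reflect.runtime.universe._

    object ArrayElemSketch extends App {
      object ArrayElem {
        def unapply(tp: Type): Option[Type] = tp match {
          case TypeRef(_, sym, arg :: Nil) if sym == definitions.ArrayClass => Some(arg)
          case _                                                            => None
        }
      }
      typeOf[Array[Int]] match {
        case ArrayElem(elem) => println(elem)   // Int
        case _               => println("not an array type")
      }
    }
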
@@ -2977,20 +2902,6 @@ trait Types extends api.Types { self: SymbolTable =>
createTypeVar(tparam.tpeHK, deriveConstraint(tparam), Nil, tparam.typeParams, untouchable)
}
- /** Repack existential types, otherwise they sometimes get unpacked in the
- * wrong location (type inference comes up with an unexpected skolem)
- */
- def repackExistential(tp: Type): Type = (
- if (tp == NoType) tp
- else existentialAbstraction(existentialsInType(tp), tp)
- )
-
- def containsExistential(tpe: Type) =
- tpe exists typeIsExistentiallyBound
-
- def existentialsInType(tpe: Type) =
- tpe withFilter typeIsExistentiallyBound map (_.typeSymbol)
-
/** Precondition: params.nonEmpty. (args.nonEmpty enforced structurally.)
*/
class HKTypeVar(
@@ -3001,7 +2912,6 @@ trait Types extends api.Types { self: SymbolTable =>
require(params.nonEmpty, this)
override def isHigherKinded = true
- override protected def typeVarString = params.map(_.name).mkString("[", ", ", "]=>" + originName)
}
/** Precondition: zipped params/args nonEmpty. (Size equivalence enforced structurally.)
@@ -3016,10 +2926,6 @@ trait Types extends api.Types { self: SymbolTable =>
override def params: List[Symbol] = zippedArgs map (_._1)
override def typeArgs: List[Type] = zippedArgs map (_._2)
-
- override protected def typeVarString = (
- zippedArgs map { case (p, a) => p.name + "=" + a } mkString (origin + "[", ", ", "]")
- )
}
trait UntouchableTypeVar extends TypeVar {
@@ -3049,7 +2955,7 @@ trait Types extends api.Types { self: SymbolTable =>
* Precondition for this class, enforced structurally: args.isEmpty && params.isEmpty.
*/
abstract case class TypeVar(
- val origin: Type,
+ origin: Type,
var constr: TypeConstraint
) extends Type {
@@ -3069,7 +2975,6 @@ trait Types extends api.Types { self: SymbolTable =>
* in operations that are exposed from types. Hence, no syncing of `constr`
* or `encounteredHigherLevel` or `suspended` accesses should be necessary.
*/
-// var constr = constr0
def instValid = constr.instValid
override def isGround = instValid && constr.inst.isGround
@@ -3122,7 +3027,10 @@ trait Types extends api.Types { self: SymbolTable =>
// invariant: before mutating constr, save old state in undoLog
// (undoLog is used to reset constraints to avoid piling up unrelated ones)
def setInst(tp: Type) {
-// assert(!(tp containsTp this), this)
+ if (tp eq this) {
+ log(s"TypeVar cycle: called setInst passing $this to itself.")
+ return
+ }
undoLog record this
// if we were compared against later typeskolems, repack the existential,
// because skolems are only compatible if they were created at the same level
@@ -3271,16 +3179,19 @@ trait Types extends api.Types { self: SymbolTable =>
def registerTypeEquality(tp: Type, typeVarLHS: Boolean): Boolean = {
// println("regTypeEq: "+(safeToString, debugString(tp), tp.getClass, if (typeVarLHS) "in LHS" else "in RHS", if (suspended) "ZZ" else if (constr.instValid) "IV" else "")) //@MDEBUG
-// println("constr: "+ constr)
- def checkIsSameType(tp: Type) =
- if(typeVarLHS) constr.inst =:= tp
- else tp =:= constr.inst
+ def checkIsSameType(tp: Type) = (
+ if (typeVarLHS) constr.inst =:= tp
+ else tp =:= constr.inst
+ )
if (suspended) tp =:= origin
else if (constr.instValid) checkIsSameType(tp)
else isRelatable(tp) && {
val newInst = wildcardToTypeVarMap(tp)
- (constr isWithinBounds newInst) && { setInst(tp); true }
+ (constr isWithinBounds newInst) && {
+ setInst(newInst)
+ true
+ }
}
}
@@ -3291,7 +3202,7 @@ trait Types extends api.Types { self: SymbolTable =>
* (`T` corresponds to @param sym)
*/
def registerTypeSelection(sym: Symbol, tp: Type): Boolean = {
- registerBound(HasTypeMember(sym.name.toTypeName, tp), false)
+ registerBound(HasTypeMember(sym.name.toTypeName, tp), isLowerBound = false)
}
private def isSkolemAboveLevel(tp: Type) = tp.typeSymbol match {
@@ -3343,7 +3254,6 @@ trait Types extends api.Types { self: SymbolTable =>
).flatten map (s => s.decodedName + tparamsOfSym(s)) mkString "#"
}
private def levelString = if (settings.explaintypes.value) level else ""
- protected def typeVarString = originName
override def safeToString = (
if ((constr eq null) || (constr.inst eq null)) "TVar<" + originName + "=null>"
else if (constr.inst ne NoType) "=?" + constr.inst
@@ -3454,22 +3364,12 @@ trait Types extends api.Types { self: SymbolTable =>
case class NamedType(name: Name, tp: Type) extends Type {
override def safeToString: String = name.toString +": "+ tp
}
-
- /** A De Bruijn index referring to a previous type argument. Only used
- * as a serialization format.
+ /** As with NamedType, used only when calling isApplicable.
+ * Records that the application has a wildcard star (aka _*)
+ * at the end of it.
*/
- case class DeBruijnIndex(level: Int, idx: Int, args: List[Type]) extends Type {
- override def safeToString: String = "De Bruijn index("+level+","+idx+")"
- }
-
- /** A binder defining data associated with De Bruijn indices. Only used
- * as a serialization format.
- */
- case class DeBruijnBinder(pnames: List[Name], ptypes: List[Type], restpe: Type) extends Type {
- override def safeToString = {
- val kind = if (pnames.head.isTypeName) "poly" else "method"
- "De Bruijn "+kind+"("+(pnames mkString ",")+";"+(ptypes mkString ",")+";"+restpe+")"
- }
+ case class RepeatedType(tp: Type) extends Type {
+ override def safeToString: String = tp + ": _*"
}
/** A temporary type representing the erasure of a user-defined value type.
@@ -3514,11 +3414,6 @@ trait Types extends api.Types { self: SymbolTable =>
(if (typeParams.isEmpty) "" else typeParamsString(this)) + super.safeToString
}
- // def mkLazyType(tparams: Symbol*)(f: Symbol => Unit): LazyType = (
- // if (tparams.isEmpty) new LazyType { override def complete(sym: Symbol) = f(sym) }
- // else new LazyPolyType(tparams.toList) { override def complete(sym: Symbol) = f(sym) }
- // )
-
// Creators ---------------------------------------------------------------
/** Rebind symbol `sym` to an overriding member in type `pre`. */
@@ -3563,10 +3458,6 @@ trait Types extends api.Types { self: SymbolTable =>
}
/** The canonical creator for a refined type with an initially empty scope.
- *
- * @param parents ...
- * @param owner ...
- * @return ...
*/
def refinedType(parents: List[Type], owner: Symbol): Type =
refinedType(parents, owner, newScope, owner.pos)
@@ -3686,16 +3577,16 @@ trait Types extends api.Types { self: SymbolTable =>
tycon match {
case TypeRef(pre, sym @ (NothingClass|AnyClass), _) => copyTypeRef(tycon, pre, sym, Nil) //@M drop type args to Any/Nothing
- case TypeRef(pre, sym, _) => copyTypeRef(tycon, pre, sym, args)
+ case TypeRef(pre, sym, Nil) => copyTypeRef(tycon, pre, sym, args)
+ case TypeRef(pre, sym, bogons) => devWarning(s"Dropping $bogons from $tycon in appliedType.") ; copyTypeRef(tycon, pre, sym, args)
case PolyType(tparams, restpe) => restpe.instantiateTypeParams(tparams, args)
case ExistentialType(tparams, restpe) => newExistentialType(tparams, appliedType(restpe, args))
case st: SingletonType => appliedType(st.widen, args) // @M TODO: what to do? see bug1
- case RefinedType(parents, decls) => RefinedType(parents map (appliedType(_, args)), decls) // MO to AM: please check
- case TypeBounds(lo, hi) => TypeBounds(appliedType(lo, args), appliedType(hi, args))
+ case RefinedType(parents, decls) => RefinedType(parents map (appliedType(_, args)), decls) // @PP: Can this be right?
+ case TypeBounds(lo, hi) => TypeBounds(appliedType(lo, args), appliedType(hi, args)) // @PP: Can this be right?
case tv@TypeVar(_, _) => tv.applyArgs(args)
case AnnotatedType(annots, underlying, self) => AnnotatedType(annots, appliedType(underlying, args), self)
- case ErrorType => tycon
- case WildcardType => tycon // needed for neg/t0226
+ case ErrorType | WildcardType => tycon
case _ => abort(debugString(tycon))
}
}
@@ -3704,25 +3595,6 @@ trait Types extends api.Types { self: SymbolTable =>
def appliedType(tyconSym: Symbol, args: Type*): Type =
appliedType(tyconSym.typeConstructor, args.toList)
- /** A creator for existential types where the type arguments,
- * rather than being applied directly, are interpreted as the
- * upper bounds of unknown types. For instance if the type argument
- * list given is List(AnyRefClass), the resulting type would be
- * e.g. Set[_ <: AnyRef] rather than Set[AnyRef] .
- */
- def appliedTypeAsUpperBounds(tycon: Type, args: List[Type]): Type = {
- tycon match {
- case TypeRef(pre, sym, _) if sameLength(sym.typeParams, args) =>
- val eparams = typeParamsToExistentials(sym)
- val bounds = args map (TypeBounds upper _)
- foreach2(eparams, bounds)(_ setInfo _)
-
- newExistentialType(eparams, typeRef(pre, sym, eparams map (_.tpe)))
- case _ =>
- appliedType(tycon, args)
- }
- }
-
/** A creator and extractor for type parameterizations that strips empty type parameter lists.
* Use this factory method to indicate the type has kind * (it's a polymorphic value)
* until we start tracking explicit kinds equivalent to typeFun (except that the latter requires tparams nonEmpty).
@@ -3796,131 +3668,7 @@ trait Types extends api.Types { self: SymbolTable =>
newExistentialType(tparams1, tpe1)
}
- /** Normalize any type aliases within this type (@see Type#normalize).
- * Note that this depends very much on the call to "normalize", not "dealias",
- * so it is no longer carries the too-stealthy name "deAlias".
- */
- object normalizeAliases extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(_, sym, _) if sym.isAliasType =>
- def msg = if (tp.isHigherKinded) s"Normalizing type alias function $tp" else s"Dealiasing type alias $tp"
- mapOver(logResult(msg)(tp.normalize))
- case _ => mapOver(tp)
- }
- }
-
- /** Remove any occurrence of type <singleton> from this type and its parents */
- object dropSingletonType extends TypeMap {
- def apply(tp: Type): Type = {
- tp match {
- case TypeRef(_, SingletonClass, _) =>
- AnyClass.tpe
- case tp1 @ RefinedType(parents, decls) =>
- parents filter (_.typeSymbol != SingletonClass) match {
- case Nil => AnyClass.tpe
- case p :: Nil if decls.isEmpty => mapOver(p)
- case ps => mapOver(copyRefinedType(tp1, ps, decls))
- }
- case tp1 =>
- mapOver(tp1)
- }
- }
- }
-
- /** Substitutes the empty scope for any non-empty decls in the type. */
- object dropAllRefinements extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case rt @ RefinedType(parents, decls) if !decls.isEmpty =>
- mapOver(copyRefinedType(rt, parents, EmptyScope))
- case ClassInfoType(parents, decls, clazz) if !decls.isEmpty =>
- mapOver(ClassInfoType(parents, EmptyScope, clazz))
- case _ =>
- mapOver(tp)
- }
- }
-
- /** Type with all top-level occurrences of abstract types replaced by their bounds */
- def abstractTypesToBounds(tp: Type): Type = tp match { // @M don't normalize here (compiler loops on pos/bug1090.scala )
- case TypeRef(_, sym, _) if sym.isAbstractType =>
- abstractTypesToBounds(tp.bounds.hi)
- case TypeRef(_, sym, _) if sym.isAliasType =>
- abstractTypesToBounds(tp.normalize)
- case rtp @ RefinedType(parents, decls) =>
- copyRefinedType(rtp, parents mapConserve abstractTypesToBounds, decls)
- case AnnotatedType(_, underlying, _) =>
- abstractTypesToBounds(underlying)
- case _ =>
- tp
- }
-
- // Set to true for A* => Seq[A]
- // (And it will only rewrite A* in method result types.)
- // This is the pre-existing behavior.
- // Or false for Seq[A] => Seq[A]
- // (It will rewrite A* everywhere but method parameters.)
- // This is the specified behavior.
- protected def etaExpandKeepsStar = false
-
- /** Turn any T* types into Seq[T] except when
- * in method parameter position.
- */
- object dropRepeatedParamType extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case MethodType(params, restpe) =>
- // Not mapping over params
- val restpe1 = apply(restpe)
- if (restpe eq restpe1) tp
- else MethodType(params, restpe1)
- case TypeRef(_, RepeatedParamClass, arg :: Nil) =>
- seqType(arg)
- case _ =>
- if (etaExpandKeepsStar) tp else mapOver(tp)
- }
- }
-
- object toDeBruijn extends TypeMap {
- private var paramStack: List[List[Symbol]] = Nil
- def mkDebruijnBinder(params: List[Symbol], restpe: Type) = {
- paramStack = params :: paramStack
- try {
- DeBruijnBinder(params map (_.name), params map (p => this(p.info)), this(restpe))
- } finally paramStack = paramStack.tail
- }
- def apply(tp: Type): Type = tp match {
- case PolyType(tparams, restpe) =>
- mkDebruijnBinder(tparams, restpe)
- case MethodType(params, restpe) =>
- mkDebruijnBinder(params, restpe)
- case TypeRef(NoPrefix, sym, args) =>
- val level = paramStack indexWhere (_ contains sym)
- if (level < 0) mapOver(tp)
- else DeBruijnIndex(level, paramStack(level) indexOf sym, args mapConserve this)
- case _ =>
- mapOver(tp)
- }
- }
- def fromDeBruijn(owner: Symbol) = new TypeMap {
- private var paramStack: List[List[Symbol]] = Nil
- def apply(tp: Type): Type = tp match {
- case DeBruijnBinder(pnames, ptypes, restpe) =>
- val isType = pnames.head.isTypeName
- val newParams = for (name <- pnames) yield
- if (isType) owner.newTypeParameter(name.toTypeName)
- else owner.newValueParameter(name.toTermName)
- paramStack = newParams :: paramStack
- try {
- foreach2(newParams, ptypes)((p, t) => p setInfo this(t))
- val restpe1 = this(restpe)
- if (isType) PolyType(newParams, restpe1)
- else MethodType(newParams, restpe1)
- } finally paramStack = paramStack.tail
- case DeBruijnIndex(level, idx, args) =>
- TypeRef(NoPrefix, paramStack(level)(idx), args map this)
- case _ =>
- mapOver(tp)
- }
- }
// Hash consing --------------------------------------------------------------
@@ -3940,121 +3688,6 @@ trait Types extends api.Types { self: SymbolTable =>
// Helper Classes ---------------------------------------------------------
- /** @PP: Unable to see why these apparently constant types should need vals
- * in every TypeConstraint, I lifted them out.
- */
- private lazy val numericLoBound = IntClass.tpe
- private lazy val numericHiBound = intersectionType(List(ByteClass.tpe, CharClass.tpe), ScalaPackageClass)
-
- /** A class expressing upper and lower bounds constraints of type variables,
- * as well as their instantiations.
- */
- class TypeConstraint(lo0: List[Type], hi0: List[Type], numlo0: Type, numhi0: Type, avoidWidening0: Boolean = false) {
- def this(lo0: List[Type], hi0: List[Type]) = this(lo0, hi0, NoType, NoType)
- def this(bounds: TypeBounds) = this(List(bounds.lo), List(bounds.hi))
- def this() = this(List(), List())
-
- /* Syncnote: Type constraints are assumed to be used from only one
- * thread. They are not exposed in api.Types and are used only locally
- * in operations that are exposed from types. Hence, no syncing of any
- * variables should be ncessesary.
- */
-
- /** Guard these lists against AnyClass and NothingClass appearing,
- * else loBounds.isEmpty will have different results for an empty
- * constraint and one with Nothing as a lower bound. [Actually
- * guarding addLoBound/addHiBound somehow broke raw types so it
- * only guards against being created with them.]
- */
- private var lobounds = lo0 filterNot typeIsNothing
- private var hibounds = hi0 filterNot typeIsAny
- private var numlo = numlo0
- private var numhi = numhi0
- private var avoidWidening = avoidWidening0
-
- def loBounds: List[Type] = if (numlo == NoType) lobounds else numlo :: lobounds
- def hiBounds: List[Type] = if (numhi == NoType) hibounds else numhi :: hibounds
- def avoidWiden: Boolean = avoidWidening
-
- def addLoBound(tp: Type, isNumericBound: Boolean = false) {
- // For some reason which is still a bit fuzzy, we must let Nothing through as
- // a lower bound despite the fact that Nothing is always a lower bound. My current
- // supposition is that the side-effecting type constraint accumulation mechanism
- // depends on these subtype tests being performed to make forward progress when
- // there are mutally recursive type vars.
- // See pos/t6367 and pos/t6499 for the competing test cases.
- val mustConsider = tp.typeSymbol match {
- case NothingClass => true
- case _ => !(lobounds contains tp)
- }
- if (mustConsider) {
- if (isNumericBound && isNumericValueType(tp)) {
- if (numlo == NoType || isNumericSubType(numlo, tp))
- numlo = tp
- else if (!isNumericSubType(tp, numlo))
- numlo = numericLoBound
- }
- else lobounds ::= tp
- }
- }
-
- def checkWidening(tp: Type) {
- if(tp.isStable) avoidWidening = true
- else tp match {
- case HasTypeMember(_, _) => avoidWidening = true
- case _ =>
- }
- }
-
- def addHiBound(tp: Type, isNumericBound: Boolean = false) {
- // My current test case only demonstrates the need to let Nothing through as
- // a lower bound, but I suspect the situation is symmetrical.
- val mustConsider = tp.typeSymbol match {
- case AnyClass => true
- case _ => !(hibounds contains tp)
- }
- if (mustConsider) {
- checkWidening(tp)
- if (isNumericBound && isNumericValueType(tp)) {
- if (numhi == NoType || isNumericSubType(tp, numhi))
- numhi = tp
- else if (!isNumericSubType(numhi, tp))
- numhi = numericHiBound
- }
- else hibounds ::= tp
- }
- }
-
- def isWithinBounds(tp: Type): Boolean =
- lobounds.forall(_ <:< tp) &&
- hibounds.forall(tp <:< _) &&
- (numlo == NoType || (numlo weak_<:< tp)) &&
- (numhi == NoType || (tp weak_<:< numhi))
-
- var inst: Type = NoType // @M reduce visibility?
-
- def instValid = (inst ne null) && (inst ne NoType)
-
- def cloneInternal = {
- val tc = new TypeConstraint(lobounds, hibounds, numlo, numhi, avoidWidening)
- tc.inst = inst
- tc
- }
-
- override def toString = {
- val boundsStr = {
- val lo = loBounds filterNot typeIsNothing
- val hi = hiBounds filterNot typeIsAny
- val lostr = if (lo.isEmpty) Nil else List(lo.mkString(" >: (", ", ", ")"))
- val histr = if (hi.isEmpty) Nil else List(hi.mkString(" <: (", ", ", ")"))
-
- lostr ++ histr mkString ("[", " | ", "]")
- }
- if (inst eq NoType) boundsStr
- else boundsStr + " _= " + inst.safeToString
- }
- }
-
class TypeUnwrapper(poly: Boolean, existential: Boolean, annotated: Boolean, nullary: Boolean) extends (Type => Type) {
def apply(tp: Type): Type = tp match {
case AnnotatedType(_, underlying, _) if annotated => apply(underlying)
@@ -4065,310 +3698,33 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
class ClassUnwrapper(existential: Boolean) extends TypeUnwrapper(poly = true, existential, annotated = true, nullary = false) {
- override def apply(tp: Type) = super.apply(tp.normalize)
+ override def apply(tp: Type) = super.apply(tp.normalize) // normalize is required here
}
object unwrapToClass extends ClassUnwrapper(existential = true) { }
object unwrapToStableClass extends ClassUnwrapper(existential = false) { }
object unwrapWrapperTypes extends TypeUnwrapper(true, true, true, true) { }
- trait AnnotationFilter extends TypeMap {
- def keepAnnotation(annot: AnnotationInfo): Boolean
-
- override def mapOver(annot: AnnotationInfo) =
- if (keepAnnotation(annot)) super.mapOver(annot)
- else UnmappableAnnotation
- }
-
- trait KeepOnlyTypeConstraints extends AnnotationFilter {
- // filter keeps only type constraint annotations
- def keepAnnotation(annot: AnnotationInfo) = annot matches TypeConstraintClass
- }
-
- trait VariantTypeMap extends TypeMap {
- private[this] var _variance = 1
-
- override def variance = _variance
- def variance_=(x: Int) = _variance = x
-
- override protected def noChangeToSymbols(origSyms: List[Symbol]) =
- //OPT inline from forall to save on #closures
- origSyms match {
- case sym :: rest =>
- val v = variance
- if (sym.isAliasType) variance = 0
- val result = this(sym.info)
- variance = v
- (result eq sym.info) && noChangeToSymbols(rest)
- case _ =>
- true
- }
-
- override protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] =
- map2Conserve(args, tparams) { (arg, tparam) =>
- val v = variance
- if (tparam.isContravariant) variance = -variance
- else if (!tparam.isCovariant) variance = 0
- val arg1 = this(arg)
- variance = v
- arg1
- }
-
- /** Map this function over given type */
- override def mapOver(tp: Type): Type = tp match {
- case MethodType(params, result) =>
- variance = -variance
- val params1 = mapOver(params)
- variance = -variance
- val result1 = this(result)
- if ((params1 eq params) && (result1 eq result)) tp
- else copyMethodType(tp, params1, result1.substSym(params, params1))
- case PolyType(tparams, result) =>
- variance = -variance
- val tparams1 = mapOver(tparams)
- variance = -variance
- var result1 = this(result)
- if ((tparams1 eq tparams) && (result1 eq result)) tp
- else PolyType(tparams1, result1.substSym(tparams, tparams1))
- case TypeBounds(lo, hi) =>
- variance = -variance
- val lo1 = this(lo)
- variance = -variance
- val hi1 = this(hi)
- if ((lo1 eq lo) && (hi1 eq hi)) tp
- else TypeBounds(lo1, hi1)
- case tr @ TypeRef(pre, sym, args) =>
- val pre1 = this(pre)
- val args1 =
- if (args.isEmpty)
- args
- else if (variance == 0) // fast & safe path: don't need to look at typeparams
- args mapConserve this
- else {
- val tparams = sym.typeParams
- if (tparams.isEmpty) args
- else mapOverArgs(args, tparams)
- }
- if ((pre1 eq pre) && (args1 eq args)) tp
- else copyTypeRef(tp, pre1, tr.coevolveSym(pre1), args1)
- case _ =>
- super.mapOver(tp)
- }
- }
-
- // todo. move these into scala.reflect.api
-
- /** A prototype for mapping a function over all possible types
+ /** Repack existential types, otherwise they sometimes get unpacked in the
+ * wrong location (type inference comes up with an unexpected skolem)
*/
- abstract class TypeMap extends (Type => Type) {
- def apply(tp: Type): Type
-
- /** Mix in VariantTypeMap if you want variances to be significant.
- */
- def variance = 0
-
- /** Map this function over given type */
- def mapOver(tp: Type): Type = tp match {
- case tr @ TypeRef(pre, sym, args) =>
- val pre1 = this(pre)
- val args1 = args mapConserve this
- if ((pre1 eq pre) && (args1 eq args)) tp
- else copyTypeRef(tp, pre1, tr.coevolveSym(pre1), args1)
- case ThisType(_) => tp
- case SingleType(pre, sym) =>
- if (sym.isPackageClass) tp // short path
- else {
- val pre1 = this(pre)
- if (pre1 eq pre) tp
- else singleType(pre1, sym)
- }
- case MethodType(params, result) =>
- val params1 = mapOver(params)
- val result1 = this(result)
- if ((params1 eq params) && (result1 eq result)) tp
- else copyMethodType(tp, params1, result1.substSym(params, params1))
- case PolyType(tparams, result) =>
- val tparams1 = mapOver(tparams)
- var result1 = this(result)
- if ((tparams1 eq tparams) && (result1 eq result)) tp
- else PolyType(tparams1, result1.substSym(tparams, tparams1))
- case NullaryMethodType(result) =>
- val result1 = this(result)
- if (result1 eq result) tp
- else NullaryMethodType(result1)
- case ConstantType(_) => tp
- case SuperType(thistp, supertp) =>
- val thistp1 = this(thistp)
- val supertp1 = this(supertp)
- if ((thistp1 eq thistp) && (supertp1 eq supertp)) tp
- else SuperType(thistp1, supertp1)
- case TypeBounds(lo, hi) =>
- val lo1 = this(lo)
- val hi1 = this(hi)
- if ((lo1 eq lo) && (hi1 eq hi)) tp
- else TypeBounds(lo1, hi1)
- case BoundedWildcardType(bounds) =>
- val bounds1 = this(bounds)
- if (bounds1 eq bounds) tp
- else BoundedWildcardType(bounds1.asInstanceOf[TypeBounds])
- case rtp @ RefinedType(parents, decls) =>
- val parents1 = parents mapConserve this
- val decls1 = mapOver(decls)
- //if ((parents1 eq parents) && (decls1 eq decls)) tp
- //else refinementOfClass(tp.typeSymbol, parents1, decls1)
- copyRefinedType(rtp, parents1, decls1)
- case ExistentialType(tparams, result) =>
- val tparams1 = mapOver(tparams)
- var result1 = this(result)
- if ((tparams1 eq tparams) && (result1 eq result)) tp
- else newExistentialType(tparams1, result1.substSym(tparams, tparams1))
- case OverloadedType(pre, alts) =>
- val pre1 = if (pre.isInstanceOf[ClassInfoType]) pre else this(pre)
- if (pre1 eq pre) tp
- else OverloadedType(pre1, alts)
- case AntiPolyType(pre, args) =>
- val pre1 = this(pre)
- val args1 = args mapConserve (this)
- if ((pre1 eq pre) && (args1 eq args)) tp
- else AntiPolyType(pre1, args1)
- case tv@TypeVar(_, constr) =>
- if (constr.instValid) this(constr.inst)
- else tv.applyArgs(mapOverArgs(tv.typeArgs, tv.params)) //@M !args.isEmpty implies !typeParams.isEmpty
- case NotNullType(tp) =>
- val tp1 = this(tp)
- if (tp1 eq tp) tp
- else NotNullType(tp1)
- case AnnotatedType(annots, atp, selfsym) =>
- val annots1 = mapOverAnnotations(annots)
- val atp1 = this(atp)
- if ((annots1 eq annots) && (atp1 eq atp)) tp
- else if (annots1.isEmpty) atp1
- else AnnotatedType(annots1, atp1, selfsym)
- case DeBruijnIndex(shift, idx, args) =>
- val args1 = args mapConserve this
- if (args1 eq args) tp
- else DeBruijnIndex(shift, idx, args1)
-/*
- case ErrorType => tp
- case WildcardType => tp
- case NoType => tp
- case NoPrefix => tp
- case ErasedSingleType(sym) => tp
-*/
- case _ =>
- tp
- // throw new Error("mapOver inapplicable for " + tp);
- }
-
- protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] =
- args mapConserve this
-
- /** Called by mapOver to determine whether the original symbols can
- * be returned, or whether they must be cloned. Overridden in VariantTypeMap.
- */
- protected def noChangeToSymbols(origSyms: List[Symbol]) =
- origSyms forall (sym => sym.info eq this(sym.info))
-
- /** Map this function over given scope */
- def mapOver(scope: Scope): Scope = {
- val elems = scope.toList
- val elems1 = mapOver(elems)
- if (elems1 eq elems) scope
- else newScopeWith(elems1: _*)
- }
-
- /** Map this function over given list of symbols */
- def mapOver(origSyms: List[Symbol]): List[Symbol] = {
- // fast path in case nothing changes due to map
- if (noChangeToSymbols(origSyms)) origSyms
- // map is not the identity --> do cloning properly
- else cloneSymbolsAndModify(origSyms, TypeMap.this)
- }
-
- def mapOver(annot: AnnotationInfo): AnnotationInfo = {
- val AnnotationInfo(atp, args, assocs) = annot
- val atp1 = mapOver(atp)
- val args1 = mapOverAnnotArgs(args)
- // there is no need to rewrite assocs, as they are constants
-
- if ((args eq args1) && (atp eq atp1)) annot
- else if (args1.isEmpty && args.nonEmpty) UnmappableAnnotation // some annotation arg was unmappable
- else AnnotationInfo(atp1, args1, assocs) setPos annot.pos
- }
-
- def mapOverAnnotations(annots: List[AnnotationInfo]): List[AnnotationInfo] = {
- val annots1 = annots mapConserve mapOver
- if (annots1 eq annots) annots
- else annots1 filterNot (_ eq UnmappableAnnotation)
- }
-
- /** Map over a set of annotation arguments. If any
- * of the arguments cannot be mapped, then return Nil. */
- def mapOverAnnotArgs(args: List[Tree]): List[Tree] = {
- val args1 = args mapConserve mapOver
- if (args1 contains UnmappableTree) Nil
- else args1
- }
-
- def mapOver(tree: Tree): Tree =
- mapOver(tree, () => return UnmappableTree)
-
- /** Map a tree that is part of an annotation argument.
- * If the tree cannot be mapped, then invoke giveup().
- * The default is to transform the tree with
- * TypeMapTransformer.
- */
- def mapOver(tree: Tree, giveup: ()=>Nothing): Tree =
- (new TypeMapTransformer).transform(tree)
-
- /** This transformer leaves the tree alone except to remap
- * its types. */
- class TypeMapTransformer extends Transformer {
- override def transform(tree: Tree) = {
- val tree1 = super.transform(tree)
- val tpe1 = TypeMap.this(tree1.tpe)
- if ((tree eq tree1) && (tree.tpe eq tpe1))
- tree
- else
- tree1.shallowDuplicate.setType(tpe1)
- }
- }
- }
+ def repackExistential(tp: Type): Type = (
+ if (tp == NoType) tp
+ else existentialAbstraction(existentialsInType(tp), tp)
+ )
- abstract class TypeTraverser extends TypeMap {
- def traverse(tp: Type): Unit
- def apply(tp: Type): Type = { traverse(tp); tp }
- }
+ def containsExistential(tpe: Type) = tpe exists typeIsExistentiallyBound
+ def existentialsInType(tpe: Type) = tpe withFilter typeIsExistentiallyBound map (_.typeSymbol)
- abstract class TypeTraverserWithResult[T] extends TypeTraverser {
- def result: T
- def clear(): Unit
+ private def isDummyOf(tpe: Type)(targ: Type) = {
+ val sym = targ.typeSymbol
+ sym.isTypeParameter && sym.owner == tpe.typeSymbol
}
-
- abstract class TypeCollector[T](initial: T) extends TypeTraverser {
- var result: T = _
- def collect(tp: Type) = {
- result = initial
- traverse(tp)
- result
- }
+ def isDummyAppliedType(tp: Type) = tp.dealias match {
+ case tr @ TypeRef(_, _, args) => args exists isDummyOf(tr)
+ case _ => false
}
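+  // For example (illustrative): for  class C[T] , the self-application  C[T]  returned by C.tpe
+  // uses C's own parameter T as a dummy argument, so isDummyAppliedType(C.tpe) is true, whereas
+  // a genuine instantiation such as typeOf[C[Int]] is not.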
- /** A collector that tests for existential types appearing at given variance in a type
- * @PP: Commenting out due to not being used anywhere.
- */
- // class ContainsVariantExistentialCollector(v: Int) extends TypeCollector(false) with VariantTypeMap {
- // variance = v
- //
- // def traverse(tp: Type) = tp match {
- // case ExistentialType(_, _) if (variance == v) => result = true
- // case _ => mapOver(tp)
- // }
- // }
- //
- // val containsCovariantExistentialCollector = new ContainsVariantExistentialCollector(1)
- // val containsContravariantExistentialCollector = new ContainsVariantExistentialCollector(-1)
-
def typeParamsToExistentials(clazz: Symbol, tparams: List[Symbol]): List[Symbol] = {
val eparams = mapWithIndex(tparams)((tparam, i) =>
clazz.newExistential(newTypeName("?"+i), clazz.pos) setInfo tparam.info.bounds)
@@ -4378,630 +3734,23 @@ trait Types extends api.Types { self: SymbolTable =>
def typeParamsToExistentials(clazz: Symbol): List[Symbol] =
typeParamsToExistentials(clazz, clazz.typeParams)
+ def isRawIfWithoutArgs(sym: Symbol) = sym.isClass && sym.typeParams.nonEmpty && sym.isJavaDefined
+ /** Is type tp a ''raw type''? */
// note: it's important to write the two tests in this order,
// as only typeParams forces the classfile to be read. See #400
- private def isRawIfWithoutArgs(sym: Symbol) =
- sym.isClass && sym.typeParams.nonEmpty && sym.isJavaDefined
-
- def isRaw(sym: Symbol, args: List[Type]) =
- !phase.erasedTypes && isRawIfWithoutArgs(sym) && args.isEmpty
-
- /** Is type tp a ''raw type''? */
- def isRawType(tp: Type) = tp match {
- case TypeRef(_, sym, args) => isRaw(sym, args)
- case _ => false
- }
-
- /** The raw to existential map converts a ''raw type'' to an existential type.
- * It is necessary because we might have read a raw type of a
- * parameterized Java class from a class file. At the time we read the type
- * the corresponding class file might still not be read, so we do not
- * know what the type parameters of the type are. Therefore
- * the conversion of raw types to existential types might not have taken place
-   * in ClassfileParser.sigToType (where it is usually done).
- */
- def rawToExistential = new TypeMap {
- private var expanded = immutable.Set[Symbol]()
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, List()) if isRawIfWithoutArgs(sym) =>
- if (expanded contains sym) AnyRefClass.tpe
- else try {
- expanded += sym
- val eparams = mapOver(typeParamsToExistentials(sym))
- existentialAbstraction(eparams, typeRef(apply(pre), sym, eparams map (_.tpe)))
- } finally {
- expanded -= sym
- }
- case _ =>
- mapOver(tp)
- }
- }
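-  // Illustrative example for the map above (hypothetical Java class): for
-  //   public class Box<T> {}
-  // a raw occurrence  Box  read from a classfile is converted to an existential over fresh
-  // parameters derived from Box's own type parameters, roughly  Box[?0] forSome { type ?0 } ,
-  // i.e. what Scala sources would write as  Box[_] .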
-
- /** Used by existentialAbstraction.
- */
- class ExistentialExtrapolation(tparams: List[Symbol]) extends VariantTypeMap {
- private val occurCount = mutable.HashMap[Symbol, Int]()
- private def countOccs(tp: Type) = {
- tp foreach {
- case TypeRef(_, sym, _) =>
- if (tparams contains sym)
- occurCount(sym) += 1
- case _ => ()
- }
- }
- def extrapolate(tpe: Type): Type = {
- tparams foreach (t => occurCount(t) = 0)
- countOccs(tpe)
- for (tparam <- tparams)
- countOccs(tparam.info)
-
- apply(tpe)
- }
-
- def apply(tp: Type): Type = {
- val tp1 = mapOver(tp)
- if (variance == 0) tp1
- else tp1 match {
- case TypeRef(pre, sym, args) if tparams contains sym =>
- val repl = if (variance == 1) dropSingletonType(tp1.bounds.hi) else tp1.bounds.lo
- //println("eliminate "+sym+"/"+repl+"/"+occurCount(sym)+"/"+(tparams exists (repl.contains)))//DEBUG
- if (!repl.typeSymbol.isBottomClass && occurCount(sym) == 1 && !(tparams exists (repl.contains)))
- repl
- else tp1
- case _ =>
- tp1
- }
- }
- override def mapOver(tp: Type): Type = tp match {
- case SingleType(pre, sym) =>
- if (sym.isPackageClass) tp // short path
- else {
- val pre1 = this(pre)
- if ((pre1 eq pre) || !pre1.isStable) tp
- else singleType(pre1, sym)
- }
- case _ => super.mapOver(tp)
- }
-
- // Do not discard the types of existential ident's. The
- // symbol of the Ident itself cannot be listed in the
- // existential's parameters, so the resulting existential
- // type would be ill-formed.
- override def mapOver(tree: Tree) = tree match {
- case Ident(_) if tree.tpe.isStable => tree
- case _ => super.mapOver(tree)
- }
- }
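-  // Rough sketch (hypothetical parameter  T <: Number  occurring exactly once):
-  //   existentialAbstraction(List(T), List[T])   extrapolates to  List[Number]
-  //     (covariant occurrence: the quantifier is dropped in favour of the upper bound)
-  //   existentialAbstraction(List(T), Array[T])  keeps the quantifier, giving
-  //     Array[T] forSome { type T <: Number }   (invariant occurrence: no sound replacement)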
-
- def singletonBounds(hi: Type) = TypeBounds.upper(intersectionType(List(hi, SingletonClass.tpe)))
-
- /** Might the given symbol be important when calculating the prefix
- * of a type? When tp.asSeenFrom(pre, clazz) is called on `tp`,
- * the result will be `tp` unchanged if `pre` is trivial and `clazz`
- * is a symbol such that isPossiblePrefix(clazz) == false.
- */
- def isPossiblePrefix(clazz: Symbol) = clazz.isClass && !clazz.isPackageClass
+ def isRawType(tp: Type) = !phase.erasedTypes && (tp match {
+ case TypeRef(_, sym, Nil) => isRawIfWithoutArgs(sym)
+ case _ => false
+ })
- private def skipPrefixOf(pre: Type, clazz: Symbol) = (
- (pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz)
+ @deprecated("Use isRawType", "2.10.1") // presently used by sbt
+ def isRaw(sym: Symbol, args: List[Type]) = (
+ !phase.erasedTypes
+ && args.isEmpty
+ && isRawIfWithoutArgs(sym)
)
- /** A map to compute the asSeenFrom method */
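-  // A classic example of what this map computes (hypothetical classes):
-  //   class D[T] { def m: T }
-  //   class C extends D[String]
-  // The result type of m, namely T, as seen from prefix typeOf[C] and class D, is String:
-  //   T.asSeenFrom(typeOf[C], D)  ==  String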
- class AsSeenFromMap(pre: Type, clazz: Symbol) extends TypeMap with KeepOnlyTypeConstraints {
- var capturedSkolems: List[Symbol] = List()
- var capturedParams: List[Symbol] = List()
-
- override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
- object annotationArgRewriter extends TypeMapTransformer {
- private def canRewriteThis(sym: Symbol) = (
- (sym isNonBottomSubClass clazz)
- && (pre.widen.typeSymbol isNonBottomSubClass sym)
- && (pre.isStable || giveup())
- )
- // what symbol should really be used?
- private def newTermSym() = {
- val p = pre.typeSymbol
- p.owner.newValue(p.name.toTermName, p.pos) setInfo pre
- }
- /** Rewrite `This` trees in annotation argument trees */
- override def transform(tree: Tree): Tree = super.transform(tree) match {
- case This(_) if canRewriteThis(tree.symbol) => gen.mkAttributedQualifier(pre, newTermSym())
- case tree => tree
- }
- }
- annotationArgRewriter.transform(tree)
- }
-
- def stabilize(pre: Type, clazz: Symbol): Type = {
- capturedParams find (_.owner == clazz) match {
- case Some(qvar) => qvar.tpe
- case _ =>
- val qvar = clazz freshExistential nme.SINGLETON_SUFFIX setInfo singletonBounds(pre)
- capturedParams ::= qvar
- qvar.tpe
- }
- }
-
- def apply(tp: Type): Type =
- tp match {
- case ThisType(sym) =>
- def toPrefix(pre: Type, clazz: Symbol): Type =
- if (skipPrefixOf(pre, clazz)) tp
- else if ((sym isNonBottomSubClass clazz) &&
- (pre.widen.typeSymbol isNonBottomSubClass sym)) {
- val pre1 = pre match {
- case SuperType(thistp, _) => thistp
- case _ => pre
- }
- if (!(pre1.isStable ||
- pre1.typeSymbol.isPackageClass ||
- pre1.typeSymbol.isModuleClass && pre1.typeSymbol.isStatic)) {
- stabilize(pre1, sym)
- } else {
- pre1
- }
- } else {
- toPrefix(pre.baseType(clazz).prefix, clazz.owner)
- }
- toPrefix(pre, clazz)
- case SingleType(pre, sym) =>
- if (sym.isPackageClass) tp // short path
- else {
- val pre1 = this(pre)
- if (pre1 eq pre) tp
- else if (pre1.isStable) singleType(pre1, sym)
- else pre1.memberType(sym).resultType //todo: this should be rolled into existential abstraction
- }
- // AM: Martin, is this description accurate?
- // walk the owner chain of `clazz` (the original argument to asSeenFrom) until we find the type param's owner (while rewriting pre as we crawl up the owner chain)
- // once we're at the owner, extract the information that pre encodes about the type param,
- // by minimally subsuming pre to the type instance of the class that owns the type param,
- // the type we're looking for is the type instance's type argument at the position corresponding to the type parameter
- // optimisation: skip this type parameter if it's not owned by a class, as those params are not influenced by the prefix through which they are seen
- // (concretely: type params of anonymous type functions, which currently can only arise from normalising type aliases, are owned by the type alias of which they are the eta-expansion)
- // (skolems also aren't affected: they are ruled out by the isTypeParameter check)
- case TypeRef(prefix, sym, args) if (sym.isTypeParameter && sym.owner.isClass) =>
- def toInstance(pre: Type, clazz: Symbol): Type =
- if (skipPrefixOf(pre, clazz)) mapOver(tp)
- //@M! see test pos/tcpoly_return_overriding.scala why mapOver is necessary
- else {
- def throwError = abort("" + tp + sym.locationString + " cannot be instantiated from " + pre.widen)
-
- val symclazz = sym.owner
- if (symclazz == clazz && !pre.widen.isInstanceOf[TypeVar] && (pre.widen.typeSymbol isNonBottomSubClass symclazz)) {
- // have to deconst because it may be a Class[T].
- pre.baseType(symclazz).deconst match {
- case TypeRef(_, basesym, baseargs) =>
-
- def instParam(ps: List[Symbol], as: List[Type]): Type =
- if (ps.isEmpty) {
- if (forInteractive) {
- val saved = settings.uniqid.value
- try {
- settings.uniqid.value = true
- println("*** stale type parameter: " + tp + sym.locationString + " cannot be instantiated from " + pre.widen)
- println("*** confused with params: " + sym + " in " + sym.owner + " not in " + ps + " of " + basesym)
- println("*** stacktrace = ")
- new Error().printStackTrace()
- } finally settings.uniqid.value = saved
- instParamRelaxed(basesym.typeParams, baseargs)
- } else throwError
- } else if (sym eq ps.head)
- // @M! don't just replace the whole thing, might be followed by type application
- appliedType(as.head, args mapConserve (this)) // @M: was as.head
- else instParam(ps.tail, as.tail)
-
-                  /** Relaxed version of instParam which matches on names, not symbols.
- * This is a last fallback in interactive mode because races in calls
- * from the IDE to the compiler may in rare cases lead to symbols referring
- * to type parameters that are no longer current.
- */
- def instParamRelaxed(ps: List[Symbol], as: List[Type]): Type =
- if (ps.isEmpty) throwError
- else if (sym.name == ps.head.name)
- // @M! don't just replace the whole thing, might be followed by type application
- appliedType(as.head, args mapConserve (this)) // @M: was as.head
- else instParamRelaxed(ps.tail, as.tail)
-
- //Console.println("instantiating " + sym + " from " + basesym + " with " + basesym.typeParams + " and " + baseargs+", pre = "+pre+", symclazz = "+symclazz);//DEBUG
- if (sameLength(basesym.typeParams, baseargs))
- instParam(basesym.typeParams, baseargs)
- else
- if (symclazz.tpe.parents exists typeIsErroneous)
-                      ErrorType // don't be too overzealous with throwing exceptions, see #2641
- else
- throw new Error(
- "something is wrong (wrong class file?): "+basesym+
- " with type parameters "+
- basesym.typeParams.map(_.name).mkString("[",",","]")+
- " gets applied to arguments "+baseargs.mkString("[",",","]")+", phase = "+phase)
- case ExistentialType(tparams, qtpe) =>
- capturedSkolems = capturedSkolems union tparams
- toInstance(qtpe, clazz)
- case t =>
- throwError
- }
- } else toInstance(pre.baseType(clazz).prefix, clazz.owner)
- }
- toInstance(pre, clazz)
- case _ =>
- mapOver(tp)
- }
- }
-
- /** A base class to compute all substitutions */
- abstract class SubstMap[T](from: List[Symbol], to: List[T]) extends TypeMap {
- assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to)
-
- /** Are `sym` and `sym1` the same? Can be tuned by subclasses. */
- protected def matches(sym: Symbol, sym1: Symbol): Boolean = sym eq sym1
-
- /** Map target to type, can be tuned by subclasses */
- protected def toType(fromtp: Type, tp: T): Type
-
- protected def renameBoundSyms(tp: Type): Type = tp match {
- case MethodType(ps, restp) =>
- createFromClonedSymbols(ps, restp)((ps1, tp1) => copyMethodType(tp, ps1, renameBoundSyms(tp1)))
- case PolyType(bs, restp) =>
- createFromClonedSymbols(bs, restp)((ps1, tp1) => PolyType(ps1, renameBoundSyms(tp1)))
- case ExistentialType(bs, restp) =>
- createFromClonedSymbols(bs, restp)(newExistentialType)
- case _ =>
- tp
- }
-
- def apply(tp0: Type): Type = if (from.isEmpty) tp0 else {
- @tailrec def subst(tp: Type, sym: Symbol, from: List[Symbol], to: List[T]): Type =
- if (from.isEmpty) tp
- // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(tp, from))
- else if (matches(from.head, sym)) toType(tp, to.head)
- else subst(tp, sym, from.tail, to.tail)
-
- val boundSyms = tp0.boundSyms
- val tp1 = if (boundSyms.nonEmpty && (boundSyms exists from.contains)) renameBoundSyms(tp0) else tp0
- val tp = mapOver(tp1)
-
- tp match {
- // @M
- // 1) arguments must also be substituted (even when the "head" of the
- // applied type has already been substituted)
- // example: (subst RBound[RT] from [type RT,type RBound] to
- // [type RT&,type RBound&]) = RBound&[RT&]
- // 2) avoid loops (which occur because alpha-conversion is
- // not performed properly imo)
- // e.g. if in class Iterable[a] there is a new Iterable[(a,b)],
- // we must replace the a in Iterable[a] by (a,b)
- // (must not recurse --> loops)
- // 3) replacing m by List in m[Int] should yield List[Int], not just List
- case TypeRef(NoPrefix, sym, args) =>
- appliedType(subst(tp, sym, from, to), args) // if args.isEmpty, appliedType is the identity
- case SingleType(NoPrefix, sym) =>
- subst(tp, sym, from, to)
- case _ =>
- tp
- }
- }
- }
-
- /** A map to implement the `substSym` method. */
- class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) {
- def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2))
-
- protected def toType(fromtp: Type, sym: Symbol) = fromtp match {
- case TypeRef(pre, _, args) => copyTypeRef(fromtp, pre, sym, args)
- case SingleType(pre, _) => singleType(pre, sym)
- }
- override def apply(tp: Type): Type = if (from.isEmpty) tp else {
- @tailrec def subst(sym: Symbol, from: List[Symbol], to: List[Symbol]): Symbol =
- if (from.isEmpty) sym
- // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(sym, from))
- else if (matches(from.head, sym)) to.head
- else subst(sym, from.tail, to.tail)
- tp match {
- case TypeRef(pre, sym, args) if pre ne NoPrefix =>
- val newSym = subst(sym, from, to)
- // mapOver takes care of subst'ing in args
- mapOver ( if (sym eq newSym) tp else copyTypeRef(tp, pre, newSym, args) )
- // assert(newSym.typeParams.length == sym.typeParams.length, "typars mismatch in SubstSymMap: "+(sym, sym.typeParams, newSym, newSym.typeParams))
- case SingleType(pre, sym) if pre ne NoPrefix =>
- val newSym = subst(sym, from, to)
- mapOver( if (sym eq newSym) tp else singleType(pre, newSym) )
- case _ =>
- super.apply(tp)
- }
- }
-
- object mapTreeSymbols extends TypeMapTransformer {
- val strictCopy = newStrictTreeCopier
-
- def termMapsTo(sym: Symbol) = from indexOf sym match {
- case -1 => None
- case idx => Some(to(idx))
- }
-
- // if tree.symbol is mapped to another symbol, passes the new symbol into the
- // constructor `trans` and sets the symbol and the type on the resulting tree.
- def transformIfMapped(tree: Tree)(trans: Symbol => Tree) = termMapsTo(tree.symbol) match {
- case Some(toSym) => trans(toSym) setSymbol toSym setType tree.tpe
- case None => tree
- }
-
- // changes trees which refer to one of the mapped symbols. trees are copied before attributes are modified.
- override def transform(tree: Tree) = {
- // super.transform maps symbol references in the types of `tree`. it also copies trees where necessary.
- super.transform(tree) match {
- case id @ Ident(_) =>
- transformIfMapped(id)(toSym =>
- strictCopy.Ident(id, toSym.name))
-
- case sel @ Select(qual, name) =>
- transformIfMapped(sel)(toSym =>
- strictCopy.Select(sel, qual, toSym.name))
-
- case tree => tree
- }
- }
- }
- override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
- mapTreeSymbols.transform(tree)
- }
- }
-
- /** A map to implement the `subst` method. */
- class SubstTypeMap(from: List[Symbol], to: List[Type])
- extends SubstMap(from, to) {
- protected def toType(fromtp: Type, tp: Type) = tp
-
- override def mapOver(tree: Tree, giveup: () => Nothing): Tree = {
- object trans extends TypeMapTransformer {
- override def transform(tree: Tree) = tree match {
- case Ident(name) =>
- from indexOf tree.symbol match {
- case -1 => super.transform(tree)
- case idx =>
- val totpe = to(idx)
- if (totpe.isStable) tree.duplicate setType totpe
- else giveup()
- }
- case _ =>
- super.transform(tree)
- }
- }
- trans.transform(tree)
- }
- }
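-  // For instance (illustrative): with  from = List(T)  and  to = List(typeOf[Int])  for some type
-  // parameter symbol T, the resulting SubstTypeMap rewrites
-  //   List[T]          to  List[Int]
-  //   T => Option[T]   to  Int => Option[Int]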
-
- /** A map to implement the `substThis` method. */
- class SubstThisMap(from: Symbol, to: Type) extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case ThisType(sym) if (sym == from) => to
- case _ => mapOver(tp)
- }
- }
-
- class SubstWildcardMap(from: List[Symbol]) extends TypeMap {
- def apply(tp: Type): Type = try {
- tp match {
- case TypeRef(_, sym, _) if from contains sym =>
- BoundedWildcardType(sym.info.bounds)
- case _ =>
- mapOver(tp)
- }
- } catch {
- case ex: MalformedType =>
- WildcardType
- }
- }
-
-// dependent method types
- object IsDependentCollector extends TypeCollector(false) {
- def traverse(tp: Type) {
- if (tp.isImmediatelyDependent) result = true
- else if (!result) mapOver(tp)
- }
- }
-
- object ApproximateDependentMap extends TypeMap {
- def apply(tp: Type): Type =
- if (tp.isImmediatelyDependent) WildcardType
- else mapOver(tp)
- }
-
- /** Note: This map is needed even for non-dependent method types, despite what the name might imply.
- */
- class InstantiateDependentMap(params: List[Symbol], actuals0: List[Type]) extends TypeMap with KeepOnlyTypeConstraints {
- private val actuals = actuals0.toIndexedSeq
- private val existentials = new Array[Symbol](actuals.size)
- def existentialsNeeded: List[Symbol] = existentials.filter(_ ne null).toList
-
- private object StableArg {
- def unapply(param: Symbol) = Arg unapply param map actuals filter (tp =>
- tp.isStable && (tp.typeSymbol != NothingClass)
- )
- }
- private object Arg {
- def unapply(param: Symbol) = Some(params indexOf param) filter (_ >= 0)
- }
-
- def apply(tp: Type): Type = mapOver(tp) match {
- // unsound to replace args by unstable actual #3873
- case SingleType(NoPrefix, StableArg(arg)) => arg
- // (soundly) expand type alias selections on implicit arguments,
- // see depmet_implicit_oopsla* test cases -- typically, `param.isImplicit`
- case tp1 @ TypeRef(SingleType(NoPrefix, Arg(pid)), sym, targs) =>
- val arg = actuals(pid)
- val res = typeRef(arg, sym, targs)
- if (res.typeSymbolDirect.isAliasType) res.dealias else tp1
- // don't return the original `tp`, which may be different from `tp1`,
- // due to dropping annotations
- case tp1 => tp1
- }
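-    // Illustrative behaviour (hypothetical declarations): given  def f(x: AnyRef): x.type  and a
-    // stable argument  a  (say  val a: String = "" ), the dependent result type  x.type  is
-    // instantiated to  a.type ; an unstable actual is never substituted directly (see #3873 above),
-    // the existentialFor machinery below is used instead.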
-
- /* Return the type symbol for referencing a parameter inside the existential quantifier.
- * (Only needed if the actual is unstable.)
- */
- private def existentialFor(pid: Int) = {
- if (existentials(pid) eq null) {
- val param = params(pid)
- existentials(pid) = (
- param.owner.newExistential(param.name.toTypeName append nme.SINGLETON_SUFFIX, param.pos, param.flags)
- setInfo singletonBounds(actuals(pid))
- )
- }
- existentials(pid)
- }
-
- //AM propagate more info to annotations -- this seems a bit ad-hoc... (based on code by spoon)
- override def mapOver(arg: Tree, giveup: ()=>Nothing): Tree = {
- // TODO: this should be simplified; in the stable case, one can
- // probably just use an Ident to the tree.symbol.
- //
- // @PP: That leads to failure here, where stuff no longer has type
- // 'String @Annot("stuff")' but 'String @Annot(x)'.
- //
- // def m(x: String): String @Annot(x) = x
- // val stuff = m("stuff")
- //
- // (TODO cont.) Why an existential in the non-stable case?
- //
- // @PP: In the following:
- //
- // def m = { val x = "three" ; val y: String @Annot(x) = x; y }
- //
- // m is typed as 'String @Annot(x) forSome { val x: String }'.
- //
- // Both examples are from run/constrained-types.scala.
- object treeTrans extends Transformer {
- override def transform(tree: Tree): Tree = tree.symbol match {
- case StableArg(actual) =>
- gen.mkAttributedQualifier(actual, tree.symbol)
- case Arg(pid) =>
- val sym = existentialFor(pid)
- Ident(sym) copyAttrs tree setType typeRef(NoPrefix, sym, Nil)
- case _ =>
- super.transform(tree)
- }
- }
- treeTrans transform arg
- }
- }
-
- object StripAnnotationsMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case AnnotatedType(_, atp, _) =>
- mapOver(atp)
- case tp =>
- mapOver(tp)
- }
- }
-
- /** A map to convert every occurrence of a wildcard type to a fresh
- * type variable */
- object wildcardToTypeVarMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case WildcardType =>
- TypeVar(tp, new TypeConstraint)
- case BoundedWildcardType(bounds) =>
- TypeVar(tp, new TypeConstraint(bounds))
- case _ =>
- mapOver(tp)
- }
- }
-
-  /** A map to convert every occurrence of a type variable to its origin type. */
- object typeVarToOriginMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeVar(origin, _) => origin
- case _ => mapOver(tp)
- }
- }
-
- /** A map to implement the `contains` method. */
- class ContainsCollector(sym: Symbol) extends TypeCollector(false) {
- def traverse(tp: Type) {
- if (!result) {
- tp.normalize match {
- case TypeRef(_, sym1, _) if (sym == sym1) => result = true
- case SingleType(_, sym1) if (sym == sym1) => result = true
- case _ => mapOver(tp)
- }
- }
- }
-
- override def mapOver(arg: Tree) = {
- for (t <- arg) {
- traverse(t.tpe)
- if (t.symbol == sym)
- result = true
- }
- arg
- }
- }
-
- /** A map to implement the `contains` method. */
- class ContainsTypeCollector(t: Type) extends TypeCollector(false) {
- def traverse(tp: Type) {
- if (!result) {
- if (tp eq t) result = true
- else mapOver(tp)
- }
- }
- override def mapOver(arg: Tree) = {
- for (t <- arg)
- traverse(t.tpe)
-
- arg
- }
- }
-
- /** A map to implement the `filter` method. */
- class FilterTypeCollector(p: Type => Boolean) extends TypeCollector[List[Type]](Nil) {
- def withFilter(q: Type => Boolean) = new FilterTypeCollector(tp => p(tp) && q(tp))
-
- override def collect(tp: Type) = super.collect(tp).reverse
-
- def traverse(tp: Type) {
- if (p(tp)) result ::= tp
- mapOver(tp)
- }
- }
-
- /** A map to implement the `collect` method. */
- class CollectTypeCollector[T](pf: PartialFunction[Type, T]) extends TypeCollector[List[T]](Nil) {
- override def collect(tp: Type) = super.collect(tp).reverse
-
- def traverse(tp: Type) {
- if (pf.isDefinedAt(tp)) result ::= pf(tp)
- mapOver(tp)
- }
- }
-
- class ForEachTypeTraverser(f: Type => Unit) extends TypeTraverser {
- def traverse(tp: Type) {
- f(tp)
- mapOver(tp)
- }
- }
-
-  /** A map to implement the `find` method. */
- class FindTypeCollector(p: Type => Boolean) extends TypeCollector[Option[Type]](None) {
- def traverse(tp: Type) {
- if (result.isEmpty) {
- if (p(tp)) result = Some(tp)
- mapOver(tp)
- }
- }
- }
-
-  /** A collector to implement `isErroneous`: does the type contain an error type anywhere? */
- object ErroneousCollector extends TypeCollector(false) {
- def traverse(tp: Type) {
- if (!result) {
- result = tp.isError
- mapOver(tp)
- }
- }
- }
+ def singletonBounds(hi: Type) = TypeBounds.upper(intersectionType(List(hi, SingletonClass.tpe)))
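+  // For example (illustrative): singletonBounds(typeOf[String]) yields the bounds
+  //   >: Nothing <: String with Singleton
+  // i.e. bounds for an existential that may only be instantiated to a stable singleton type.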
/**
* A more persistent version of `Type#memberType` which does not require
@@ -5048,204 +3797,10 @@ trait Types extends api.Types { self: SymbolTable =>
result
}
- /** The most deeply nested owner that contains all the symbols
-   *  of ThisType or prefixless TypeRef/SingleType occurrences in the given type.
- */
- private def commonOwner(t: Type): Symbol = commonOwner(t :: Nil)
-
- /** The most deeply nested owner that contains all the symbols
-   *  of ThisType or prefixless TypeRef/SingleType occurrences in the given
-   *  list of types.
- */
- private def commonOwner(tps: List[Type]): Symbol = {
- if (tps.isEmpty) NoSymbol
- else {
- commonOwnerMap.clear()
- tps foreach (commonOwnerMap traverse _)
- if (commonOwnerMap.result ne null) commonOwnerMap.result else NoSymbol
- }
- }
-
- protected def commonOwnerMap: CommonOwnerMap = commonOwnerMapObj
-
- protected class CommonOwnerMap extends TypeTraverserWithResult[Symbol] {
- var result: Symbol = _
-
- def clear() { result = null }
-
- private def register(sym: Symbol) {
- // First considered type is the trivial result.
- if ((result eq null) || (sym eq NoSymbol))
- result = sym
- else
- while ((result ne NoSymbol) && (result ne sym) && !(sym isNestedIn result))
- result = result.owner
- }
- def traverse(tp: Type) = tp.normalize match {
- case ThisType(sym) => register(sym)
- case TypeRef(NoPrefix, sym, args) => register(sym.owner) ; args foreach traverse
- case SingleType(NoPrefix, sym) => register(sym.owner)
- case _ => mapOver(tp)
- }
- }
-
- private lazy val commonOwnerMapObj = new CommonOwnerMap
-
class MissingAliasControl extends ControlThrowable
val missingAliasException = new MissingAliasControl
class MissingTypeControl extends ControlThrowable
- object adaptToNewRunMap extends TypeMap {
-
- private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = {
- if (phase.flatClasses || sym.isRootSymbol || (pre eq NoPrefix) || (pre eq NoType) || sym.isPackageClass)
- sym
- else if (sym.isModuleClass) {
- val sourceModule1 = adaptToNewRun(pre, sym.sourceModule)
-
- sourceModule1.moduleClass orElse sourceModule1.initialize.moduleClass orElse {
- val msg = "Cannot adapt module class; sym = %s, sourceModule = %s, sourceModule.moduleClass = %s => sourceModule1 = %s, sourceModule1.moduleClass = %s"
- debuglog(msg.format(sym, sym.sourceModule, sym.sourceModule.moduleClass, sourceModule1, sourceModule1.moduleClass))
- sym
- }
- }
- else {
- var rebind0 = pre.findMember(sym.name, BRIDGE, 0, true) orElse {
- if (sym.isAliasType) throw missingAliasException
-          debugwarn(pre+"."+sym+" no longer exists, phase = "+phase)
- throw new MissingTypeControl // For build manager and presentation compiler purposes
- }
- /** The two symbols have the same fully qualified name */
- def corresponds(sym1: Symbol, sym2: Symbol): Boolean =
- sym1.name == sym2.name && (sym1.isPackageClass || corresponds(sym1.owner, sym2.owner))
- if (!corresponds(sym.owner, rebind0.owner)) {
- debuglog("ADAPT1 pre = "+pre+", sym = "+sym.fullLocationString+", rebind = "+rebind0.fullLocationString)
-          val bcs = pre.baseClasses.dropWhile(bc => !corresponds(bc, sym.owner))
- if (bcs.isEmpty)
-            assert(pre.typeSymbol.isRefinementClass, pre) // if pre is a refinement class it might be a structural type => OK to leave it in.
- else
- rebind0 = pre.baseType(bcs.head).member(sym.name)
- debuglog(
- "ADAPT2 pre = " + pre +
- ", bcs.head = " + bcs.head +
- ", sym = " + sym.fullLocationString +
- ", rebind = " + rebind0.fullLocationString
- )
- }
- rebind0.suchThat(sym => sym.isType || sym.isStable) orElse {
- debuglog("" + phase + " " +phase.flatClasses+sym.owner+sym.name+" "+sym.isType)
- throw new MalformedType(pre, sym.nameString)
- }
- }
- }
- def apply(tp: Type): Type = tp match {
- case ThisType(sym) =>
- try {
- val sym1 = adaptToNewRun(sym.owner.thisType, sym)
- if (sym1 == sym) tp else ThisType(sym1)
- } catch {
- case ex: MissingTypeControl =>
- tp
- }
- case SingleType(pre, sym) =>
- if (sym.isPackage) tp
- else {
- val pre1 = this(pre)
- try {
- val sym1 = adaptToNewRun(pre1, sym)
- if ((pre1 eq pre) && (sym1 eq sym)) tp
- else singleType(pre1, sym1)
- } catch {
- case _: MissingTypeControl =>
- tp
- }
- }
- case TypeRef(pre, sym, args) =>
- if (sym.isPackageClass) tp
- else {
- val pre1 = this(pre)
- val args1 = args mapConserve (this)
- try {
- val sym1 = adaptToNewRun(pre1, sym)
- if ((pre1 eq pre) && (sym1 eq sym) && (args1 eq args)/* && sym.isExternal*/) {
- tp
- } else if (sym1 == NoSymbol) {
- debugwarn("adapt fail: "+pre+" "+pre1+" "+sym)
- tp
- } else {
- copyTypeRef(tp, pre1, sym1, args1)
- }
- } catch {
- case ex: MissingAliasControl =>
- apply(tp.dealias)
- case _: MissingTypeControl =>
- tp
- }
- }
- case MethodType(params, restp) =>
- val restp1 = this(restp)
- if (restp1 eq restp) tp
- else copyMethodType(tp, params, restp1)
- case NullaryMethodType(restp) =>
- val restp1 = this(restp)
- if (restp1 eq restp) tp
- else NullaryMethodType(restp1)
- case PolyType(tparams, restp) =>
- val restp1 = this(restp)
- if (restp1 eq restp) tp
- else PolyType(tparams, restp1)
-
- // Lukas: we need to check (together) whether we should also include parameter types
- // of PolyType and MethodType in adaptToNewRun
-
- case ClassInfoType(parents, decls, clazz) =>
- if (clazz.isPackageClass) tp
- else {
- val parents1 = parents mapConserve (this)
- if (parents1 eq parents) tp
- else ClassInfoType(parents1, decls, clazz)
- }
- case RefinedType(parents, decls) =>
- val parents1 = parents mapConserve (this)
- if (parents1 eq parents) tp
- else refinedType(parents1, tp.typeSymbol.owner, decls, tp.typeSymbol.owner.pos)
- case SuperType(_, _) => mapOver(tp)
- case TypeBounds(_, _) => mapOver(tp)
- case TypeVar(_, _) => mapOver(tp)
- case AnnotatedType(_,_,_) => mapOver(tp)
- case NotNullType(_) => mapOver(tp)
- case ExistentialType(_, _) => mapOver(tp)
- case _ => tp
- }
- }
-
- class SubTypePair(val tp1: Type, val tp2: Type) {
- override def hashCode = tp1.hashCode * 41 + tp2.hashCode
- override def equals(other: Any) = other match {
- case stp: SubTypePair =>
- // suspend TypeVars in types compared by =:=,
- // since we don't want to mutate them simply to check whether a subtype test is pending
- // in addition to making subtyping "more correct" for type vars,
- // it should avoid the stackoverflow that's been plaguing us (https://groups.google.com/d/topic/scala-internals/2gHzNjtB4xA/discussion)
- // this method is only called when subtyping hits a recursion threshold (subsametypeRecursions >= LogPendingSubTypesThreshold)
- def suspend(tp: Type) =
- if (tp.isGround) null else suspendTypeVarsInType(tp)
- def revive(suspension: List[TypeVar]) =
- if (suspension ne null) suspension foreach (_.suspended = false)
-
- val suspensions = Array(tp1, stp.tp1, tp2, stp.tp2) map suspend
-
- val sameTypes = (tp1 =:= stp.tp1) && (tp2 =:= stp.tp2)
-
- suspensions foreach revive
-
- sameTypes
- case _ =>
- false
- }
- override def toString = tp1+" <:<? "+tp2
- }
-
// Helper Methods -------------------------------------------------------------
/** The maximum allowable depth of lubs or glbs over types `ts`.
@@ -5267,41 +3822,9 @@ trait Types extends api.Types { self: SymbolTable =>
else if (bd <= 7) td max (bd - 2)
else (td - 1) max (bd - 3)
- /** The maximum depth of type `tp` */
- def typeDepth(tp: Type): Int = tp match {
- case TypeRef(pre, sym, args) =>
- typeDepth(pre) max typeDepth(args) + 1
- case RefinedType(parents, decls) =>
- typeDepth(parents) max typeDepth(decls.toList.map(_.info)) + 1
- case TypeBounds(lo, hi) =>
- typeDepth(lo) max typeDepth(hi)
- case MethodType(paramtypes, result) =>
- typeDepth(result)
- case NullaryMethodType(result) =>
- typeDepth(result)
- case PolyType(tparams, result) =>
- typeDepth(result) max typeDepth(tparams map (_.info)) + 1
- case ExistentialType(tparams, result) =>
- typeDepth(result) max typeDepth(tparams map (_.info)) + 1
- case _ =>
- 1
- }
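-  // For example: typeDepth(Int) == 1, typeDepth(List[Int]) == 2, typeDepth(List[List[Int]]) == 3;
-  // roughly, each level of type application (and each refinement/existential/poly layer) adds one.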
-
- private def maxDepth(tps: List[Type], by: Type => Int): Int = {
- //OPT replaced with tailrecursive function to save on #closures
- // was:
- // var d = 0
- // for (tp <- tps) d = d max by(tp) //!!!OPT!!!
- // d
- def loop(tps: List[Type], acc: Int): Int = tps match {
- case tp :: rest => loop(rest, acc max by(tp))
- case _ => acc
- }
- loop(tps, 0)
- }
-
- private def typeDepth(tps: List[Type]): Int = maxDepth(tps, typeDepth)
- private def baseTypeSeqDepth(tps: List[Type]): Int = maxDepth(tps, _.baseTypeSeqDepth)
+ private def symTypeDepth(syms: List[Symbol]): Int = typeDepth(syms map (_.info))
+ private def typeDepth(tps: List[Type]): Int = maxDepth(tps)
+ private def baseTypeSeqDepth(tps: List[Type]): Int = maxBaseTypeSeqDepth(tps)
/** Is intersection of given types populated? That is,
* for all types tp1, tp2 in intersection
@@ -5314,32 +3837,35 @@ trait Types extends api.Types { self: SymbolTable =>
def isPopulated(tp1: Type, tp2: Type): Boolean = {
def isConsistent(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
- assert(sym1 == sym2)
- pre1 =:= pre2 &&
- forall3(args1, args2, sym1.typeParams) { (arg1, arg2, tparam) =>
- //if (tparam.variance == 0 && !(arg1 =:= arg2)) Console.println("inconsistent: "+arg1+"!="+arg2)//DEBUG
- if (tparam.variance == 0) arg1 =:= arg2
- else if (arg1.isInstanceOf[TypeVar])
- // if left-hand argument is a typevar, make it compatible with variance
- // this is for more precise pattern matching
- // todo: work this in the spec of this method
- // also: think what happens if there are embedded typevars?
- if (tparam.variance < 0) arg1 <:< arg2 else arg2 <:< arg1
- else true
- }
+ assert(sym1 == sym2, (sym1, sym2))
+ ( pre1 =:= pre2
+ && forall3(args1, args2, sym1.typeParams) { (arg1, arg2, tparam) =>
+ // if left-hand argument is a typevar, make it compatible with variance
+ // this is for more precise pattern matching
+ // todo: work this in the spec of this method
+ // also: think what happens if there are embedded typevars?
+ if (tparam.variance.isInvariant)
+ arg1 =:= arg2
+ else !arg1.isInstanceOf[TypeVar] || {
+ if (tparam.variance.isContravariant) arg1 <:< arg2
+ else arg2 <:< arg1
+ }
+ }
+ )
case (et: ExistentialType, _) =>
et.withTypeVars(isConsistent(_, tp2))
case (_, et: ExistentialType) =>
et.withTypeVars(isConsistent(tp1, _))
}
- def check(tp1: Type, tp2: Type) =
+ def check(tp1: Type, tp2: Type) = (
if (tp1.typeSymbol.isClass && tp1.typeSymbol.hasFlag(FINAL))
tp1 <:< tp2 || isNumericValueClass(tp1.typeSymbol) && isNumericValueClass(tp2.typeSymbol)
else tp1.baseClasses forall (bc =>
tp2.baseTypeIndex(bc) < 0 || isConsistent(tp1.baseType(bc), tp2.baseType(bc)))
+ )
-    check(tp1, tp2)/* && check(tp2, tp1)*/ // need to investigate why this can't be made symmetric -- neg/gadts1 fails, and run/existials also.
+ check(tp1, tp2) && check(tp2, tp1)
}
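+  // For example (illustrative): isPopulated(typeOf[String], typeOf[List[Int]]) is false, since
+  // String is a final class and not a List; for two unrelated traits A and B it is true, because
+  // some class could still extend both.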
/** Does a pattern of type `patType` need an outer test when executed against
@@ -5382,82 +3908,6 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
- private var subsametypeRecursions: Int = 0
-
- private def isUnifiable(pre1: Type, pre2: Type) =
- (beginsWithTypeVarOrIsRefined(pre1) || beginsWithTypeVarOrIsRefined(pre2)) && (pre1 =:= pre2)
-
- /** Returns true iff we are past phase specialize,
- * sym1 and sym2 are two existential skolems with equal names and bounds,
- * and pre1 and pre2 are equal prefixes
- */
- private def isSameSpecializedSkolem(sym1: Symbol, sym2: Symbol, pre1: Type, pre2: Type) = {
- sym1.isExistentialSkolem && sym2.isExistentialSkolem &&
- sym1.name == sym2.name &&
- phase.specialized &&
- sym1.info =:= sym2.info &&
- pre1 =:= pre2
- }
-
- private def isSubPre(pre1: Type, pre2: Type, sym: Symbol) =
- if ((pre1 ne pre2) && (pre1 ne NoPrefix) && (pre2 ne NoPrefix) && pre1 <:< pre2) {
- if (settings.debug.value) println(s"new isSubPre $sym: $pre1 <:< $pre2")
- true
- } else
- false
-
- private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean =
- if (sym1 == sym2) sym1.hasPackageFlag || sym1.owner.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2
- else (sym1.name == sym2.name) && isUnifiable(pre1, pre2)
-
- /** Do `tp1` and `tp2` denote equivalent types? */
- def isSameType(tp1: Type, tp2: Type): Boolean = try {
- if (Statistics.canEnable) Statistics.incCounter(sametypeCount)
- subsametypeRecursions += 1
- //OPT cutdown on Function0 allocation
- //was:
-// undoLog undoUnless {
-// isSameType1(tp1, tp2)
-// }
-
- undoLog.lock()
- try {
- val before = undoLog.log
- var result = false
-
- try result = {
- isSameType1(tp1, tp2)
- } finally if (!result) undoLog.undoTo(before)
- result
- } finally undoLog.unlock()
- } finally {
- subsametypeRecursions -= 1
- // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
- // it doesn't help to keep separate recursion counts for the three methods that now share it
- // if (subsametypeRecursions == 0) undoLog.clear()
- }
-
- def isDifferentType(tp1: Type, tp2: Type): Boolean = try {
- subsametypeRecursions += 1
- undoLog undo { // undo type constraints that arise from operations in this block
- !isSameType1(tp1, tp2)
- }
- } finally {
- subsametypeRecursions -= 1
- // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
- // it doesn't help to keep separate recursion counts for the three methods that now share it
- // if (subsametypeRecursions == 0) undoLog.clear()
- }
-
- def isDifferentTypeConstructor(tp1: Type, tp2: Type): Boolean = tp1 match {
- case TypeRef(pre1, sym1, _) =>
- tp2 match {
- case TypeRef(pre2, sym2, _) => sym1 != sym2 || isDifferentType(pre1, pre2)
- case _ => true
- }
- case _ => true
- }
-
def normalizePlus(tp: Type) =
if (isRawType(tp)) rawToExistential(tp)
else tp.normalize
@@ -5472,288 +3922,7 @@ trait Types extends api.Types { self: SymbolTable =>
case _ => tp.normalize
}
*/
-/*
- private def isSameType0(tp1: Type, tp2: Type): Boolean = {
- if (tp1 eq tp2) return true
- ((tp1, tp2) match {
- case (ErrorType, _) => true
- case (WildcardType, _) => true
- case (_, ErrorType) => true
- case (_, WildcardType) => true
-
- case (NoType, _) => false
- case (NoPrefix, _) => tp2.typeSymbol.isPackageClass
- case (_, NoType) => false
- case (_, NoPrefix) => tp1.typeSymbol.isPackageClass
-
- case (ThisType(sym1), ThisType(sym2))
- if (sym1 == sym2) =>
- true
- case (SingleType(pre1, sym1), SingleType(pre2, sym2))
- if (equalSymsAndPrefixes(sym1, pre1, sym2, pre2)) =>
- true
-/*
- case (SingleType(pre1, sym1), ThisType(sym2))
- if (sym1.isModule &&
- sym1.moduleClass == sym2 &&
- pre1 =:= sym2.owner.thisType) =>
- true
- case (ThisType(sym1), SingleType(pre2, sym2))
- if (sym2.isModule &&
- sym2.moduleClass == sym1 &&
- pre2 =:= sym1.owner.thisType) =>
- true
-*/
- case (ConstantType(value1), ConstantType(value2)) =>
- value1 == value2
- case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
- equalSymsAndPrefixes(sym1, pre1, sym2, pre2) &&
- ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
- isSameTypes(args1, args2))
- // @M! normalize reduces higher-kinded case to PolyType's
- case (RefinedType(parents1, ref1), RefinedType(parents2, ref2)) =>
- def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
- sym2 =>
- var e1 = s1.lookupEntry(sym2.name)
- (e1 ne null) && {
- val substSym = sym2.info.substThis(sym2.owner, e1.sym.owner.thisType)
- var isEqual = false
- while (!isEqual && (e1 ne null)) {
- isEqual = e1.sym.info =:= substSym
- e1 = s1.lookupNextEntry(e1)
- }
- isEqual
- }
- }
- //Console.println("is same? " + tp1 + " " + tp2 + " " + tp1.typeSymbol.owner + " " + tp2.typeSymbol.owner)//DEBUG
- isSameTypes(parents1, parents2) && isSubScope(ref1, ref2) && isSubScope(ref2, ref1)
- case (MethodType(params1, res1), MethodType(params2, res2)) =>
- // new dependent types: probably fix this, use substSym as done for PolyType
- (isSameTypes(tp1.paramTypes, tp2.paramTypes) &&
- res1 =:= res2 &&
- tp1.isImplicit == tp2.isImplicit)
- case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
- // assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
- (tparams1.length == tparams2.length) && (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && // @M looks like it might suffer from same problem as #2210
- res1 =:= res2.substSym(tparams2, tparams1)
- case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) =>
- (tparams1.length == tparams2.length) && (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && // @M looks like it might suffer from same problem as #2210
- res1 =:= res2.substSym(tparams2, tparams1)
- case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) =>
- lo1 =:= lo2 && hi1 =:= hi2
- case (BoundedWildcardType(bounds), _) =>
- bounds containsType tp2
- case (_, BoundedWildcardType(bounds)) =>
- bounds containsType tp1
- case (tv @ TypeVar(_,_), tp) =>
- tv.registerTypeEquality(tp, true)
- case (tp, tv @ TypeVar(_,_)) =>
- tv.registerTypeEquality(tp, false)
- case (AnnotatedType(_,_,_), _) =>
- annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
- case (_, AnnotatedType(_,_,_)) =>
- annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
- case (_: SingletonType, _: SingletonType) =>
- var origin1 = tp1
- while (origin1.underlying.isInstanceOf[SingletonType]) {
- assert(origin1 ne origin1.underlying, origin1)
- origin1 = origin1.underlying
- }
- var origin2 = tp2
- while (origin2.underlying.isInstanceOf[SingletonType]) {
- assert(origin2 ne origin2.underlying, origin2)
- origin2 = origin2.underlying
- }
- ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2)
- case _ =>
- false
- }) || {
- val tp1n = normalizePlus(tp1)
- val tp2n = normalizePlus(tp2)
- ((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n)
- }
- }
-*/
- private def isSameType1(tp1: Type, tp2: Type): Boolean = {
- if ((tp1 eq tp2) ||
- (tp1 eq ErrorType) || (tp1 eq WildcardType) ||
- (tp2 eq ErrorType) || (tp2 eq WildcardType))
- true
- else if ((tp1 eq NoType) || (tp2 eq NoType))
- false
- else if (tp1 eq NoPrefix) // !! I do not see how this would be warranted by the spec
- tp2.typeSymbol.isPackageClass
- else if (tp2 eq NoPrefix) // !! I do not see how this would be warranted by the spec
- tp1.typeSymbol.isPackageClass
- else {
- isSameType2(tp1, tp2) || {
- val tp1n = normalizePlus(tp1)
- val tp2n = normalizePlus(tp2)
- ((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n)
- }
- }
- }
- def isSameType2(tp1: Type, tp2: Type): Boolean = {
- tp1 match {
- case tr1: TypeRef =>
- tp2 match {
- case tr2: TypeRef =>
- return (equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre) &&
- ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
- isSameTypes(tr1.args, tr2.args))) ||
- ((tr1.pre, tr2.pre) match {
- case (tv @ TypeVar(_,_), _) => tv.registerTypeSelection(tr1.sym, tr2)
- case (_, tv @ TypeVar(_,_)) => tv.registerTypeSelection(tr2.sym, tr1)
- case _ => false
- })
- case _: SingleType =>
- return isSameType2(tp2, tp1) // put singleton type on the left, caught below
- case _ =>
- }
- case tt1: ThisType =>
- tp2 match {
- case tt2: ThisType =>
- if (tt1.sym == tt2.sym) return true
- case _ =>
- }
- case st1: SingleType =>
- tp2 match {
- case st2: SingleType =>
- if (equalSymsAndPrefixes(st1.sym, st1.pre, st2.sym, st2.pre)) return true
- case TypeRef(pre2, sym2, Nil) =>
- if (sym2.isModuleClass && equalSymsAndPrefixes(st1.sym, st1.pre, sym2.sourceModule, pre2)) return true
- case _ =>
- }
- case ct1: ConstantType =>
- tp2 match {
- case ct2: ConstantType =>
- return (ct1.value == ct2.value)
- case _ =>
- }
- case rt1: RefinedType =>
- tp2 match {
- case rt2: RefinedType => //
- def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
- sym2 =>
- var e1 = s1.lookupEntry(sym2.name)
- (e1 ne null) && {
- val substSym = sym2.info.substThis(sym2.owner, e1.sym.owner)
- var isEqual = false
- while (!isEqual && (e1 ne null)) {
- isEqual = e1.sym.info =:= substSym
- e1 = s1.lookupNextEntry(e1)
- }
- isEqual
- }
- }
- //Console.println("is same? " + tp1 + " " + tp2 + " " + tp1.typeSymbol.owner + " " + tp2.typeSymbol.owner)//DEBUG
- return isSameTypes(rt1.parents, rt2.parents) && {
- val decls1 = rt1.decls
- val decls2 = rt2.decls
- isSubScope(decls1, decls2) && isSubScope(decls2, decls1)
- }
- case _ =>
- }
- case mt1: MethodType =>
- tp2 match {
- case mt2: MethodType =>
- return isSameTypes(mt1.paramTypes, mt2.paramTypes) &&
- mt1.resultType =:= mt2.resultType.substSym(mt2.params, mt1.params) &&
- mt1.isImplicit == mt2.isImplicit
- // note: no case NullaryMethodType(restpe) => return mt1.params.isEmpty && mt1.resultType =:= restpe
- case _ =>
- }
- case NullaryMethodType(restpe1) =>
- tp2 match {
- // note: no case mt2: MethodType => return mt2.params.isEmpty && restpe =:= mt2.resultType
- case NullaryMethodType(restpe2) =>
- return restpe1 =:= restpe2
- case _ =>
- }
- case PolyType(tparams1, res1) =>
- tp2 match {
- case PolyType(tparams2, res2) =>
-// assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
- // @M looks like it might suffer from same problem as #2210
- return (
- (sameLength(tparams1, tparams2)) && // corresponds does not check length of two sequences before checking the predicate
- (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
- res1 =:= res2.substSym(tparams2, tparams1)
- )
- case _ =>
- }
- case ExistentialType(tparams1, res1) =>
- tp2 match {
- case ExistentialType(tparams2, res2) =>
- // @M looks like it might suffer from same problem as #2210
- return (
- // corresponds does not check length of two sequences before checking the predicate -- faster & needed to avoid crasher in #2956
- sameLength(tparams1, tparams2) &&
- (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
- res1 =:= res2.substSym(tparams2, tparams1)
- )
- case _ =>
- }
- case TypeBounds(lo1, hi1) =>
- tp2 match {
- case TypeBounds(lo2, hi2) =>
- return lo1 =:= lo2 && hi1 =:= hi2
- case _ =>
- }
- case BoundedWildcardType(bounds) =>
- return bounds containsType tp2
- case _ =>
- }
- tp2 match {
- case BoundedWildcardType(bounds) =>
- return bounds containsType tp1
- case _ =>
- }
- tp1 match {
- case tv @ TypeVar(_,_) =>
- return tv.registerTypeEquality(tp2, true)
- case _ =>
- }
- tp2 match {
- case tv @ TypeVar(_,_) =>
- return tv.registerTypeEquality(tp1, false)
- case _ =>
- }
- tp1 match {
- case _: AnnotatedType =>
- return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
- case _ =>
- }
- tp2 match {
- case _: AnnotatedType =>
- return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
- case _ =>
- }
- tp1 match {
- case _: SingletonType =>
- tp2 match {
- case _: SingletonType =>
- def chaseDealiasedUnderlying(tp: Type): Type = {
- var origin = tp
- var next = origin.underlying.dealias
- while (next.isInstanceOf[SingletonType]) {
- assert(origin ne next, origin)
- origin = next
- next = origin.underlying.dealias
- }
- origin
- }
- val origin1 = chaseDealiasedUnderlying(tp1)
- val origin2 = chaseDealiasedUnderlying(tp2)
- ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2)
- case _ =>
- false
- }
- case _ =>
- false
- }
- }
/** Are `tps1` and `tps2` lists of pairwise equivalent types? */
def isSameTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ =:= _)
@@ -5771,64 +3940,9 @@ trait Types extends api.Types { self: SymbolTable =>
*/
final def hasLength(xs: List[_], len: Int) = xs.lengthCompare(len) == 0
- private val pendingSubTypes = new mutable.HashSet[SubTypePair]
private var basetypeRecursions: Int = 0
private val pendingBaseTypes = new mutable.HashSet[Type]
- def isSubType(tp1: Type, tp2: Type): Boolean = isSubType(tp1, tp2, AnyDepth)
-
- def isSubType(tp1: Type, tp2: Type, depth: Int): Boolean = try {
- subsametypeRecursions += 1
-
- //OPT cutdown on Function0 allocation
- //was:
-// undoLog undoUnless { // if subtype test fails, it should not affect constraints on typevars
-// if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
-// val p = new SubTypePair(tp1, tp2)
-// if (pendingSubTypes(p))
-// false
-// else
-// try {
-// pendingSubTypes += p
-// isSubType2(tp1, tp2, depth)
-// } finally {
-// pendingSubTypes -= p
-// }
-// } else {
-// isSubType2(tp1, tp2, depth)
-// }
-// }
-
- undoLog.lock()
- try {
- val before = undoLog.log
- var result = false
-
- try result = { // if subtype test fails, it should not affect constraints on typevars
- if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
- val p = new SubTypePair(tp1, tp2)
- if (pendingSubTypes(p))
- false
- else
- try {
- pendingSubTypes += p
- isSubType2(tp1, tp2, depth)
- } finally {
- pendingSubTypes -= p
- }
- } else {
- isSubType2(tp1, tp2, depth)
- }
- } finally if (!result) undoLog.undoTo(before)
-
- result
- } finally undoLog.unlock()
- } finally {
- subsametypeRecursions -= 1
- // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
- // it doesn't help to keep separate recursion counts for the three methods that now share it
- // if (subsametypeRecursions == 0) undoLog.clear()
- }
/** Does this type have a prefix that begins with a type variable,
* or is it a refinement type? For type prefixes that fulfil this condition,
@@ -5846,18 +3960,6 @@ trait Types extends api.Types { self: SymbolTable =>
false
}
- @deprecated("The compiler doesn't use this so you shouldn't either - it will be removed", "2.10.0")
- def instTypeVar(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) =>
- copyTypeRef(tp, instTypeVar(pre), sym, args)
- case SingleType(pre, sym) =>
- singleType(instTypeVar(pre), sym)
- case TypeVar(_, constr) =>
- instTypeVar(constr.inst)
- case _ =>
- tp
- }
-
def isErrorOrWildcard(tp: Type) = (tp eq ErrorType) || (tp eq WildcardType)
def isSingleType(tp: Type) = tp match {
@@ -5895,10 +3997,11 @@ trait Types extends api.Types { self: SymbolTable =>
* types which are used internally in type applications and
* types which are not.
*/
+ /**** Not used right now, but kept around to document which Types
+ * land in which bucket.
private def isInternalTypeNotUsedAsTypeArg(tp: Type): Boolean = tp match {
case AntiPolyType(pre, targs) => true
case ClassInfoType(parents, defs, clazz) => true
- case DeBruijnIndex(level, index, args) => true
case ErasedValueType(tref) => true
case NoPrefix => true
case NoType => true
@@ -5906,6 +4009,7 @@ trait Types extends api.Types { self: SymbolTable =>
case TypeBounds(lo, hi) => true
case _ => false
}
+ ****/
private def isInternalTypeUsedAsTypeArg(tp: Type): Boolean = tp match {
case WildcardType => true
case BoundedWildcardType(_) => true
@@ -5951,7 +4055,7 @@ trait Types extends api.Types { self: SymbolTable =>
* useful as documentation; it is likely that !isNonValueType(tp)
* will serve better than isValueType(tp).
*/
- def isValueType(tp: Type) = isValueElseNonValue(tp)
+ /** def isValueType(tp: Type) = isValueElseNonValue(tp) */
/** SLS 3.3, Non-Value Types
* Is the given type definitely a non-value type, as defined in SLS 3.3?
@@ -5962,7 +4066,7 @@ trait Types extends api.Types { self: SymbolTable =>
* not designated non-value types because there is code which depends on using
* them as type arguments, but their precise status is unclear.
*/
- def isNonValueType(tp: Type) = !isValueElseNonValue(tp)
+ /** def isNonValueType(tp: Type) = !isValueElseNonValue(tp) */
def isNonRefinementClassType(tpe: Type) = tpe match {
case SingleType(_, sym) => sym.isModuleClass
@@ -5971,263 +4075,20 @@ trait Types extends api.Types { self: SymbolTable =>
case _ => false
}
- // @assume tp1.isHigherKinded || tp2.isHigherKinded
- def isHKSubType0(tp1: Type, tp2: Type, depth: Int): Boolean = (
- tp1.typeSymbol == NothingClass
- ||
- tp2.typeSymbol == AnyClass // @M Any and Nothing are super-type resp. subtype of every well-kinded type
- || // @M! normalize reduces higher-kinded case to PolyType's
- ((tp1.normalize.withoutAnnotations , tp2.normalize.withoutAnnotations) match {
- case (PolyType(tparams1, res1), PolyType(tparams2, res2)) => // @assume tp1.isHigherKinded && tp2.isHigherKinded (as they were both normalized to PolyType)
- sameLength(tparams1, tparams2) && {
- if (tparams1.head.owner.isMethod) { // fast-path: polymorphic method type -- type params cannot be captured
- (tparams1 corresponds tparams2)((p1, p2) => p2.info.substSym(tparams2, tparams1) <:< p1.info) &&
- res1 <:< res2.substSym(tparams2, tparams1)
- } else { // normalized higher-kinded type
- //@M for an example of why we need to generate fresh symbols, see neg/tcpoly_ticket2101.scala
- val tpsFresh = cloneSymbols(tparams1)
-
- (tparams1 corresponds tparams2)((p1, p2) =>
- p2.info.substSym(tparams2, tpsFresh) <:< p1.info.substSym(tparams1, tpsFresh)) &&
- res1.substSym(tparams1, tpsFresh) <:< res2.substSym(tparams2, tpsFresh)
-
- //@M the forall in the previous test could be optimised to the following,
- // but not worth the extra complexity since it only shaves 1s from quick.comp
- // (List.forall2(tpsFresh/*optimisation*/, tparams2)((p1, p2) =>
- // p2.info.substSym(tparams2, tpsFresh) <:< p1.info /*optimisation, == (p1 from tparams1).info.substSym(tparams1, tpsFresh)*/) &&
- // this optimisation holds because inlining cloneSymbols in `val tpsFresh = cloneSymbols(tparams1)` gives:
- // val tpsFresh = tparams1 map (_.cloneSymbol)
- // for (tpFresh <- tpsFresh) tpFresh.setInfo(tpFresh.info.substSym(tparams1, tpsFresh))
- }
- } && annotationsConform(tp1.normalize, tp2.normalize)
- case (_, _) => false // @assume !tp1.isHigherKinded || !tp2.isHigherKinded
- // --> thus, cannot be subtypes (Any/Nothing has already been checked)
- }))
-
def isSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol], depth: Int): Boolean = {
- def isSubArg(t1: Type, t2: Type, variance: Int) =
- (variance > 0 || isSubType(t2, t1, depth)) &&
- (variance < 0 || isSubType(t1, t2, depth))
- corresponds3(tps1, tps2, tparams map (_.variance))(isSubArg)
- }
-
- def differentOrNone(tp1: Type, tp2: Type) = if (tp1 eq tp2) NoType else tp1
-
- /** Does type `tp1` conform to `tp2`? */
- private def isSubType2(tp1: Type, tp2: Type, depth: Int): Boolean = {
- if ((tp1 eq tp2) || isErrorOrWildcard(tp1) || isErrorOrWildcard(tp2)) return true
- if ((tp1 eq NoType) || (tp2 eq NoType)) return false
- if (tp1 eq NoPrefix) return (tp2 eq NoPrefix) || tp2.typeSymbol.isPackageClass // !! I do not see how the "isPackageClass" would be warranted by the spec
- if (tp2 eq NoPrefix) return tp1.typeSymbol.isPackageClass
- if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2)) return tp1 =:= tp2
- if (tp1.isHigherKinded || tp2.isHigherKinded) return isHKSubType0(tp1, tp2, depth)
-
- /** First try, on the right:
- * - unwrap Annotated types, BoundedWildcardTypes,
- * - bind TypeVars on the right, if lhs is not Annotated nor BoundedWildcard
- * - handle common cases for first-kind TypeRefs on both sides as a fast path.
- */
- def firstTry = tp2 match {
- // fast path: two typerefs, none of them HK
- case tr2: TypeRef =>
- tp1 match {
- case tr1: TypeRef =>
- val sym1 = tr1.sym
- val sym2 = tr2.sym
- val pre1 = tr1.pre
- val pre2 = tr2.pre
- (((if (sym1 == sym2) phase.erasedTypes || sym1.owner.hasPackageFlag || isSubType(pre1, pre2, depth)
- else (sym1.name == sym2.name && !sym1.isModuleClass && !sym2.isModuleClass &&
- (isUnifiable(pre1, pre2) ||
- isSameSpecializedSkolem(sym1, sym2, pre1, pre2) ||
- sym2.isAbstractType && isSubPre(pre1, pre2, sym2)))) &&
- isSubArgs(tr1.args, tr2.args, sym1.typeParams, depth))
- ||
- sym2.isClass && {
- val base = tr1 baseType sym2
- (base ne tr1) && isSubType(base, tr2, depth)
- }
- ||
- thirdTryRef(tr1, tr2))
- case _ =>
- secondTry
- }
- case AnnotatedType(_, _, _) =>
- isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) &&
- annotationsConform(tp1, tp2)
- case BoundedWildcardType(bounds) =>
- isSubType(tp1, bounds.hi, depth)
- case tv2 @ TypeVar(_, constr2) =>
- tp1 match {
- case AnnotatedType(_, _, _) | BoundedWildcardType(_) =>
- secondTry
- case _ =>
- tv2.registerBound(tp1, true)
- }
- case _ =>
- secondTry
- }
-
- /** Second try, on the left:
- * - unwrap AnnotatedTypes, BoundedWildcardTypes,
- * - bind typevars,
- * - handle existential types by skolemization.
- */
- def secondTry = tp1 match {
- case AnnotatedType(_, _, _) =>
- isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) &&
- annotationsConform(tp1, tp2)
- case BoundedWildcardType(bounds) =>
- isSubType(tp1.bounds.lo, tp2, depth)
- case tv @ TypeVar(_,_) =>
- tv.registerBound(tp2, false)
- case ExistentialType(_, _) =>
- try {
- skolemizationLevel += 1
- isSubType(tp1.skolemizeExistential, tp2, depth)
- } finally {
- skolemizationLevel -= 1
- }
- case _ =>
- thirdTry
- }
-
- def thirdTryRef(tp1: Type, tp2: TypeRef): Boolean = {
- val sym2 = tp2.sym
- sym2 match {
- case NotNullClass => tp1.isNotNull
- case SingletonClass => tp1.isStable || fourthTry
- case _: ClassSymbol =>
- if (isRaw(sym2, tp2.args))
- isSubType(tp1, rawToExistential(tp2), depth)
- else if (sym2.name == tpnme.REFINE_CLASS_NAME)
- isSubType(tp1, sym2.info, depth)
- else
- fourthTry
- case _: TypeSymbol =>
- if (sym2 hasFlag DEFERRED) {
- val tp2a = tp2.bounds.lo
- isDifferentTypeConstructor(tp2, tp2a) &&
- isSubType(tp1, tp2a, depth) ||
- fourthTry
- } else {
- isSubType(tp1.normalize, tp2.normalize, depth)
- }
- case _ =>
- fourthTry
- }
- }
-
- /** Third try, on the right:
- * - decompose refined types.
- * - handle typerefs, existentials, and notnull types.
- * - handle left+right method types, polytypes, typebounds
- */
- def thirdTry = tp2 match {
- case tr2: TypeRef =>
- thirdTryRef(tp1, tr2)
- case rt2: RefinedType =>
- (rt2.parents forall (isSubType(tp1, _, depth))) &&
- (rt2.decls forall (specializesSym(tp1, _, depth)))
- case et2: ExistentialType =>
- et2.withTypeVars(isSubType(tp1, _, depth), depth) || fourthTry
- case nn2: NotNullType =>
- tp1.isNotNull && isSubType(tp1, nn2.underlying, depth)
- case mt2: MethodType =>
- tp1 match {
- case mt1 @ MethodType(params1, res1) =>
- val params2 = mt2.params
- val res2 = mt2.resultType
- (sameLength(params1, params2) &&
- mt1.isImplicit == mt2.isImplicit &&
- matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
- isSubType(res1.substSym(params1, params2), res2, depth))
- // TODO: if mt1.params.isEmpty, consider NullaryMethodType?
- case _ =>
- false
- }
- case pt2 @ NullaryMethodType(_) =>
- tp1 match {
- // TODO: consider MethodType mt for which mt.params.isEmpty??
- case pt1 @ NullaryMethodType(_) =>
- isSubType(pt1.resultType, pt2.resultType, depth)
- case _ =>
- false
- }
- case TypeBounds(lo2, hi2) =>
- tp1 match {
- case TypeBounds(lo1, hi1) =>
- isSubType(lo2, lo1, depth) && isSubType(hi1, hi2, depth)
- case _ =>
- false
- }
- case _ =>
- fourthTry
- }
-
- /** Fourth try, on the left:
- * - handle typerefs, refined types, notnull and singleton types.
- */
- def fourthTry = tp1 match {
- case tr1 @ TypeRef(pre1, sym1, _) =>
- sym1 match {
- case NothingClass => true
- case NullClass =>
- tp2 match {
- case TypeRef(_, sym2, _) =>
- containsNull(sym2)
- case _ =>
- isSingleType(tp2) && isSubType(tp1, tp2.widen, depth)
- }
- case _: ClassSymbol =>
- if (isRaw(sym1, tr1.args))
- isSubType(rawToExistential(tp1), tp2, depth)
- else if (sym1.isModuleClass) tp2 match {
- case SingleType(pre2, sym2) => equalSymsAndPrefixes(sym1.sourceModule, pre1, sym2, pre2)
- case _ => false
- }
- else if (sym1.isRefinementClass)
- isSubType(sym1.info, tp2, depth)
- else false
-
- case _: TypeSymbol =>
- if (sym1 hasFlag DEFERRED) {
- val tp1a = tp1.bounds.hi
- isDifferentTypeConstructor(tp1, tp1a) && isSubType(tp1a, tp2, depth)
- } else {
- isSubType(tp1.normalize, tp2.normalize, depth)
- }
- case _ =>
- false
- }
- case RefinedType(parents1, _) =>
- parents1 exists (isSubType(_, tp2, depth))
- case _: SingletonType | _: NotNullType =>
- isSubType(tp1.underlying, tp2, depth)
- case _ =>
- false
- }
+ def isSubArg(t1: Type, t2: Type, variance: Variance) = (
+ (variance.isContravariant || isSubType(t1, t2, depth))
+ && (variance.isCovariant || isSubType(t2, t1, depth))
+ )
- firstTry
+ corresponds3(tps1, tps2, tparams map (_.variance))(isSubArg)
}
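// Editorial sketch (not part of the patch): a self-contained model of the
// variance-aware argument check in isSubArgs above. MiniVariance, miniIsSubArg
// and the toy subtype relation are hypothetical stand-ins for the internal API;
// they only illustrate the rule encoded by isSubArg: covariant positions need
// t1 <: t2, contravariant positions need t2 <: t1, invariant positions need both.
object IsSubArgSketch {
  sealed trait MiniVariance { def isCovariant: Boolean; def isContravariant: Boolean }
  case object Cov extends MiniVariance { val isCovariant = true;  val isContravariant = false }
  case object Con extends MiniVariance { val isCovariant = false; val isContravariant = true  }
  case object Inv extends MiniVariance { val isCovariant = false; val isContravariant = false }

  // toy subtype relation over a three-element hierarchy: String <: AnyRef <: Any
  private val subtypes = Set("String" -> "AnyRef", "String" -> "Any", "AnyRef" -> "Any")
  def isSub(t1: String, t2: String): Boolean = t1 == t2 || subtypes((t1, t2))

  // mirrors isSubArg: short-circuit the direction that the variance makes irrelevant
  def miniIsSubArg(t1: String, t2: String, v: MiniVariance): Boolean =
    (v.isContravariant || isSub(t1, t2)) && (v.isCovariant || isSub(t2, t1))

  def main(args: Array[String]): Unit = {
    println(miniIsSubArg("String", "AnyRef", Cov)) // true:  List[String] <: List[AnyRef]
    println(miniIsSubArg("String", "AnyRef", Con)) // false: a consumer of Strings is not a consumer of AnyRefs
    println(miniIsSubArg("String", "AnyRef", Inv)) // false: invariant arguments must match both ways
  }
}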
- private def containsNull(sym: Symbol): Boolean =
+ protected[internal] def containsNull(sym: Symbol): Boolean =
sym.isClass && sym != NothingClass &&
!(sym isNonBottomSubClass AnyValClass) &&
!(sym isNonBottomSubClass NotNullClass)
- /** Are `tps1` and `tps2` lists of equal length such that all elements
- * of `tps1` conform to corresponding elements of `tps2`?
- */
- def isSubTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ <:< _)
-
- /** Does type `tp` implement symbol `sym` with same or
- * stronger type? Exact only if `sym` is a member of some
- * refinement type, otherwise we might return false negatives.
- */
- def specializesSym(tp: Type, sym: Symbol): Boolean =
- specializesSym(tp, sym, AnyDepth)
-
def specializesSym(tp: Type, sym: Symbol, depth: Int): Boolean =
tp.typeSymbol == NothingClass ||
tp.typeSymbol == NullClass && containsNull(sym.owner) || {
@@ -6245,7 +4106,8 @@ trait Types extends api.Types { self: SymbolTable =>
/** Does member `sym1` of `tp1` have a stronger type
* than member `sym2` of `tp2`?
*/
- private def specializesSym(tp1: Type, sym1: Symbol, tp2: Type, sym2: Symbol, depth: Int): Boolean = {
+ protected[internal] def specializesSym(tp1: Type, sym1: Symbol, tp2: Type, sym2: Symbol, depth: Int): Boolean = {
+ require((sym1 ne NoSymbol) && (sym2 ne NoSymbol), ((tp1, sym1, tp2, sym2, depth)))
val info1 = tp1.memberInfo(sym1)
val info2 = tp2.memberInfo(sym2).substThis(tp2.typeSymbol, tp1)
//System.out.println("specializes "+tp1+"."+sym1+":"+info1+sym1.locationString+" AND "+tp2+"."+sym2+":"+info2)//DEBUG
@@ -6269,7 +4131,7 @@ trait Types extends api.Types { self: SymbolTable =>
def lastTry =
tp2 match {
case ExistentialType(_, res2) if alwaysMatchSimple =>
- matchesType(tp1, res2, true)
+ matchesType(tp1, res2, alwaysMatchSimple = true)
case MethodType(_, _) =>
false
case PolyType(_, _) =>
@@ -6289,7 +4151,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (params1.isEmpty) matchesType(res1, res2, alwaysMatchSimple)
else matchesType(tp1, res2, alwaysMatchSimple)
case ExistentialType(_, res2) =>
- alwaysMatchSimple && matchesType(tp1, res2, true)
+ alwaysMatchSimple && matchesType(tp1, res2, alwaysMatchSimple = true)
case TypeRef(_, sym, Nil) =>
params1.isEmpty && sym.isModuleClass && matchesType(res1, tp2, alwaysMatchSimple)
case _ =>
@@ -6302,7 +4164,7 @@ trait Types extends api.Types { self: SymbolTable =>
case NullaryMethodType(res2) =>
matchesType(res1, res2, alwaysMatchSimple)
case ExistentialType(_, res2) =>
- alwaysMatchSimple && matchesType(tp1, res2, true)
+ alwaysMatchSimple && matchesType(tp1, res2, alwaysMatchSimple = true)
case TypeRef(_, sym, Nil) if sym.isModuleClass =>
matchesType(res1, tp2, alwaysMatchSimple)
case _ =>
@@ -6316,7 +4178,7 @@ trait Types extends api.Types { self: SymbolTable =>
else
matchesQuantified(tparams1, tparams2, res1, res2)
case ExistentialType(_, res2) =>
- alwaysMatchSimple && matchesType(tp1, res2, true)
+ alwaysMatchSimple && matchesType(tp1, res2, alwaysMatchSimple = true)
case _ =>
false // remember that tparams1.nonEmpty is now an invariant of PolyType
}
@@ -6325,7 +4187,7 @@ trait Types extends api.Types { self: SymbolTable =>
case ExistentialType(tparams2, res2) =>
matchesQuantified(tparams1, tparams2, res1, res2)
case _ =>
- if (alwaysMatchSimple) matchesType(res1, tp2, true)
+ if (alwaysMatchSimple) matchesType(res1, tp2, alwaysMatchSimple = true)
else lastTry
}
case TypeRef(_, sym, Nil) if sym.isModuleClass =>
@@ -6378,7 +4240,7 @@ trait Types extends api.Types { self: SymbolTable =>
*/
/** Are `syms1` and `syms2` parameter lists with pairwise equivalent types? */
- private def matchingParams(syms1: List[Symbol], syms2: List[Symbol], syms1isJava: Boolean, syms2isJava: Boolean): Boolean = syms1 match {
+ protected[internal] def matchingParams(syms1: List[Symbol], syms2: List[Symbol], syms1isJava: Boolean, syms2isJava: Boolean): Boolean = syms1 match {
case Nil =>
syms2.isEmpty
case sym1 :: rest1 =>
@@ -6399,7 +4261,7 @@ trait Types extends api.Types { self: SymbolTable =>
* `f` maps all elements to themselves.
*/
def map2Conserve[A <: AnyRef, B](xs: List[A], ys: List[B])(f: (A, B) => A): List[A] =
- if (xs.isEmpty) xs
+ if (xs.isEmpty || ys.isEmpty) xs
else {
val x1 = f(xs.head, ys.head)
val xs1 = map2Conserve(xs.tail, ys.tail)(f)
@@ -6407,87 +4269,6 @@ trait Types extends api.Types { self: SymbolTable =>
else x1 :: xs1
}
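// Editorial sketch (not part of the patch): map2Conserve restated standalone,
// with a usage example showing the property the `eq` checks buy us -- if `f`
// maps every element to itself, the original list instance comes back unchanged,
// so callers can detect "nothing changed" by reference and avoid reallocation.
object Map2ConserveSketch {
  def map2Conserve[A <: AnyRef, B](xs: List[A], ys: List[B])(f: (A, B) => A): List[A] =
    if (xs.isEmpty || ys.isEmpty) xs
    else {
      val x1  = f(xs.head, ys.head)
      val xs1 = map2Conserve(xs.tail, ys.tail)(f)
      if ((x1 eq xs.head) && (xs1 eq xs.tail)) xs else x1 :: xs1
    }

  def main(args: Array[String]): Unit = {
    val names = List("a", "b", "c")
    val flags = List(true, false, true)
    val unchanged = map2Conserve(names, flags)((n, _) => n)
    val rebuilt   = map2Conserve(names, flags)((n, keep) => if (keep) n + "!" else n)
    println(unchanged eq names) // true: identity mapping conserves the original list
    println(rebuilt)            // List(a!, b, c!)
  }
}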
- /** Solve constraint collected in types `tvars`.
- *
- * @param tvars All type variables to be instantiated.
- * @param tparams The type parameters corresponding to `tvars`
- * @param variances The variances of type parameters; need to reverse
- * solution direction for all contravariant variables.
- * @param upper When `true` search for max solution else min.
- */
- def solve(tvars: List[TypeVar], tparams: List[Symbol],
- variances: List[Int], upper: Boolean): Boolean =
- solve(tvars, tparams, variances, upper, AnyDepth)
-
- def solve(tvars: List[TypeVar], tparams: List[Symbol],
- variances: List[Int], upper: Boolean, depth: Int): Boolean = {
-
- def solveOne(tvar: TypeVar, tparam: Symbol, variance: Int) {
- if (tvar.constr.inst == NoType) {
- val up = if (variance != CONTRAVARIANT) upper else !upper
- tvar.constr.inst = null
- val bound: Type = if (up) tparam.info.bounds.hi else tparam.info.bounds.lo
- //Console.println("solveOne0(tv, tp, v, b)="+(tvar, tparam, variance, bound))
- var cyclic = bound contains tparam
- foreach3(tvars, tparams, variances)((tvar2, tparam2, variance2) => {
- val ok = (tparam2 != tparam) && (
- (bound contains tparam2)
- || up && (tparam2.info.bounds.lo =:= tparam.tpeHK)
- || !up && (tparam2.info.bounds.hi =:= tparam.tpeHK)
- )
- if (ok) {
- if (tvar2.constr.inst eq null) cyclic = true
- solveOne(tvar2, tparam2, variance2)
- }
- })
- if (!cyclic) {
- if (up) {
- if (bound.typeSymbol != AnyClass) {
- log(s"$tvar addHiBound $bound.instantiateTypeParams($tparams, $tvars)")
- tvar addHiBound bound.instantiateTypeParams(tparams, tvars)
- }
- for (tparam2 <- tparams)
- tparam2.info.bounds.lo.dealias match {
- case TypeRef(_, `tparam`, _) =>
- log(s"$tvar addHiBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
- tvar addHiBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
- case _ =>
- }
- } else {
- if (bound.typeSymbol != NothingClass && bound.typeSymbol != tparam) {
- log(s"$tvar addLoBound $bound.instantiateTypeParams($tparams, $tvars)")
- tvar addLoBound bound.instantiateTypeParams(tparams, tvars)
- }
- for (tparam2 <- tparams)
- tparam2.info.bounds.hi.dealias match {
- case TypeRef(_, `tparam`, _) =>
- log(s"$tvar addLoBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
- tvar addLoBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
- case _ =>
- }
- }
- }
- tvar.constr.inst = NoType // necessary because hibounds/lobounds may contain tvar
-
- //println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen)))
- val newInst = (
- if (up) {
- if (depth != AnyDepth) glb(tvar.constr.hiBounds, depth) else glb(tvar.constr.hiBounds)
- } else {
- if (depth != AnyDepth) lub(tvar.constr.loBounds, depth) else lub(tvar.constr.loBounds)
- }
- )
- log(s"$tvar setInst $newInst")
- tvar setInst newInst
- //Console.println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen))+" = "+tvar.constr.inst)//@MDEBUG
- }
- }
-
- // println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info)))
- foreach3(tvars, tparams, variances)(solveOne)
- tvars forall (tvar => tvar.constr.isWithinBounds(tvar.constr.inst))
- }
-
/** Do type arguments `targs` conform to formal parameters `tparams`?
*/
def isWithinBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): Boolean = {
@@ -6500,563 +4281,12 @@ trait Types extends api.Types { self: SymbolTable =>
def instantiatedBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): List[TypeBounds] =
tparams map (_.info.asSeenFrom(pre, owner).instantiateTypeParams(tparams, targs).bounds)
-// Lubs and Glbs ---------------------------------------------------------
-
- private def printLubMatrix(btsMap: Map[Type, List[Type]], depth: Int) {
- import util.TableDef
- import TableDef.Column
- def str(tp: Type) = {
- if (tp == NoType) ""
- else {
- val s = ("" + tp).replaceAll("""[\w.]+\.(\w+)""", "$1")
- if (s.length < 60) s
- else (s take 57) + "..."
- }
- }
-
- val sorted = btsMap.toList.sortWith((x, y) => x._1.typeSymbol isLess y._1.typeSymbol)
- val maxSeqLength = sorted.map(_._2.size).max
- val padded = sorted map (_._2.padTo(maxSeqLength, NoType))
- val transposed = padded.transpose
-
- val columns: List[Column[List[Type]]] = mapWithIndex(sorted) {
- case ((k, v), idx) =>
- Column(str(k), (xs: List[Type]) => str(xs(idx)), true)
- }
-
- val tableDef = TableDef(columns: _*)
- val formatted = tableDef.table(transposed)
- println("** Depth is " + depth + "\n" + formatted)
- }
-
- /** From a list of types, find any which take type parameters
- * where the type parameter bounds contain references to other
- * any types in the list (including itself.)
- *
- * @return List of symbol pairs holding the recursive type
- * parameter and the parameter which references it.
- */
- def findRecursiveBounds(ts: List[Type]): List[(Symbol, Symbol)] = {
- if (ts.isEmpty) Nil
- else {
- val sym = ts.head.typeSymbol
- require(ts.tail forall (_.typeSymbol == sym), ts)
- for (p <- sym.typeParams ; in <- sym.typeParams ; if in.info.bounds contains p) yield
- p -> in
- }
- }
-
- /** Given a matrix `tsBts` whose columns are basetype sequences (and the symbols `tsParams` that should be interpreted as type parameters in this matrix),
- * compute its least sorted upwards closed upper bound relative to the following ordering <= between lists of types:
- *
- * xs <= ys iff forall y in ys exists x in xs such that x <: y
- *
- * @arg tsParams for each type in the original list of types `ts0`, its list of type parameters (if that type is a type constructor)
- * (these type parameters may be referred to by type arguments in the BTS column of those types,
- * and must be interpreted as bound variables; i.e., under a type lambda that wraps the types that refer to these type params)
- * @arg tsBts a matrix whose columns are basetype sequences
- * the first row is the original list of types for which we're computing the lub
- * (except that type constructors have been applied to their dummyArgs)
- * @See baseTypeSeq for a definition of sorted and upwards closed.
- */
- private def lubList(ts: List[Type], depth: Int): List[Type] = {
- // Matching the type params of one of the initial types means dummies.
- val initialTypeParams = ts map (_.typeParams)
- def isHotForTs(xs: List[Type]) = initialTypeParams contains (xs map (_.typeSymbol))
-
- def elimHigherOrderTypeParam(tp: Type) = tp match {
- case TypeRef(pre, sym, args) if args.nonEmpty && isHotForTs(args) => tp.typeConstructor
- case _ => tp
- }
- var lubListDepth = 0
- def loop(tsBts: List[List[Type]]): List[Type] = {
- lubListDepth += 1
-
- if (tsBts.isEmpty || (tsBts exists typeListIsEmpty)) Nil
- else if (tsBts.tail.isEmpty) tsBts.head
- else {
- // ts0 is the 1-dimensional frontier of symbols cutting through 2-dimensional tsBts.
- // Invariant: all symbols "under" (closer to the first row) the frontier
- // are smaller (according to _.isLess) than the ones "on and beyond" the frontier
- val ts0 = tsBts map (_.head)
-
- // Is the frontier made up of types with the same symbol?
- val isUniformFrontier = (ts0: @unchecked) match {
- case t :: ts => ts forall (_.typeSymbol == t.typeSymbol)
- }
-
- // Produce a single type for this frontier by merging the prefixes and arguments of those
- // typerefs that share the same symbol: that symbol is the current maximal symbol for which
- // the invariant holds, i.e., the one that conveys most information regarding subtyping. Before
- // merging, strip targs that refer to bound tparams (when we're computing the lub of type
- // constructors.) Also filter out all types that are a subtype of some other type.
- if (isUniformFrontier) {
- if (settings.debug.value || printLubs) {
- val fbounds = findRecursiveBounds(ts0)
- if (fbounds.nonEmpty) {
- println("Encountered " + fbounds.size + " recursive bounds while lubbing " + ts0.size + " types.")
- for ((p0, p1) <- fbounds) {
- val desc = if (p0 == p1) "its own bounds" else "the bounds of " + p1
-
- println(" " + p0.fullLocationString + " appears in " + desc)
- println(" " + p1 + " " + p1.info.bounds)
- }
- println("")
- }
- }
- val tails = tsBts map (_.tail)
- mergePrefixAndArgs(elimSub(ts0 map elimHigherOrderTypeParam, depth), 1, depth) match {
- case Some(tp) => tp :: loop(tails)
- case _ => loop(tails)
- }
- }
- else {
- // frontier is not uniform yet, move it beyond the current minimal symbol;
- // lather, rinSe, repeat
- val sym = minSym(ts0)
- val newtps = tsBts map (ts => if (ts.head.typeSymbol == sym) ts.tail else ts)
- if (printLubs) {
- val str = (newtps.zipWithIndex map { case (tps, idx) =>
- tps.map(" " + _ + "\n").mkString(" (" + idx + ")\n", "", "\n")
- }).mkString("")
-
- println("Frontier(\n" + str + ")")
- printLubMatrix((ts zip tsBts).toMap, lubListDepth)
- }
-
- loop(newtps)
- }
- }
- }
-
- val initialBTSes = ts map (_.baseTypeSeq.toList)
- if (printLubs)
- printLubMatrix((ts zip initialBTSes).toMap, depth)
-
- loop(initialBTSes)
- }
-
- /** The minimal symbol of a list of types (as determined by `Symbol.isLess`). */
- private def minSym(tps: List[Type]): Symbol =
- (tps.head.typeSymbol /: tps.tail) {
- (sym1, tp2) => if (tp2.typeSymbol isLess sym1) tp2.typeSymbol else sym1
- }
-
- /** A minimal type list which has a given list of types as its base type sequence */
- def spanningTypes(ts: List[Type]): List[Type] = ts match {
- case List() => List()
- case first :: rest =>
- first :: spanningTypes(
- rest filter (t => !first.typeSymbol.isSubClass(t.typeSymbol)))
- }
-
- /** Eliminate from list of types all elements which are a supertype
- * of some other element of the list. */
- private def elimSuper(ts: List[Type]): List[Type] = ts match {
- case List() => List()
- case List(t) => List(t)
- case t :: ts1 =>
- val rest = elimSuper(ts1 filter (t1 => !(t <:< t1)))
- if (rest exists (t1 => t1 <:< t)) rest else t :: rest
- }
-
def elimAnonymousClass(t: Type) = t match {
case TypeRef(pre, clazz, Nil) if clazz.isAnonymousClass =>
clazz.classBound.asSeenFrom(pre, clazz.owner)
case _ =>
t
}
- def elimRefinement(t: Type) = t match {
- case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents)
- case _ => t
- }
-
- /** Eliminate from list of types all elements which are a subtype
- * of some other element of the list. */
- private def elimSub(ts: List[Type], depth: Int): List[Type] = {
- def elimSub0(ts: List[Type]): List[Type] = ts match {
- case List() => List()
- case List(t) => List(t)
- case t :: ts1 =>
- val rest = elimSub0(ts1 filter (t1 => !isSubType(t1, t, decr(depth))))
- if (rest exists (t1 => isSubType(t, t1, decr(depth)))) rest else t :: rest
- }
- val ts0 = elimSub0(ts)
- if (ts0.isEmpty || ts0.tail.isEmpty) ts0
- else {
- val ts1 = ts0 mapConserve (t => elimAnonymousClass(t.dealiasWiden))
- if (ts1 eq ts0) ts0
- else elimSub(ts1, depth)
- }
- }
-
- private def stripExistentialsAndTypeVars(ts: List[Type]): (List[Type], List[Symbol]) = {
- val quantified = ts flatMap {
- case ExistentialType(qs, _) => qs
- case t => List()
- }
- def stripType(tp: Type): Type = tp match {
- case ExistentialType(_, res) =>
- res
- case tv@TypeVar(_, constr) =>
- if (tv.instValid) stripType(constr.inst)
- else if (tv.untouchable) tv
- else abort("trying to do lub/glb of typevar "+tp)
- case t => t
- }
- val strippedTypes = ts mapConserve stripType
- (strippedTypes, quantified)
- }
-
- def weakLub(ts: List[Type]) =
- if (ts.nonEmpty && (ts forall isNumericValueType)) (numericLub(ts), true)
- else if (ts exists typeHasAnnotations)
- (annotationsLub(lub(ts map (_.withoutAnnotations)), ts), true)
- else (lub(ts), false)
-
- def weakGlb(ts: List[Type]) = {
- if (ts.nonEmpty && (ts forall isNumericValueType)) {
- val nglb = numericGlb(ts)
- if (nglb != NoType) (nglb, true)
- else (glb(ts), false)
- } else if (ts exists typeHasAnnotations) {
- (annotationsGlb(glb(ts map (_.withoutAnnotations)), ts), true)
- } else (glb(ts), false)
- }
-
- def numericLub(ts: List[Type]) =
- ts reduceLeft ((t1, t2) =>
- if (isNumericSubType(t1, t2)) t2
- else if (isNumericSubType(t2, t1)) t1
- else IntClass.tpe)
-
- def numericGlb(ts: List[Type]) =
- ts reduceLeft ((t1, t2) =>
- if (isNumericSubType(t1, t2)) t1
- else if (isNumericSubType(t2, t1)) t2
- else NoType)
-
- def isWeakSubType(tp1: Type, tp2: Type) =
- tp1.deconst.normalize match {
- case TypeRef(_, sym1, _) if isNumericValueClass(sym1) =>
- tp2.deconst.normalize match {
- case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
- isNumericSubClass(sym1, sym2)
- case tv2 @ TypeVar(_, _) =>
- tv2.registerBound(tp1, isLowerBound = true, isNumericBound = true)
- case _ =>
- isSubType(tp1, tp2)
- }
- case tv1 @ TypeVar(_, _) =>
- tp2.deconst.normalize match {
- case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
- tv1.registerBound(tp2, isLowerBound = false, isNumericBound = true)
- case _ =>
- isSubType(tp1, tp2)
- }
- case _ =>
- isSubType(tp1, tp2)
- }
-
- /** The isNumericValueType tests appear redundant, but without them
- * test/continuations-neg/function3.scala goes into an infinite loop.
- * (Even if the calls are to typeSymbolDirect.)
- */
- def isNumericSubType(tp1: Type, tp2: Type): Boolean = (
- isNumericValueType(tp1)
- && isNumericValueType(tp2)
- && isNumericSubClass(tp1.typeSymbol, tp2.typeSymbol)
- )
-
- private val lubResults = new mutable.HashMap[(Int, List[Type]), Type]
- private val glbResults = new mutable.HashMap[(Int, List[Type]), Type]
-
- def lub(ts: List[Type]): Type = ts match {
- case List() => NothingClass.tpe
- case List(t) => t
- case _ =>
- if (Statistics.canEnable) Statistics.incCounter(lubCount)
- val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
- try {
- lub(ts, lubDepth(ts))
- } finally {
- lubResults.clear()
- glbResults.clear()
- if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
- }
- }
-
- /** The least upper bound wrt <:< of a list of types */
- private def lub(ts: List[Type], depth: Int): Type = {
- def lub0(ts0: List[Type]): Type = elimSub(ts0, depth) match {
- case List() => NothingClass.tpe
- case List(t) => t
- case ts @ PolyType(tparams, _) :: _ =>
- val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
- tparam.cloneSymbol.setInfo(glb(bounds, depth)))
- PolyType(tparams1, lub0(matchingInstTypes(ts, tparams1)))
- case ts @ (mt @ MethodType(params, _)) :: rest =>
- MethodType(params, lub0(matchingRestypes(ts, mt.paramTypes)))
- case ts @ NullaryMethodType(_) :: rest =>
- NullaryMethodType(lub0(matchingRestypes(ts, Nil)))
- case ts @ TypeBounds(_, _) :: rest =>
- TypeBounds(glb(ts map (_.bounds.lo), depth), lub(ts map (_.bounds.hi), depth))
- case ts @ AnnotatedType(annots, tpe, _) :: rest =>
- annotationsLub(lub0(ts map (_.withoutAnnotations)), ts)
- case ts =>
- lubResults get (depth, ts) match {
- case Some(lubType) =>
- lubType
- case None =>
- lubResults((depth, ts)) = AnyClass.tpe
- val res = if (depth < 0) AnyClass.tpe else lub1(ts)
- lubResults((depth, ts)) = res
- res
- }
- }
- def lub1(ts0: List[Type]): Type = {
- val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
- val lubBaseTypes: List[Type] = lubList(ts, depth)
- val lubParents = spanningTypes(lubBaseTypes)
- val lubOwner = commonOwner(ts)
- val lubBase = intersectionType(lubParents, lubOwner)
- val lubType =
- if (phase.erasedTypes || depth == 0) lubBase
- else {
- val lubRefined = refinedType(lubParents, lubOwner)
- val lubThisType = lubRefined.typeSymbol.thisType
- val narrowts = ts map (_.narrow)
- def excludeFromLub(sym: Symbol) = (
- sym.isClass
- || sym.isConstructor
- || !sym.isPublic
- || isGetClass(sym)
- || narrowts.exists(t => !refines(t, sym))
- )
- def lubsym(proto: Symbol): Symbol = {
- val prototp = lubThisType.memberInfo(proto)
- val syms = narrowts map (t =>
- t.nonPrivateMember(proto.name).suchThat(sym =>
- sym.tpe matches prototp.substThis(lubThisType.typeSymbol, t)))
- if (syms contains NoSymbol) NoSymbol
- else {
- val symtypes =
- map2(narrowts, syms)((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType))
- if (proto.isTerm) // possible problem: owner of info is still the old one, instead of new refinement class
- proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(lub(symtypes, decr(depth)))
- else if (symtypes.tail forall (symtypes.head =:= _))
- proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(symtypes.head)
- else {
- def lubBounds(bnds: List[TypeBounds]): TypeBounds =
- TypeBounds(glb(bnds map (_.lo), decr(depth)), lub(bnds map (_.hi), decr(depth)))
- lubRefined.typeSymbol.newAbstractType(proto.name.toTypeName, proto.pos)
- .setInfoOwnerAdjusted(lubBounds(symtypes map (_.bounds)))
- }
- }
- }
- def refines(tp: Type, sym: Symbol): Boolean = {
- val syms = tp.nonPrivateMember(sym.name).alternatives;
- !syms.isEmpty && (syms forall (alt =>
- // todo alt != sym is strictly speaking not correct, but without it we lose
- // efficiency.
- alt != sym && !specializesSym(lubThisType, sym, tp, alt, depth)))
- }
- // add a refinement symbol for all non-class members of lubBase
- // which are refined by every type in ts.
- for (sym <- lubBase.nonPrivateMembers ; if !excludeFromLub(sym)) {
- try {
- val lsym = lubsym(sym)
- if (lsym != NoSymbol) addMember(lubThisType, lubRefined, lsym, depth)
- } catch {
- case ex: NoCommonType =>
- }
- }
- if (lubRefined.decls.isEmpty) lubBase
- else if (!verifyLubs) lubRefined
- else {
- // Verify that every given type conforms to the calculated lub.
- // In theory this should not be necessary, but higher-order type
- // parameters are not handled correctly.
- val ok = ts forall { t =>
- isSubType(t, lubRefined, depth) || {
- if (settings.debug.value || printLubs) {
- Console.println(
- "Malformed lub: " + lubRefined + "\n" +
- "Argument " + t + " does not conform. Falling back to " + lubBase
- )
- }
- false
- }
- }
- // If not, fall back on the more conservative calculation.
- if (ok) lubRefined
- else lubBase
- }
- }
- // dropRepeatedParamType is a localized fix for SI-6897. We should probably
- // integrate that transformation at a lower level in master, but lubs are
- // the likely and maybe only spot they escape, so fixing here for 2.10.1.
- existentialAbstraction(tparams, dropRepeatedParamType(lubType))
- }
- if (printLubs) {
- println(indent + "lub of " + ts + " at depth "+depth)//debug
- indent = indent + " "
- assert(indent.length <= 100)
- }
- if (Statistics.canEnable) Statistics.incCounter(nestedLubCount)
- val res = lub0(ts)
- if (printLubs) {
- indent = indent stripSuffix " "
- println(indent + "lub of " + ts + " is " + res)//debug
- }
- if (ts forall typeIsNotNull) res.notNull else res
- }
-
- val GlbFailure = new Throwable
-
- /** A global counter for glb calls in the `specializes` query connected to the `addMembers`
- * call in `glb`. There's a possible infinite recursion when `specializes` calls
- * memberType, which calls baseTypeSeq, which calls mergePrefixAndArgs, which calls glb.
- * The counter breaks this recursion after two calls.
- * If the recursion is broken, no member is added to the glb.
- */
- private var globalGlbDepth = 0
- private final val globalGlbLimit = 2
-
- /** The greatest lower bound of a list of types (as determined by `<:<`). */
- def glb(ts: List[Type]): Type = elimSuper(ts) match {
- case List() => AnyClass.tpe
- case List(t) => t
- case ts0 =>
- if (Statistics.canEnable) Statistics.incCounter(lubCount)
- val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
- try {
- glbNorm(ts0, lubDepth(ts0))
- } finally {
- lubResults.clear()
- glbResults.clear()
- if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
- }
- }
-
- private def glb(ts: List[Type], depth: Int): Type = elimSuper(ts) match {
- case List() => AnyClass.tpe
- case List(t) => t
- case ts0 => glbNorm(ts0, depth)
- }
-
- /** The greatest lower bound of a list of types (as determined by `<:<`), which have been normalized
- * with regard to `elimSuper`. */
- protected def glbNorm(ts: List[Type], depth: Int): Type = {
- def glb0(ts0: List[Type]): Type = ts0 match {
- case List() => AnyClass.tpe
- case List(t) => t
- case ts @ PolyType(tparams, _) :: _ =>
- val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
- tparam.cloneSymbol.setInfo(lub(bounds, depth)))
- PolyType(tparams1, glbNorm(matchingInstTypes(ts, tparams1), depth))
- case ts @ (mt @ MethodType(params, _)) :: rest =>
- MethodType(params, glbNorm(matchingRestypes(ts, mt.paramTypes), depth))
- case ts @ NullaryMethodType(_) :: rest =>
- NullaryMethodType(glbNorm(matchingRestypes(ts, Nil), depth))
- case ts @ TypeBounds(_, _) :: rest =>
- TypeBounds(lub(ts map (_.bounds.lo), depth), glb(ts map (_.bounds.hi), depth))
- case ts =>
- glbResults get (depth, ts) match {
- case Some(glbType) =>
- glbType
- case _ =>
- glbResults((depth, ts)) = NothingClass.tpe
- val res = if (depth < 0) NothingClass.tpe else glb1(ts)
- glbResults((depth, ts)) = res
- res
- }
- }
- def glb1(ts0: List[Type]): Type = {
- try {
- val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
- val glbOwner = commonOwner(ts)
- def refinedToParents(t: Type): List[Type] = t match {
- case RefinedType(ps, _) => ps flatMap refinedToParents
- case _ => List(t)
- }
- def refinedToDecls(t: Type): List[Scope] = t match {
- case RefinedType(ps, decls) =>
- val dss = ps flatMap refinedToDecls
- if (decls.isEmpty) dss else decls :: dss
- case _ => List()
- }
- val ts1 = ts flatMap refinedToParents
- val glbBase = intersectionType(ts1, glbOwner)
- val glbType =
- if (phase.erasedTypes || depth == 0) glbBase
- else {
- val glbRefined = refinedType(ts1, glbOwner)
- val glbThisType = glbRefined.typeSymbol.thisType
- def glbsym(proto: Symbol): Symbol = {
- val prototp = glbThisType.memberInfo(proto)
- val syms = for (t <- ts;
- alt <- (t.nonPrivateMember(proto.name).alternatives);
- if glbThisType.memberInfo(alt) matches prototp
- ) yield alt
- val symtypes = syms map glbThisType.memberInfo
- assert(!symtypes.isEmpty)
- proto.cloneSymbol(glbRefined.typeSymbol).setInfoOwnerAdjusted(
- if (proto.isTerm) glb(symtypes, decr(depth))
- else {
- def isTypeBound(tp: Type) = tp match {
- case TypeBounds(_, _) => true
- case _ => false
- }
- def glbBounds(bnds: List[Type]): TypeBounds = {
- val lo = lub(bnds map (_.bounds.lo), decr(depth))
- val hi = glb(bnds map (_.bounds.hi), decr(depth))
- if (lo <:< hi) TypeBounds(lo, hi)
- else throw GlbFailure
- }
- val symbounds = symtypes filter isTypeBound
- var result: Type =
- if (symbounds.isEmpty)
- TypeBounds.empty
- else glbBounds(symbounds)
- for (t <- symtypes if !isTypeBound(t))
- if (result.bounds containsType t) result = t
- else throw GlbFailure
- result
- })
- }
- if (globalGlbDepth < globalGlbLimit)
- try {
- globalGlbDepth += 1
- val dss = ts flatMap refinedToDecls
- for (ds <- dss; sym <- ds.iterator)
- if (globalGlbDepth < globalGlbLimit && !specializesSym(glbThisType, sym, depth))
- try {
- addMember(glbThisType, glbRefined, glbsym(sym), depth)
- } catch {
- case ex: NoCommonType =>
- }
- } finally {
- globalGlbDepth -= 1
- }
- if (glbRefined.decls.isEmpty) glbBase else glbRefined
- }
- existentialAbstraction(tparams, glbType)
- } catch {
- case GlbFailure =>
- if (ts forall (t => NullClass.tpe <:< t)) NullClass.tpe
- else NothingClass.tpe
- }
- }
- // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG
-
- if (Statistics.canEnable) Statistics.incCounter(nestedLubCount)
- val res = glb0(ts)
-
- // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG
-
- if (ts exists typeIsNotNull) res.notNull else res
- }
/** A list of the typevars in a type. */
def typeVarsInType(tp: Type): List[TypeVar] = {
@@ -7067,29 +4297,33 @@ trait Types extends api.Types { self: SymbolTable =>
}
tvs.reverse
}
- /** Make each type var in this type use its original type for comparisons instead
- * of collecting constraints.
- */
- def suspendTypeVarsInType(tp: Type): List[TypeVar] = {
- val tvs = typeVarsInType(tp)
- // !!! Is it somehow guaranteed that this will not break under nesting?
- // In general one has to save and restore the contents of the field...
+
+ // If this type contains type variables, put them to sleep for a while.
+ // Don't just wipe them out by replacing them by the corresponding type
+ // parameter, as that messes up (e.g.) type variables in type refinements.
+ // Without this, the matchesType call would lead to type variables on both
+ // sides of a subtyping/equality judgement, which can lead to recursive types
+ // being constructed. See pos/t0851 for a situation where this happens.
+ @inline final def suspendingTypeVars[T](tvs: List[TypeVar])(op: => T): T = {
+ val saved = tvs map (_.suspended)
tvs foreach (_.suspended = true)
- tvs
+
+ try op
+ finally foreach2(tvs, saved)(_.suspended = _)
}
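// Editorial sketch (not part of the patch): the save-and-restore discipline that
// suspendingTypeVars uses, shown on a plain mutable flag instead of a TypeVar.
// Restoring the previously saved values, rather than unconditionally resetting
// to false, is what keeps nested suspensions of the same variables safe.
object SuspendSketch {
  final class Flag { var suspended = false }

  def suspending[T](flags: List[Flag])(op: => T): T = {
    val saved = flags map (_.suspended)
    flags foreach (_.suspended = true)
    try op
    finally flags.zip(saved) foreach { case (f, s) => f.suspended = s }
  }

  def main(args: Array[String]): Unit = {
    val tv = new Flag
    suspending(List(tv)) {
      suspending(List(tv)) { () }   // inner suspension ends here...
      println(tv.suspended)         // true: ...but the outer one is still in force
    }
    println(tv.suspended)           // false: fully restored once the outer call exits
  }
}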
- /** Compute lub (if `variance == 1`) or glb (if `variance == -1`) of given list
+ /** Compute lub (if `variance == Covariant`) or glb (if `variance == Contravariant`) of given list
* of types `tps`. All types in `tps` are typerefs or singletypes
* with the same symbol.
* Return `Some(x)` if the computation succeeds with result `x`.
* Return `None` if the computation fails.
*/
- def mergePrefixAndArgs(tps: List[Type], variance: Int, depth: Int): Option[Type] = tps match {
+ def mergePrefixAndArgs(tps: List[Type], variance: Variance, depth: Int): Option[Type] = tps match {
case List(tp) =>
Some(tp)
case TypeRef(_, sym, _) :: rest =>
val pres = tps map (_.prefix) // prefix normalizes automatically
- val pre = if (variance == 1) lub(pres, depth) else glb(pres, depth)
+ val pre = if (variance.isPositive) lub(pres, depth) else glb(pres, depth)
val argss = tps map (_.normalize.typeArgs) // symbol equality (of the tp in tps) was checked using typeSymbol, which normalizes, so should normalize before retrieving arguments
val capturedParams = new ListBuffer[Symbol]
try {
@@ -7114,27 +4348,25 @@ trait Types extends api.Types { self: SymbolTable =>
debuglog("transposed irregular matrix!?" +(tps, argss))
None
case Some(argsst) =>
- val args = map2(sym.typeParams, argsst) { (tparam, as) =>
- if (depth == 0) {
- if (tparam.variance == variance) {
- // Take the intersection of the upper bounds of the type parameters
- // rather than falling all the way back to "Any", otherwise we end up not
- // conforming to bounds.
- val bounds0 = sym.typeParams map (_.info.bounds.hi) filterNot (_.typeSymbol == AnyClass)
- if (bounds0.isEmpty) AnyClass.tpe
- else intersectionType(bounds0 map (b => b.asSeenFrom(tps.head, sym)))
- }
- else if (tparam.variance == -variance) NothingClass.tpe
- else NoType
+ val args = map2(sym.typeParams, argsst) { (tparam, as0) =>
+ val as = as0.distinct
+ if (as.size == 1) as.head
+ else if (depth == 0) {
+ log("Giving up merging args: can't unify %s under %s".format(as.mkString(", "), tparam.fullLocationString))
+ // Don't return "Any" (or "Nothing") when we have to give up due to
+ // recursion depth. Return NoType, which prevents us from poisoning
+                  // lubList's results. It can recognize the recursion and deal with it, but
+ // only if we aren't returning invalid types.
+ NoType
}
else {
if (tparam.variance == variance) lub(as, decr(depth))
- else if (tparam.variance == -variance) glb(as, decr(depth))
+ else if (tparam.variance == variance.flip) glb(as, decr(depth))
else {
val l = lub(as, decr(depth))
val g = glb(as, decr(depth))
if (l <:< g) l
- else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
+ else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
// just err on the conservative side, i.e. with a bound that is too high.
// if(!(tparam.info.bounds contains tparam)) //@M can't deal with f-bounds, see #2251
@@ -7153,7 +4385,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
case SingleType(_, sym) :: rest =>
val pres = tps map (_.prefix)
- val pre = if (variance == 1) lub(pres, depth) else glb(pres, depth)
+ val pre = if (variance.isPositive) lub(pres, depth) else glb(pres, depth)
try {
Some(singleType(pre, sym))
} catch {
@@ -7177,7 +4409,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (sym.isTerm)
for (alt <- tp.nonPrivateDecl(sym.name).alternatives)
if (specializesSym(thistp, sym, thistp, alt, depth))
- tp.decls unlink alt;
+ tp.decls unlink alt
tp.decls enter sym
}
}
@@ -7190,51 +4422,6 @@ trait Types extends api.Types { self: SymbolTable =>
def inheritsJavaVarArgsMethod(clazz: Symbol) =
clazz.thisType.baseClasses exists isJavaVarargsAncestor
- /** All types in list must be polytypes with type parameter lists of
- * same length as tparams.
- * Returns list of list of bounds infos, where corresponding type
- * parameters are renamed to tparams.
- */
- private def matchingBounds(tps: List[Type], tparams: List[Symbol]): List[List[Type]] = {
- def getBounds(tp: Type): List[Type] = tp match {
- case PolyType(tparams1, _) if sameLength(tparams1, tparams) =>
- tparams1 map (tparam => tparam.info.substSym(tparams1, tparams))
- case tp =>
- if (tp ne tp.normalize) getBounds(tp.normalize)
- else throw new NoCommonType(tps)
- }
- tps map getBounds
- }
-
- /** All types in list must be polytypes with type parameter lists of
- * same length as tparams.
- * Returns list of instance types, where corresponding type
- * parameters are renamed to tparams.
- */
- private def matchingInstTypes(tps: List[Type], tparams: List[Symbol]): List[Type] = {
- def transformResultType(tp: Type): Type = tp match {
- case PolyType(tparams1, restpe) if sameLength(tparams1, tparams) =>
- restpe.substSym(tparams1, tparams)
- case tp =>
- if (tp ne tp.normalize) transformResultType(tp.normalize)
- else throw new NoCommonType(tps)
- }
- tps map transformResultType
- }
-
- /** All types in list must be method types with equal parameter types.
- * Returns list of their result types.
- */
- private def matchingRestypes(tps: List[Type], pts: List[Type]): List[Type] =
- tps map {
- case mt @ MethodType(params1, res) if isSameTypes(mt.paramTypes, pts) =>
- res
- case NullaryMethodType(res) if pts.isEmpty =>
- res
- case _ =>
- throw new NoCommonType(tps)
- }
-
// Errors and Diagnostics -----------------------------------------------------
/** A throwable signalling a type error */
@@ -7259,7 +4446,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
/** The current indentation string for traces */
- private var indent: String = ""
+ protected[internal] var indent: String = ""
/** Perform operation `p` on arguments `tp1`, `arg2` and print trace of computation. */
protected def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = {
@@ -7305,8 +4492,11 @@ trait Types extends api.Types { self: SymbolTable =>
/** Members of the given class, other than those inherited
* from Any or AnyRef.
*/
- def nonTrivialMembers(clazz: Symbol): Iterable[Symbol] =
- clazz.info.members filterNot (sym => sym.owner == ObjectClass || sym.owner == AnyClass)
+ def nonTrivialMembers(clazz: Symbol): Scope = clazz.info.members filterNot isUniversalMember
+
+ /** Members which can be imported into other scopes.
+ */
+ def importableMembers(pre: Type): Scope = pre.members filter isImportable
def objToAny(tp: Type): Type =
if (!phase.erasedTypes && tp.typeSymbol == ObjectClass) AnyClass.tpe
@@ -7322,29 +4512,6 @@ trait Types extends api.Types { self: SymbolTable =>
"scala.collection.IndexedSeq",
"scala.collection.Iterator")
-
- /** The maximum number of recursions allowed in toString
- */
- final val maxTostringRecursions = 50
-
- private var tostringRecursions = 0
-
- protected def typeToString(tpe: Type): String =
- if (tostringRecursions >= maxTostringRecursions) {
- debugwarn("Exceeded recursion depth attempting to print type.")
- if (settings.debug.value)
- (new Throwable).printStackTrace
-
- "..."
- }
- else
- try {
- tostringRecursions += 1
- tpe.safeToString
- } finally {
- tostringRecursions -= 1
- }
-
// ----- Hoisted closures and convenience methods, for compile time reductions -------
private[scala] val typeIsNotNull = (tp: Type) => tp.isNotNull
@@ -7362,6 +4529,49 @@ trait Types extends api.Types { self: SymbolTable =>
private[scala] val typeIsAny = (tp: Type) => tp.typeSymbolDirect eq AnyClass
private[scala] val typeIsHigherKinded = (tp: Type) => tp.isHigherKinded
+ /** The maximum depth of type `tp` */
+ def typeDepth(tp: Type): Int = tp match {
+ case TypeRef(pre, sym, args) =>
+ math.max(typeDepth(pre), typeDepth(args) + 1)
+ case RefinedType(parents, decls) =>
+ math.max(typeDepth(parents), symTypeDepth(decls.toList) + 1)
+ case TypeBounds(lo, hi) =>
+ math.max(typeDepth(lo), typeDepth(hi))
+ case MethodType(paramtypes, result) =>
+ typeDepth(result)
+ case NullaryMethodType(result) =>
+ typeDepth(result)
+ case PolyType(tparams, result) =>
+ math.max(typeDepth(result), symTypeDepth(tparams) + 1)
+ case ExistentialType(tparams, result) =>
+ math.max(typeDepth(result), symTypeDepth(tparams) + 1)
+ case _ =>
+ 1
+ }
+
+ def withUncheckedVariance(tp: Type): Type =
+ tp withAnnotation (AnnotationInfo marker uncheckedVarianceClass.tpe)
+
+ //OPT replaced with tailrecursive function to save on #closures
+ // was:
+ // var d = 0
+ // for (tp <- tps) d = d max by(tp) //!!!OPT!!!
+ // d
+ private[scala] def maxDepth(tps: List[Type]): Int = {
+ @tailrec def loop(tps: List[Type], acc: Int): Int = tps match {
+ case tp :: rest => loop(rest, math.max(acc, typeDepth(tp)))
+ case _ => acc
+ }
+ loop(tps, 0)
+ }
+ private[scala] def maxBaseTypeSeqDepth(tps: List[Type]): Int = {
+ @tailrec def loop(tps: List[Type], acc: Int): Int = tps match {
+ case tp :: rest => loop(rest, math.max(acc, tp.baseTypeSeqDepth))
+ case _ => acc
+ }
+ loop(tps, 0)
+ }
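// Editorial sketch (not part of the patch): a toy model of the depth measure
// computed by typeDepth/maxDepth above. TTpe, TConst and TApp are hypothetical
// stand-ins for Type; the point is only that each level of type application
// adds one, which is what lets lub/glb bound their recursion by depth.
object TypeDepthSketch {
  sealed trait TTpe
  case object TConst extends TTpe                  // a leaf type such as Int
  case class TApp(args: List[TTpe]) extends TTpe   // an applied type such as List[...]

  def depth(tp: TTpe): Int = tp match {
    case TConst     => 1
    case TApp(args) => maxDepth(args) + 1
  }
  def maxDepth(tps: List[TTpe]): Int =
    tps.foldLeft(0)((acc, tp) => math.max(acc, depth(tp)))

  def main(args: Array[String]): Unit = {
    val intTpe   = TConst                // Int             -> 1
    val listInt  = TApp(List(intTpe))    // List[Int]       -> 2
    val listList = TApp(List(listInt))   // List[List[Int]] -> 3
    println(List(intTpe, listInt, listList) map depth) // List(1, 2, 3)
  }
}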
+
@tailrec private def typesContain(tps: List[Type], sym: Symbol): Boolean = tps match {
case tp :: rest => (tp contains sym) || typesContain(rest, sym)
case _ => false
@@ -7401,7 +4611,6 @@ trait Types extends api.Types { self: SymbolTable =>
object TypesStats {
import BaseTypeSeqsStats._
val rawTypeCount = Statistics.newCounter ("#raw type creations")
- val asSeenFromCount = Statistics.newCounter ("#asSeenFrom ops")
val subtypeCount = Statistics.newCounter ("#subtype ops")
val sametypeCount = Statistics.newCounter ("#sametype ops")
val lubCount = Statistics.newCounter ("#toplevel lubs/glbs")
diff --git a/src/reflect/scala/reflect/internal/Variance.scala b/src/reflect/scala/reflect/internal/Variance.scala
new file mode 100644
index 0000000000..007d56eb35
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Variance.scala
@@ -0,0 +1,90 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.reflect
+package internal
+
+import Variance._
+
+/** Variances form a lattice:
+ *
+ * - Covariant -
+ * / \
+ * Invariant Bivariant
+ * \ /
+ * Contravariant
+ *
+ * The variance of a symbol within a type is calculated based on variance
+ * annotations, e.g. +A or -A, and the positions of the types in which the
+ * symbol appears. The actual mechanics are beyond the scope of this
+ * comment, but the essential operations on a Variance are:
+ *
+ * '&' - like bitwise AND. Unless all inputs have compatible variance,
+ * folding them across & will be invariant.
+ * '*' - like multiplication across { -1, 0, 1 } with contravariance as -1.
+ * flip - if contravariant or covariant, flip to the other; otherwise leave unchanged.
+ * cut - if bivariant, remain bivariant; otherwise become invariant.
+ *
+ * There is an important distinction between "isPositive" and "isCovariant".
+ * The former is true for both Covariant and Bivariant, but the latter is true
+ * only for Covariant.
+ */
+final class Variance private (val flags: Int) extends AnyVal {
+ def isBivariant = flags == 2
+ def isCovariant = flags == 1 // excludes bivariant
+ def isInvariant = flags == 0
+ def isContravariant = flags == -1 // excludes bivariant
+ def isPositive = flags > 0 // covariant or bivariant
+
+ def &(other: Variance): Variance = (
+ if (this == other) this
+ else if (this.isBivariant) other
+ else if (other.isBivariant) this
+ else Invariant
+ )
+
+ def *(other: Variance): Variance = (
+ if (other.isPositive) this
+ else if (other.isContravariant) this.flip
+ else this.cut
+ )
+
+ /** Flip between covariant and contravariant. I chose not to use unary_- because it doesn't stand out enough. */
+ def flip = if (isCovariant) Contravariant else if (isContravariant) Covariant else this
+
+ /** Map everything below bivariant to invariant. */
+ def cut = if (isBivariant) this else Invariant
+
+ /** The symbolic annotation used to indicate the given kind of variance. */
+ def symbolicString = (
+ if (isBivariant) "+/-"
+ else if (isCovariant) "+"
+ else if (isContravariant) "-"
+ else ""
+ )
+
+ override def toString = (
+ if (isContravariant) "contravariant"
+ else if (isCovariant) "covariant"
+ else if (isInvariant) "invariant"
+ else "" // noisy to print bivariant on everything without type parameters
+ )
+}
+
+object Variance {
+ implicit class SbtCompat(val v: Variance) {
+ def < (other: Int) = v.flags < other
+ def > (other: Int) = v.flags > other
+ }
+
+ def fold(variances: List[Variance]): Variance = (
+ if (variances.isEmpty) Bivariant
+ else variances reduceLeft (_ & _)
+ )
+ val Bivariant = new Variance(2)
+ val Covariant = new Variance(1)
+ val Contravariant = new Variance(-1)
+ val Invariant = new Variance(0)
+}
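// Editorial sketch (not part of the new file): a walkthrough of the lattice
// operations defined above. It assumes a scala-reflect built from this patch is
// on the classpath, since scala.reflect.internal is compiler-internal API.
object VarianceSketch {
  import scala.reflect.internal.Variance
  import Variance._

  def main(args: Array[String]): Unit = {
    println(Covariant & Contravariant)        // invariant: incompatible inputs meet at Invariant
    println(Bivariant & Contravariant)        // contravariant: Bivariant is the identity for &
    println(Covariant * Contravariant)        // contravariant: a contravariant position flips
    println(Contravariant.flip)               // covariant
    println(fold(List(Covariant, Covariant))) // covariant
    println(fold(Nil).isBivariant)            // true: the empty fold is Bivariant
  }
}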
diff --git a/src/reflect/scala/reflect/internal/Variances.scala b/src/reflect/scala/reflect/internal/Variances.scala
new file mode 100644
index 0000000000..716e49b303
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Variances.scala
@@ -0,0 +1,196 @@
+/* NSC -- new scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+import Variance._
+import scala.collection.{ mutable, immutable }
+import scala.annotation.tailrec
+
+/** See comments at scala.reflect.internal.Variance.
+ */
+trait Variances {
+ self: SymbolTable =>
+
+ /** Used in Refchecks.
+ * TODO - eliminate duplication with varianceInType
+ */
+ class VarianceValidator extends Traverser {
+ private val escapedLocals = mutable.HashSet[Symbol]()
+ // A flag for when we're in a refinement, meaning method parameter types
+ // need to be checked.
+ private var inRefinement = false
+ @inline private def withinRefinement(body: => Type): Type = {
+ val saved = inRefinement
+ inRefinement = true
+ try body finally inRefinement = saved
+ }
+
+ /** Is every symbol in the owner chain between `site` and the owner of `sym`
+ * either a term symbol or private[this]? If not, add `sym` to the set of
+ * esacped locals.
+   * escaped locals.
+ */
+ @tailrec final def checkForEscape(sym: Symbol, site: Symbol) {
+ if (site == sym.owner || site == sym.owner.moduleClass || site.isPackage) () // done
+ else if (site.isTerm || site.isPrivateLocal) checkForEscape(sym, site.owner) // ok - recurse to owner
+ else escapedLocals += sym
+ }
+
+ protected def issueVarianceError(base: Symbol, sym: Symbol, required: Variance): Unit = ()
+
+ // Flip occurrences of type parameters and parameters, unless
+ // - it's a constructor, or case class factory or extractor
+ // - it's a type parameter of tvar's owner.
+ def shouldFlip(sym: Symbol, tvar: Symbol) = (
+ sym.isParameter
+ && !(tvar.isTypeParameterOrSkolem && sym.isTypeParameterOrSkolem && tvar.owner == sym.owner)
+ )
+ // return Bivariant if `sym` is local to a term
+ // or is private[this] or protected[this]
+ def isLocalOnly(sym: Symbol) = !sym.owner.isClass || (
+ sym.isTerm
+ && (sym.hasLocalFlag || sym.isSuperAccessor) // super accessors are implicitly local #4345
+ && !escapedLocals(sym)
+ )
+
+ private object ValidateVarianceMap extends TypeMap(trackVariance = true) {
+ private var base: Symbol = _
+
+ /** The variance of a symbol occurrence of `tvar` seen at the level of the definition of `base`.
+ * The search proceeds from `base` to the owner of `tvar`.
+ * Initially the state is covariant, but it might change along the search.
+ *
+ * An alias which does not override anything is treated as Bivariant;
+ * this is OK because we always expand aliases for variance checking.
+ * However if it does override a type in a base class, we must assume Invariant
+ * because there may be references to the type parameter that are not checked.
+ */
+ def relativeVariance(tvar: Symbol): Variance = {
+ def nextVariance(sym: Symbol, v: Variance): Variance = (
+ if (shouldFlip(sym, tvar)) v.flip
+ else if (isLocalOnly(sym)) Bivariant
+ else if (sym.isAliasType) Invariant
+ else v
+ )
+ def loop(sym: Symbol, v: Variance): Variance = (
+ if (sym == tvar.owner || v.isBivariant) v
+ else loop(sym.owner, nextVariance(sym, v))
+ )
+ loop(base, Covariant)
+ }
+ def isUncheckedVariance(tp: Type) = tp match {
+ case AnnotatedType(annots, _, _) => annots exists (_ matches definitions.uncheckedVarianceClass)
+ case _ => false
+ }
+
+ private def checkVarianceOfSymbol(sym: Symbol) {
+ val relative = relativeVariance(sym)
+ val required = relative * variance
+ if (!relative.isBivariant) {
+ log(s"verifying $sym (${sym.variance}${sym.locationString}) is $required at $base in ${base.owner}")
+ if (sym.variance != required)
+ issueVarianceError(base, sym, required)
+ }
+ }
+ override def mapOver(decls: Scope): Scope = {
+ decls foreach (sym => withVariance(if (sym.isAliasType) Invariant else variance)(this(sym.info)))
+ decls
+ }
+ private def resultTypeOnly(tp: Type) = tp match {
+ case mt: MethodType => !inRefinement
+ case pt: PolyType => true
+ case _ => false
+ }
+
+ /** For PolyTypes, type parameters are skipped because they are defined
+ * explicitly (their TypeDefs will be passed here.) For MethodTypes, the
+ * same is true of the parameters (ValDefs) unless we are inside a
+ * refinement, in which case they are checked from here.
+ */
+ def apply(tp: Type): Type = tp match {
+ case _ if isUncheckedVariance(tp) => tp
+ case _ if resultTypeOnly(tp) => this(tp.resultType)
+ case TypeRef(_, sym, _) if sym.isAliasType => this(tp.normalize)
+ case TypeRef(_, sym, _) if !sym.variance.isInvariant => checkVarianceOfSymbol(sym) ; mapOver(tp)
+ case RefinedType(_, _) => withinRefinement(mapOver(tp))
+ case ClassInfoType(parents, _, _) => parents foreach this ; tp
+ case mt @ MethodType(_, result) => flipped(mt.paramTypes foreach this) ; this(result)
+ case _ => mapOver(tp)
+ }
+ def validateDefinition(base: Symbol) {
+ val saved = this.base
+ this.base = base
+ try apply(base.info)
+ finally this.base = saved
+ }
+ }
+
+ /** Validate variance of info of symbol `base` */
+ private def validateVariance(base: Symbol) {
+ ValidateVarianceMap validateDefinition base
+ }
+
+ override def traverse(tree: Tree) {
+ def sym = tree.symbol
+ // No variance check for object-private/protected methods/values.
+ // Or constructors, or case class factory or extractor.
+ def skip = (
+ sym.hasLocalFlag
+ || sym.owner.isConstructor
+ || sym.owner.isCaseApplyOrUnapply
+ )
+ tree match {
+ case defn: MemberDef if skip =>
+ log(s"Skipping variance check of ${sym.defString}")
+ case ClassDef(_, _, _, _) | TypeDef(_, _, _, _) =>
+ validateVariance(sym)
+ super.traverse(tree)
+ // ModuleDefs need not be considered because they have been eliminated already
+ case ValDef(_, _, _, _) =>
+ validateVariance(sym)
+ case DefDef(_, _, tparams, vparamss, _, _) =>
+ validateVariance(sym)
+ traverseTrees(tparams)
+ traverseTreess(vparamss)
+ case Template(_, _, _) =>
+ super.traverse(tree)
+ case _ =>
+ }
+ }
+ }
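// Editorial sketch (not part of the new file): source-level examples of what the
// validator accepts and rejects. Box and its members are hypothetical. A method
// parameter position flips variance, so a covariant A may not appear there; a
// private[this] member is local-only (Bivariant) and therefore exempt, and the
// usual workaround is a lower-bounded type parameter.
object VarianceValidatorSketch {
  class Box[+A](private[this] var value: A) {           // ok: private[this] is exempt
    // def set(a: A): Unit = value = a                   // rejected: A in contravariant position
    def setWiden[B >: A](b: B): Box[B] = new Box[B](b)   // accepted: lower-bounded fresh parameter
    def get: A = value                                   // ok: A in covariant (result) position
  }
}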
+
+ /** Compute variance of type parameter `tparam` in all types `tps`. */
+ def varianceInTypes(tps: List[Type])(tparam: Symbol): Variance =
+ fold(tps map (tp => varianceInType(tp)(tparam)))
+
+ /** Compute variance of type parameter `tparam` in type `tp`. */
+ def varianceInType(tp: Type)(tparam: Symbol): Variance = {
+ def inArgs(sym: Symbol, args: List[Type]): Variance = fold(map2(args, sym.typeParams)((a, p) => inType(a) * p.variance))
+ def inSyms(syms: List[Symbol]): Variance = fold(syms map inSym)
+ def inTypes(tps: List[Type]): Variance = fold(tps map inType)
+
+ def inSym(sym: Symbol): Variance = if (sym.isAliasType) inType(sym.info).cut else inType(sym.info)
+ def inType(tp: Type): Variance = tp match {
+ case ErrorType | WildcardType | NoType | NoPrefix => Bivariant
+ case ThisType(_) | ConstantType(_) => Bivariant
+ case TypeRef(_, `tparam`, _) => Covariant
+ case BoundedWildcardType(bounds) => inType(bounds)
+ case NullaryMethodType(restpe) => inType(restpe)
+ case SingleType(pre, sym) => inType(pre)
+ case TypeRef(pre, _, _) if tp.isHigherKinded => inType(pre) // a type constructor cannot occur in tp's args
+ case TypeRef(pre, sym, args) => inType(pre) & inArgs(sym, args)
+ case TypeBounds(lo, hi) => inType(lo).flip & inType(hi)
+ case RefinedType(parents, defs) => inTypes(parents) & inSyms(defs.toList)
+ case MethodType(params, restpe) => inSyms(params).flip & inType(restpe)
+ case PolyType(tparams, restpe) => inSyms(tparams).flip & inType(restpe)
+ case ExistentialType(tparams, restpe) => inSyms(tparams) & inType(restpe)
+ case AnnotatedType(annots, tp, _) => inTypes(annots map (_.atp)) & inType(tp)
+ }
+
+ inType(tp)
+ }
+}
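// Editorial sketch (not part of the new file): what varianceInType computes for
// a type parameter A in a few ordinary shapes, written as source-level examples
// rather than calls into the internal API (the trait names are hypothetical):
//
//   A => Int   A occurs only in parameter position   -> contravariant
//   Int => A   A occurs only in result position      -> covariant
//   A => A     both positions, combined with '&'     -> invariant
//   Int        A does not occur at all               -> bivariant
//
// which is why these declarations typecheck with the given variance annotations:
object VarianceInTypeSketch {
  trait Sink[-A]   { def put(a: A): Unit }                 // A used contravariantly only
  trait Source[+A] { def get(): A }                        // A used covariantly only
  trait Cell[A]    { def get(): A; def put(a: A): Unit }   // mixed use forces invariance
}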
diff --git a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala
index 6170fcbb90..c9dfb7fe71 100644
--- a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala
+++ b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala
@@ -62,11 +62,8 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
writeByte((x & 0x7f).toInt)
}
- /** Write a natural number <code>x</code> at position <code>pos</code>.
+ /** Write a natural number `x` at position `pos`.
* If number is more than one byte, shift rest of array to make space.
- *
- * @param pos ...
- * @param x ...
*/
def patchNat(pos: Int, x: Int) {
def patchNatPrefix(x: Int) {
@@ -81,7 +78,7 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
if (y != 0) patchNatPrefix(y)
}
- /** Write a long number <code>x</code> in signed big endian format, base 256.
+ /** Write a long number `x` in signed big endian format, base 256.
*
* @param x The long number to be written.
*/
@@ -94,9 +91,6 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
// -- Basic input routines --------------------------------------------
- /** Peek at the current byte without moving the read index */
- def peekByte(): Int = bytes(readIndex)
-
/** Read a byte */
def readByte(): Int = {
val x = bytes(readIndex); readIndex += 1; x
@@ -112,7 +106,7 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
do {
b = readByte()
x = (x << 7) + (b & 0x7f)
- } while ((b & 0x80) != 0L);
+ } while ((b & 0x80) != 0L)
x
}
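// Editorial sketch (not part of the patch): the variable-length "Nat" format that
// readNat decodes above -- big-endian groups of 7 bits, with the high bit set on
// every byte except the last. writeNat/readNat below are simplified standalone
// counterparts working on List[Int] rather than the pickle buffer.
object NatCodecSketch {
  def writeNat(x: Long): List[Int] = {
    def prefix(y: Long): List[Int] =
      if (y == 0) Nil else prefix(y >>> 7) :+ (((y & 0x7f) | 0x80).toInt)
    prefix(x >>> 7) :+ (x & 0x7f).toInt
  }
  def readNat(bytes: List[Int]): Long =
    bytes.foldLeft(0L)((acc, b) => (acc << 7) + (b & 0x7f))

  def main(args: Array[String]): Unit = {
    println(writeNat(300) map (b => f"0x$b%02X")) // List(0x82, 0x2C)
    println(readNat(writeNat(300)))               // 300
    println(readNat(writeNat(127)))               // 127 (a single byte, 0x7F)
  }
}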
@@ -151,18 +145,14 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
result.toIndexedSeq
}
- /** Perform operation <code>op</code> until the condition
- * <code>readIndex == end</code> is satisfied.
+ /** Perform operation `op` until the condition
+ * `readIndex == end` is satisfied.
* Concatenate results into a list.
- *
- * @param end ...
- * @param op ...
- * @return ...
*/
def until[T](end: Int, op: () => T): List[T] =
- if (readIndex == end) List() else op() :: until(end, op);
+ if (readIndex == end) List() else op() :: until(end, op)
- /** Perform operation <code>op</code> the number of
+ /** Perform operation `op` the number of
* times specified. Concatenate the results into a list.
*/
def times[T](n: Int, op: ()=>T): List[T] =
diff --git a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala
index 16747af08a..3722c77aa2 100644
--- a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala
+++ b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala
@@ -56,7 +56,7 @@ object PickleFormat {
* | 42 ANNOTATEDtpe len_Nat [sym_Ref /* no longer needed */] tpe_Ref {annotinfo_Ref}
* | 43 ANNOTINFO len_Nat AnnotInfoBody
* | 44 ANNOTARGARRAY len_Nat {constAnnotArg_Ref}
- * | 47 DEBRUIJNINDEXtpe len_Nat level_Nat index_Nat
+ * | 47 DEBRUIJNINDEXtpe len_Nat level_Nat index_Nat /* no longer needed */
* | 48 EXISTENTIALtpe len_Nat type_Ref {symbol_Ref}
* | 49 TREE len_Nat 1 EMPTYtree
* | 49 TREE len_Nat 2 PACKAGEtree type_Ref sym_Ref mods_Ref name_Ref {tree_Ref}
@@ -115,7 +115,6 @@ object PickleFormat {
*/
val MajorVersion = 5
val MinorVersion = 0
- def VersionString = "V" + MajorVersion + "." + MinorVersion
final val TERMname = 1
final val TYPEname = 2
@@ -161,7 +160,7 @@ object PickleFormat {
final val ANNOTARGARRAY = 44
final val SUPERtpe = 46
- final val DEBRUIJNINDEXtpe = 47
+ final val DEBRUIJNINDEXtpe = 47 // no longer generated
final val EXISTENTIALtpe = 48
final val TREE = 49 // prefix code that means a tree is coming
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index 603fff4f1c..3850f965b0 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -28,8 +28,8 @@ abstract class UnPickler {
* from an array of bytes.
* @param bytes bytearray from which we unpickle
* @param offset offset from which unpickling starts
- * @param classroot the top-level class which is unpickled, or NoSymbol if inapplicable
- * @param moduleroot the top-level module which is unpickled, or NoSymbol if inapplicable
+ * @param classRoot the top-level class which is unpickled, or NoSymbol if inapplicable
+ * @param moduleRoot the top-level module which is unpickled, or NoSymbol if inapplicable
* @param filename filename associated with bytearray, only used for error messages
*/
def unpickle(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) {
@@ -159,9 +159,9 @@ abstract class UnPickler {
result
}
- /** If entry at <code>i</code> is undefined, define it by performing
- * operation <code>op</code> with <code>readIndex at start of i'th
- * entry. Restore <code>readIndex</code> afterwards.
+ /** If entry at `i` is undefined, define it by performing
+ * operation `op` with `readIndex` at the start of the i'th
+ * entry. Restore `readIndex` afterwards.
*/
protected def at[T <: AnyRef](i: Int, op: () => T): T = {
var r = entries(i)
@@ -186,13 +186,12 @@ abstract class UnPickler {
case _ => errorBadSignature("bad name tag: " + tag)
}
}
- protected def readTermName(): TermName = readName().toTermName
- protected def readTypeName(): TypeName = readName().toTypeName
+ private def readEnd() = readNat() + readIndex
/** Read a symbol */
protected def readSymbol(): Symbol = {
val tag = readByte()
- val end = readNat() + readIndex
+ val end = readEnd()
def atEnd = readIndex == end
def readExtSymbol(): Symbol = {
@@ -325,7 +324,7 @@ abstract class UnPickler {
*/
protected def readType(forceProperType: Boolean = false): Type = {
val tag = readByte()
- val end = readNat() + readIndex
+ val end = readEnd()
(tag: @switch) match {
case NOtpe =>
NoType
@@ -344,7 +343,7 @@ abstract class UnPickler {
case TYPEREFtpe =>
val pre = readTypeRef()
val sym = readSymbolRef()
- var args = until(end, readTypeRef)
+ val args = until(end, readTypeRef)
TypeRef(pre, sym, args)
case TYPEBOUNDStpe =>
TypeBounds(readTypeRef(), readTypeRef())
@@ -431,7 +430,7 @@ abstract class UnPickler {
protected def readChildren() {
val tag = readByte()
assert(tag == CHILDREN)
- val end = readNat() + readIndex
+ val end = readEnd()
val target = readSymbolRef()
while (readIndex != end) target addChild readSymbolRef()
}
@@ -450,7 +449,7 @@ abstract class UnPickler {
*/
private def readArrayAnnot() = {
readByte() // skip the `annotargarray` tag
- val end = readNat() + readIndex
+ val end = readEnd()
until(end, () => readClassfileAnnotArg(readNat())).toArray(JavaArgumentTag)
}
protected def readClassfileAnnotArg(i: Int): ClassfileAnnotArg = bytes(index(i)) match {
@@ -486,7 +485,7 @@ abstract class UnPickler {
val tag = readByte()
if (tag != SYMANNOT)
errorBadSignature("symbol annotation expected ("+ tag +")")
- val end = readNat() + readIndex
+ val end = readEnd()
val target = readSymbolRef()
target.addAnnotation(readAnnotationInfo(end))
}
@@ -497,7 +496,7 @@ abstract class UnPickler {
val tag = readByte()
if (tag != ANNOTINFO)
errorBadSignature("annotation expected (" + tag + ")")
- val end = readNat() + readIndex
+ val end = readEnd()
readAnnotationInfo(end)
}
@@ -506,7 +505,7 @@ abstract class UnPickler {
val outerTag = readByte()
if (outerTag != TREE)
errorBadSignature("tree expected (" + outerTag + ")")
- val end = readNat() + readIndex
+ val end = readEnd()
val tag = readByte()
val tpe = if (tag == EMPTYtree) NoType else readTypeRef()
@@ -764,7 +763,8 @@ abstract class UnPickler {
val tag = readNat()
if (tag != MODIFIERS)
errorBadSignature("expected a modifiers tag (" + tag + ")")
- val end = readNat() + readIndex
+
+ readEnd()
val pflagsHi = readNat()
val pflagsLo = readNat()
val pflags = (pflagsHi.toLong << 32) + pflagsLo
@@ -796,7 +796,6 @@ abstract class UnPickler {
protected def readTreeRef(): Tree = at(readNat(), readTree)
protected def readTypeNameRef(): TypeName = readNameRef().toTypeName
- protected def readTermNameRef(): TermName = readNameRef().toTermName
protected def readTemplateRef(): Template =
readTreeRef() match {
@@ -843,7 +842,6 @@ abstract class UnPickler {
* error reporting, so we rely on the typechecker to report the error).
*/
def toTypeError(e: MissingRequirementError) = {
- // e.printStackTrace()
new TypeError(e.msg)
}
@@ -853,7 +851,7 @@ abstract class UnPickler {
private val p = phase
override def complete(sym: Symbol) : Unit = try {
val tp = at(i, () => readType(sym.isTerm)) // after NMT_TRANSITION, revert `() => readType(sym.isTerm)` to `readType`
- atPhase(p) (sym setInfo tp)
+ enteringPhase(p) (sym setInfo tp)
if (currentRunId != definedAtRunId)
sym.setInfo(adaptToNewRunMap(tp))
}
@@ -871,7 +869,7 @@ abstract class UnPickler {
super.complete(sym)
var alias = at(j, readSymbol)
if (alias.isOverloaded)
- alias = atPhase(picklerPhase)((alias suchThat (alt => sym.tpe =:= sym.owner.thisType.memberType(alt))))
+ alias = enteringPhase(picklerPhase)((alias suchThat (alt => sym.tpe =:= sym.owner.thisType.memberType(alt))))
sym.asInstanceOf[TermSymbol].setAlias(alias)
}
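Aside: readEnd() works because every length-prefixed pickle entry is laid out as tag_Byte len_Nat
{payload}, so the payload ends at readNat() + readIndex. A standalone sketch of that framing
(illustrative only; Reader and its byte layout are made up and are not the UnPickler API):

    object EntryFramingSketch {
      final class Reader(bytes: Array[Byte]) {
        var readIndex = 0
        def readByte(): Int = { val b = bytes(readIndex) & 0xff; readIndex += 1; b }
        def readNat(): Int = {
          var x = 0; var b = 0
          do { b = readByte(); x = (x << 7) + (b & 0x7f) } while ((b & 0x80) != 0)
          x
        }
        // Same shape as the new helper: end index of the current entry's payload.
        def readEnd(): Int = readNat() + readIndex
      }

      def main(args: Array[String]): Unit = {
        // One fake entry: tag 42, payload length 3, payload = three one-byte Nats.
        val r = new Reader(Array[Byte](42, 3, 1, 2, 3))
        val tag = r.readByte()
        val end = r.readEnd()
        var payload = List.empty[Int]
        while (r.readIndex != end) payload = r.readNat() :: payload
        println(s"tag=$tag payload=${payload.reverse}") // tag=42 payload=List(1, 2, 3)
      }
    }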
diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
index 81368df7a6..506edb861e 100644
--- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
+++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
@@ -40,11 +40,13 @@ abstract class MutableSettings extends AbsSettings {
def verbose: BooleanSetting
def uniqid: BooleanSetting
def Yshowsymkinds: BooleanSetting
+ def Yposdebug: BooleanSetting
+ def Yrangepos: BooleanSetting
def Xprintpos: BooleanSetting
def Yrecursion: IntSetting
def maxClassfileName: IntSetting
def Xexperimental: BooleanSetting
- def XoldPatmat: BooleanSetting
def XnoPatmatAnalysis: BooleanSetting
def XfullLubs: BooleanSetting
+ def breakCycles: BooleanSetting
}
diff --git a/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala b/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala
new file mode 100644
index 0000000000..e5ddd8f359
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala
@@ -0,0 +1,50 @@
+package scala.reflect
+package internal
+package tpe
+
+private[internal] trait CommonOwners {
+ self: SymbolTable =>
+
+ /** The most deeply nested owner that contains all the symbols
+ * of thistype or prefixless typerefs/singletype occurrences in the given type.
+ */
+ protected[internal] def commonOwner(t: Type): Symbol = commonOwner(t :: Nil)
+
+ /** The most deeply nested owner that contains all the symbols
+ * of thistype or prefixless typerefs/singletype occurrences in the given list
+ * of types.
+ */
+ protected[internal] def commonOwner(tps: List[Type]): Symbol = {
+ if (tps.isEmpty) NoSymbol
+ else {
+ commonOwnerMap.clear()
+ tps foreach (commonOwnerMap traverse _)
+ if (commonOwnerMap.result ne null) commonOwnerMap.result else NoSymbol
+ }
+ }
+
+ protected def commonOwnerMap: CommonOwnerMap = commonOwnerMapObj
+
+ protected class CommonOwnerMap extends TypeTraverserWithResult[Symbol] {
+ var result: Symbol = _
+
+ def clear() { result = null }
+
+ private def register(sym: Symbol) {
+ // First considered type is the trivial result.
+ if ((result eq null) || (sym eq NoSymbol))
+ result = sym
+ else
+ while ((result ne NoSymbol) && (result ne sym) && !(sym isNestedIn result))
+ result = result.owner
+ }
+ def traverse(tp: Type) = tp.normalize match {
+ case ThisType(sym) => register(sym)
+ case TypeRef(NoPrefix, sym, args) => register(sym.owner) ; args foreach traverse
+ case SingleType(NoPrefix, sym) => register(sym.owner)
+ case _ => mapOver(tp)
+ }
+ }
+
+ private lazy val commonOwnerMapObj = new CommonOwnerMap
+}
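Aside: register walks the running result up its owner chain until the new symbol is nested inside
it, so folding it over all registered symbols lands on the most deeply nested common owner. A
standalone sketch with a toy owner tree (illustrative only; Sym and the sample hierarchy are made
up, and the NoSymbol special case is omitted):

    object CommonOwnerSketch {
      final case class Sym(name: String, owner: Sym) {
        def isNestedIn(other: Sym): Boolean =
          (this eq other) || (owner != null && (owner isNestedIn other))
        override def toString = name
      }

      // Same idea as CommonOwnerMap.register: climb the current result's owner chain
      // until it encloses the newly seen symbol.
      def register(result: Sym, sym: Sym): Sym =
        if (result == null) sym
        else {
          var r = result
          while (r != null && !(sym isNestedIn r)) r = r.owner
          r
        }

      def main(args: Array[String]): Unit = {
        val root  = Sym("root", null)
        val pkg   = Sym("pkg", root)
        val a     = Sym("A", pkg)
        val inner = Sym("A.Inner", a)
        val b     = Sym("B", pkg)
        val owner = List(inner, b).foldLeft(null: Sym)(register)
        println(s"common owner of $inner and $b: $owner") // pkg
      }
    }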
diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
new file mode 100644
index 0000000000..bdccc75d6d
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
@@ -0,0 +1,592 @@
+package scala.reflect
+package internal
+package tpe
+
+import scala.collection.{ mutable }
+import util.Statistics
+import Variance._
+
+private[internal] trait GlbLubs {
+ self: SymbolTable =>
+ import definitions._
+ import TypesStats._
+
+ private final val printLubs = sys.props contains "scalac.debug.lub"
+
+ /** In case anyone wants to turn off lub verification without reverting anything. */
+ private final val verifyLubs = true
+
+
+ private def printLubMatrix(btsMap: Map[Type, List[Type]], depth: Int) {
+ import util.TableDef
+ import TableDef.Column
+ def str(tp: Type) = {
+ if (tp == NoType) ""
+ else {
+ val s = ("" + tp).replaceAll("""[\w.]+\.(\w+)""", "$1")
+ if (s.length < 60) s
+ else (s take 57) + "..."
+ }
+ }
+
+ val sorted = btsMap.toList.sortWith((x, y) => x._1.typeSymbol isLess y._1.typeSymbol)
+ val maxSeqLength = sorted.map(_._2.size).max
+ val padded = sorted map (_._2.padTo(maxSeqLength, NoType))
+ val transposed = padded.transpose
+
+ val columns: List[Column[List[Type]]] = mapWithIndex(sorted) {
+ case ((k, v), idx) =>
+ Column(str(k), (xs: List[Type]) => str(xs(idx)), left = true)
+ }
+
+ val tableDef = TableDef(columns: _*)
+ val formatted = tableDef.table(transposed)
+ println("** Depth is " + depth + "\n" + formatted)
+ }
+
+ /** From a list of types, find any which take type parameters
+ * where the type parameter bounds contain references to
+ * any other types in the list (including itself).
+ *
+ * @return List of symbol pairs holding the recursive type
+ * parameter and the parameter which references it.
+ */
+ def findRecursiveBounds(ts: List[Type]): List[(Symbol, Symbol)] = {
+ if (ts.isEmpty) Nil
+ else {
+ val sym = ts.head.typeSymbol
+ require(ts.tail forall (_.typeSymbol == sym), ts)
+ for (p <- sym.typeParams ; in <- sym.typeParams ; if in.info.bounds contains p) yield
+ p -> in
+ }
+ }
+
+ /** Given a matrix `tsBts` whose columns are basetype sequences (and the symbols `tsParams` that should be interpreted as type parameters in this matrix),
+ * compute its least sorted upwards closed upper bound relative to the following ordering <= between lists of types:
+ *
+ * xs <= ys iff forall y in ys exists x in xs such that x <: y
+ *
+ * @arg tsParams for each type in the original list of types `ts0`, its list of type parameters (if that type is a type constructor)
+ * (these type parameters may be referred to by type arguments in the BTS column of those types,
+ * and must be interpreted as bound variables; i.e., under a type lambda that wraps the types that refer to these type params)
+ * @arg tsBts a matrix whose columns are basetype sequences
+ * the first row is the original list of types for which we're computing the lub
+ * (except that type constructors have been applied to their dummyArgs)
+ * @see baseTypeSeq for a definition of sorted and upwards closed.
+ */
+ def lubList(ts: List[Type], depth: Int): List[Type] = {
+ var lubListDepth = 0
+ // This catches some recursive situations which would otherwise
+ // befuddle us, e.g. pos/hklub0.scala
+ def isHotForTs(xs: List[Type]) = ts exists (_.typeParams == xs.map(_.typeSymbol))
+
+ def elimHigherOrderTypeParam(tp: Type) = tp match {
+ case TypeRef(_, _, args) if args.nonEmpty && isHotForTs(args) =>
+ logResult("Retracting dummies from " + tp + " in lublist")(tp.typeConstructor)
+ case _ => tp
+ }
+ // pretypes is a tail-recursion-preserving accumulator.
+ @annotation.tailrec def loop(pretypes: List[Type], tsBts: List[List[Type]]): List[Type] = {
+ lubListDepth += 1
+
+ if (tsBts.isEmpty || (tsBts exists typeListIsEmpty)) pretypes.reverse
+ else if (tsBts.tail.isEmpty) pretypes.reverse ++ tsBts.head
+ else {
+ // ts0 is the 1-dimensional frontier of symbols cutting through 2-dimensional tsBts.
+ // Invariant: all symbols "under" (closer to the first row) the frontier
+ // are smaller (according to _.isLess) than the ones "on and beyond" the frontier
+ val ts0 = tsBts map (_.head)
+
+ // Is the frontier made up of types with the same symbol?
+ val isUniformFrontier = (ts0: @unchecked) match {
+ case t :: ts => ts forall (_.typeSymbol == t.typeSymbol)
+ }
+
+ // Produce a single type for this frontier by merging the prefixes and arguments of those
+ // typerefs that share the same symbol: that symbol is the current maximal symbol for which
+ // the invariant holds, i.e., the one that conveys most information regarding subtyping. Before
+ // merging, strip targs that refer to bound tparams (when we're computing the lub of type
+ // constructors.) Also filter out all types that are a subtype of some other type.
+ if (isUniformFrontier) {
+ val fbounds = findRecursiveBounds(ts0) map (_._2)
+ val tcLubList = typeConstructorLubList(ts0)
+ def isRecursive(tp: Type) = tp.typeSymbol.typeParams exists fbounds.contains
+
+ val ts1 = ts0 map { t =>
+ if (isRecursive(t)) {
+ tcLubList map (t baseType _.typeSymbol) find (t => !isRecursive(t)) match {
+ case Some(tp) => logResult(s"Breaking recursion in lublist, substituting weaker type.\n Was: $t\n Now")(tp)
+ case _ => t
+ }
+ }
+ else t
+ }
+ val tails = tsBts map (_.tail)
+ mergePrefixAndArgs(elimSub(ts1, depth) map elimHigherOrderTypeParam, Covariant, depth) match {
+ case Some(tp) => loop(tp :: pretypes, tails)
+ case _ => loop(pretypes, tails)
+ }
+ }
+ else {
+ // frontier is not uniform yet, move it beyond the current minimal symbol;
+ // lather, rinse, repeat
+ val sym = minSym(ts0)
+ val newtps = tsBts map (ts => if (ts.head.typeSymbol == sym) ts.tail else ts)
+ if (printLubs) {
+ val str = (newtps.zipWithIndex map { case (tps, idx) =>
+ tps.map(" " + _ + "\n").mkString(" (" + idx + ")\n", "", "\n")
+ }).mkString("")
+
+ println("Frontier(\n" + str + ")")
+ printLubMatrix((ts zip tsBts).toMap, lubListDepth)
+ }
+
+ loop(pretypes, newtps)
+ }
+ }
+ }
+
+ val initialBTSes = ts map (_.baseTypeSeq.toList)
+ if (printLubs)
+ printLubMatrix((ts zip initialBTSes).toMap, depth)
+
+ loop(Nil, initialBTSes)
+ }
+
+ /** The minimal symbol of a list of types (as determined by `Symbol.isLess`). */
+ private def minSym(tps: List[Type]): Symbol =
+ (tps.head.typeSymbol /: tps.tail) {
+ (sym1, tp2) => if (tp2.typeSymbol isLess sym1) tp2.typeSymbol else sym1
+ }
+
+ /** A minimal type list which has a given list of types as its base type sequence */
+ def spanningTypes(ts: List[Type]): List[Type] = ts match {
+ case List() => List()
+ case first :: rest =>
+ first :: spanningTypes(
+ rest filter (t => !first.typeSymbol.isSubClass(t.typeSymbol)))
+ }
+
+ /** Eliminate from list of types all elements which are a supertype
+ * of some other element of the list. */
+ private def elimSuper(ts: List[Type]): List[Type] = ts match {
+ case List() => List()
+ case List(t) => List(t)
+ case t :: ts1 =>
+ val rest = elimSuper(ts1 filter (t1 => !(t <:< t1)))
+ if (rest exists (t1 => t1 <:< t)) rest else t :: rest
+ }
+
+ /** Eliminate from list of types all elements which are a subtype
+ * of some other element of the list. */
+ private def elimSub(ts: List[Type], depth: Int): List[Type] = {
+ def elimSub0(ts: List[Type]): List[Type] = ts match {
+ case List() => List()
+ case List(t) => List(t)
+ case t :: ts1 =>
+ val rest = elimSub0(ts1 filter (t1 => !isSubType(t1, t, decr(depth))))
+ if (rest exists (t1 => isSubType(t, t1, decr(depth)))) rest else t :: rest
+ }
+ val ts0 = elimSub0(ts)
+ if (ts0.isEmpty || ts0.tail.isEmpty) ts0
+ else {
+ val ts1 = ts0 mapConserve (t => elimAnonymousClass(t.dealiasWiden))
+ if (ts1 eq ts0) ts0
+ else elimSub(ts1, depth)
+ }
+ }
+
+ private def stripExistentialsAndTypeVars(ts: List[Type]): (List[Type], List[Symbol]) = {
+ val quantified = ts flatMap {
+ case ExistentialType(qs, _) => qs
+ case t => List()
+ }
+ def stripType(tp: Type): Type = tp match {
+ case ExistentialType(_, res) =>
+ res
+ case tv@TypeVar(_, constr) =>
+ if (tv.instValid) stripType(constr.inst)
+ else if (tv.untouchable) tv
+ else abort("trying to do lub/glb of typevar "+tp)
+ case t => t
+ }
+ val strippedTypes = ts mapConserve stripType
+ (strippedTypes, quantified)
+ }
+
+ def weakLub(ts: List[Type]) =
+ if (ts.nonEmpty && (ts forall isNumericValueType)) (numericLub(ts), true)
+ else if (ts exists typeHasAnnotations)
+ (annotationsLub(lub(ts map (_.withoutAnnotations)), ts), true)
+ else (lub(ts), false)
+
+ def numericLub(ts: List[Type]) =
+ ts reduceLeft ((t1, t2) =>
+ if (isNumericSubType(t1, t2)) t2
+ else if (isNumericSubType(t2, t1)) t1
+ else IntClass.tpe)
+
+ private val lubResults = new mutable.HashMap[(Int, List[Type]), Type]
+ private val glbResults = new mutable.HashMap[(Int, List[Type]), Type]
+
+ /** Given a list of types, finds all the base classes they have in
+ * common, then returns a list of type constructors derived directly
+ * from the symbols (so any more specific type information is ignored.)
+ * The list is filtered such that every type constructor in the list
+ * expects the same number of type arguments, which is chosen based
+ * on the deepest class among the common baseclasses.
+ */
+ def typeConstructorLubList(ts: List[Type]): List[Type] = {
+ val bcs = ts.flatMap(_.baseClasses).distinct sortWith (_ isLess _)
+ val tcons = bcs filter (clazz => ts forall (_.typeSymbol isSubClass clazz))
+
+ tcons map (_.typeConstructor) match {
+ case Nil => Nil
+ case t :: ts => t :: ts.filter(_.typeParams.size == t.typeParams.size)
+ }
+ }
+
+ def lub(ts: List[Type]): Type = ts match {
+ case List() => NothingClass.tpe
+ case List(t) => t
+ case _ =>
+ if (Statistics.canEnable) Statistics.incCounter(lubCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
+ try {
+ val res = lub(ts, lubDepth(ts))
+ // If the number of unapplied type parameters in all incoming
+ // types is consistent, and the lub does not match that, return
+ // the type constructor of the calculated lub instead. This
+ // is because lubbing type constructors tends to result in types
+ // which have been applied to dummies or Nothing.
+ ts.map(_.typeParams.size).distinct match {
+ case x :: Nil if res.typeParams.size != x =>
+ logResult(s"Stripping type args from lub because $res is not consistent with $ts")(res.typeConstructor)
+ case _ =>
+ res
+ }
+ }
+ finally {
+ lubResults.clear()
+ glbResults.clear()
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ }
+ }
+
+ /** The least upper bound wrt <:< of a list of types */
+ protected[internal] def lub(ts: List[Type], depth: Int): Type = {
+ def lub0(ts0: List[Type]): Type = elimSub(ts0, depth) match {
+ case List() => NothingClass.tpe
+ case List(t) => t
+ case ts @ PolyType(tparams, _) :: _ =>
+ val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
+ tparam.cloneSymbol.setInfo(glb(bounds, depth)))
+ PolyType(tparams1, lub0(matchingInstTypes(ts, tparams1)))
+ case ts @ (mt @ MethodType(params, _)) :: rest =>
+ MethodType(params, lub0(matchingRestypes(ts, mt.paramTypes)))
+ case ts @ NullaryMethodType(_) :: rest =>
+ NullaryMethodType(lub0(matchingRestypes(ts, Nil)))
+ case ts @ TypeBounds(_, _) :: rest =>
+ TypeBounds(glb(ts map (_.bounds.lo), depth), lub(ts map (_.bounds.hi), depth))
+ case ts @ AnnotatedType(annots, tpe, _) :: rest =>
+ annotationsLub(lub0(ts map (_.withoutAnnotations)), ts)
+ case ts =>
+ lubResults get (depth, ts) match {
+ case Some(lubType) =>
+ lubType
+ case None =>
+ lubResults((depth, ts)) = AnyClass.tpe
+ val res = if (depth < 0) AnyClass.tpe else lub1(ts)
+ lubResults((depth, ts)) = res
+ res
+ }
+ }
+ def lub1(ts0: List[Type]): Type = {
+ val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
+ val lubBaseTypes: List[Type] = lubList(ts, depth)
+ val lubParents = spanningTypes(lubBaseTypes)
+ val lubOwner = commonOwner(ts)
+ val lubBase = intersectionType(lubParents, lubOwner)
+ val lubType =
+ if (phase.erasedTypes || depth == 0 ) lubBase
+ else {
+ val lubRefined = refinedType(lubParents, lubOwner)
+ val lubThisType = lubRefined.typeSymbol.thisType
+ val narrowts = ts map (_.narrow)
+ def excludeFromLub(sym: Symbol) = (
+ sym.isClass
+ || sym.isConstructor
+ || !sym.isPublic
+ || isGetClass(sym)
+ || sym.isFinal
+ || narrowts.exists(t => !refines(t, sym))
+ )
+ def lubsym(proto: Symbol): Symbol = {
+ val prototp = lubThisType.memberInfo(proto)
+ val syms = narrowts map (t =>
+ t.nonPrivateMember(proto.name).suchThat(sym =>
+ sym.tpe matches prototp.substThis(lubThisType.typeSymbol, t)))
+
+ if (syms contains NoSymbol) NoSymbol
+ else {
+ val symtypes =
+ map2(narrowts, syms)((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType))
+ if (proto.isTerm) // possible problem: owner of info is still the old one, instead of new refinement class
+ proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(lub(symtypes, decr(depth)))
+ else if (symtypes.tail forall (symtypes.head =:= _))
+ proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(symtypes.head)
+ else {
+ def lubBounds(bnds: List[TypeBounds]): TypeBounds =
+ TypeBounds(glb(bnds map (_.lo), decr(depth)), lub(bnds map (_.hi), decr(depth)))
+ lubRefined.typeSymbol.newAbstractType(proto.name.toTypeName, proto.pos)
+ .setInfoOwnerAdjusted(lubBounds(symtypes map (_.bounds)))
+ }
+ }
+ }
+ def refines(tp: Type, sym: Symbol): Boolean = {
+ val syms = tp.nonPrivateMember(sym.name).alternatives
+ !syms.isEmpty && (syms forall (alt =>
+ // todo alt != sym is strictly speaking not correct, but without it we lose
+ // efficiency.
+ alt != sym && !specializesSym(lubThisType, sym, tp, alt, depth)))
+ }
+ // add a refinement symbol for all non-class members of lubBase
+ // which are refined by every type in ts.
+ for (sym <- lubBase.nonPrivateMembers ; if !excludeFromLub(sym)) {
+ try lubsym(sym) andAlso (addMember(lubThisType, lubRefined, _, depth))
+ catch {
+ case ex: NoCommonType =>
+ }
+ }
+ if (lubRefined.decls.isEmpty) lubBase
+ else if (!verifyLubs) lubRefined
+ else {
+ // Verify that every given type conforms to the calculated lub.
+ // In theory this should not be necessary, but higher-order type
+ // parameters are not handled correctly.
+ val ok = ts forall { t =>
+ isSubType(t, lubRefined, depth) || {
+ if (settings.debug.value || printLubs) {
+ Console.println(
+ "Malformed lub: " + lubRefined + "\n" +
+ "Argument " + t + " does not conform. Falling back to " + lubBase
+ )
+ }
+ false
+ }
+ }
+ // If not, fall back on the more conservative calculation.
+ if (ok) lubRefined
+ else lubBase
+ }
+ }
+ // dropIllegalStarTypes is a localized fix for SI-6897. We should probably
+ // integrate that transformation at a lower level in master, but lubs are
+ // the likely and maybe only spot they escape, so fixing here for 2.10.1.
+ existentialAbstraction(tparams, dropIllegalStarTypes(lubType))
+ }
+ if (printLubs) {
+ println(indent + "lub of " + ts + " at depth "+depth)//debug
+ indent = indent + " "
+ assert(indent.length <= 100)
+ }
+ if (Statistics.canEnable) Statistics.incCounter(nestedLubCount)
+ val res = lub0(ts)
+ if (printLubs) {
+ indent = indent stripSuffix " "
+ println(indent + "lub of " + ts + " is " + res)//debug
+ }
+ if (ts forall typeIsNotNull) res.notNull else res
+ }
+
+ val GlbFailure = new Throwable
+
+ /** A global counter for glb calls in the `specializes` query connected to the `addMembers`
+ * call in `glb`. There's a possible infinite recursion when `specializes` calls
+ * memberType, which calls baseTypeSeq, which calls mergePrefixAndArgs, which calls glb.
+ * The counter breaks this recursion after two calls.
+ * If the recursion is broken, no member is added to the glb.
+ */
+ private var globalGlbDepth = 0
+ private final val globalGlbLimit = 2
+
+ /** The greatest lower bound of a list of types (as determined by `<:<`). */
+ def glb(ts: List[Type]): Type = elimSuper(ts) match {
+ case List() => AnyClass.tpe
+ case List(t) => t
+ case ts0 =>
+ if (Statistics.canEnable) Statistics.incCounter(lubCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
+ try {
+ glbNorm(ts0, lubDepth(ts0))
+ } finally {
+ lubResults.clear()
+ glbResults.clear()
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ }
+ }
+
+ protected[internal] def glb(ts: List[Type], depth: Int): Type = elimSuper(ts) match {
+ case List() => AnyClass.tpe
+ case List(t) => t
+ case ts0 => glbNorm(ts0, depth)
+ }
+
+ /** The greatest lower bound of a list of types (as determined by `<:<`) that has already
+ * been normalized with regard to `elimSuper`. */
+ protected def glbNorm(ts: List[Type], depth: Int): Type = {
+ def glb0(ts0: List[Type]): Type = ts0 match {
+ case List() => AnyClass.tpe
+ case List(t) => t
+ case ts @ PolyType(tparams, _) :: _ =>
+ val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
+ tparam.cloneSymbol.setInfo(lub(bounds, depth)))
+ PolyType(tparams1, glbNorm(matchingInstTypes(ts, tparams1), depth))
+ case ts @ (mt @ MethodType(params, _)) :: rest =>
+ MethodType(params, glbNorm(matchingRestypes(ts, mt.paramTypes), depth))
+ case ts @ NullaryMethodType(_) :: rest =>
+ NullaryMethodType(glbNorm(matchingRestypes(ts, Nil), depth))
+ case ts @ TypeBounds(_, _) :: rest =>
+ TypeBounds(lub(ts map (_.bounds.lo), depth), glb(ts map (_.bounds.hi), depth))
+ case ts =>
+ glbResults get (depth, ts) match {
+ case Some(glbType) =>
+ glbType
+ case _ =>
+ glbResults((depth, ts)) = NothingClass.tpe
+ val res = if (depth < 0) NothingClass.tpe else glb1(ts)
+ glbResults((depth, ts)) = res
+ res
+ }
+ }
+ def glb1(ts0: List[Type]): Type = {
+ try {
+ val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
+ val glbOwner = commonOwner(ts)
+ def refinedToParents(t: Type): List[Type] = t match {
+ case RefinedType(ps, _) => ps flatMap refinedToParents
+ case _ => List(t)
+ }
+ def refinedToDecls(t: Type): List[Scope] = t match {
+ case RefinedType(ps, decls) =>
+ val dss = ps flatMap refinedToDecls
+ if (decls.isEmpty) dss else decls :: dss
+ case _ => List()
+ }
+ val ts1 = ts flatMap refinedToParents
+ val glbBase = intersectionType(ts1, glbOwner)
+ val glbType =
+ if (phase.erasedTypes || depth == 0) glbBase
+ else {
+ val glbRefined = refinedType(ts1, glbOwner)
+ val glbThisType = glbRefined.typeSymbol.thisType
+ def glbsym(proto: Symbol): Symbol = {
+ val prototp = glbThisType.memberInfo(proto)
+ val syms = for (t <- ts;
+ alt <- (t.nonPrivateMember(proto.name).alternatives)
+ if glbThisType.memberInfo(alt) matches prototp
+ ) yield alt
+ val symtypes = syms map glbThisType.memberInfo
+ assert(!symtypes.isEmpty)
+ proto.cloneSymbol(glbRefined.typeSymbol).setInfoOwnerAdjusted(
+ if (proto.isTerm) glb(symtypes, decr(depth))
+ else {
+ def isTypeBound(tp: Type) = tp match {
+ case TypeBounds(_, _) => true
+ case _ => false
+ }
+ def glbBounds(bnds: List[Type]): TypeBounds = {
+ val lo = lub(bnds map (_.bounds.lo), decr(depth))
+ val hi = glb(bnds map (_.bounds.hi), decr(depth))
+ if (lo <:< hi) TypeBounds(lo, hi)
+ else throw GlbFailure
+ }
+ val symbounds = symtypes filter isTypeBound
+ var result: Type =
+ if (symbounds.isEmpty)
+ TypeBounds.empty
+ else glbBounds(symbounds)
+ for (t <- symtypes if !isTypeBound(t))
+ if (result.bounds containsType t) result = t
+ else throw GlbFailure
+ result
+ })
+ }
+ if (globalGlbDepth < globalGlbLimit)
+ try {
+ globalGlbDepth += 1
+ val dss = ts flatMap refinedToDecls
+ for (ds <- dss; sym <- ds.iterator)
+ if (globalGlbDepth < globalGlbLimit && !specializesSym(glbThisType, sym, depth))
+ try {
+ addMember(glbThisType, glbRefined, glbsym(sym), depth)
+ } catch {
+ case ex: NoCommonType =>
+ }
+ } finally {
+ globalGlbDepth -= 1
+ }
+ if (glbRefined.decls.isEmpty) glbBase else glbRefined
+ }
+ existentialAbstraction(tparams, glbType)
+ } catch {
+ case GlbFailure =>
+ if (ts forall (t => NullClass.tpe <:< t)) NullClass.tpe
+ else NothingClass.tpe
+ }
+ }
+ // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG
+
+ if (Statistics.canEnable) Statistics.incCounter(nestedLubCount)
+ val res = glb0(ts)
+
+ // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG
+
+ if (ts exists typeIsNotNull) res.notNull else res
+ }
+
+ /** All types in list must be polytypes with type parameter lists of
+ * same length as tparams.
+ * Returns list of list of bounds infos, where corresponding type
+ * parameters are renamed to tparams.
+ */
+ private def matchingBounds(tps: List[Type], tparams: List[Symbol]): List[List[Type]] = {
+ def getBounds(tp: Type): List[Type] = tp match {
+ case PolyType(tparams1, _) if sameLength(tparams1, tparams) =>
+ tparams1 map (tparam => tparam.info.substSym(tparams1, tparams))
+ case tp =>
+ if (tp ne tp.normalize) getBounds(tp.normalize)
+ else throw new NoCommonType(tps)
+ }
+ tps map getBounds
+ }
+
+ /** All types in list must be polytypes with type parameter lists of
+ * same length as tparams.
+ * Returns list of instance types, where corresponding type
+ * parameters are renamed to tparams.
+ */
+ private def matchingInstTypes(tps: List[Type], tparams: List[Symbol]): List[Type] = {
+ def transformResultType(tp: Type): Type = tp match {
+ case PolyType(tparams1, restpe) if sameLength(tparams1, tparams) =>
+ restpe.substSym(tparams1, tparams)
+ case tp =>
+ if (tp ne tp.normalize) transformResultType(tp.normalize)
+ else throw new NoCommonType(tps)
+ }
+ tps map transformResultType
+ }
+
+ /** All types in list must be method types with equal parameter types.
+ * Returns list of their result types.
+ */
+ private def matchingRestypes(tps: List[Type], pts: List[Type]): List[Type] =
+ tps map {
+ case mt @ MethodType(params1, res) if isSameTypes(mt.paramTypes, pts) =>
+ res
+ case NullaryMethodType(res) if pts.isEmpty =>
+ res
+ case _ =>
+ throw new NoCommonType(tps)
+ }
+}
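Aside: elimSuper and elimSub above are the symmetric pre-passes of glb and lub: before computing a
bound, drop every element that is already implied by another one. A standalone sketch of the
elimSuper shape over a toy subtype relation (illustrative only; the `parents` map and `isSub`
stand in for `<:<`):

    object ElimSuperSketch {
      // Toy hierarchy: Cat <: Animal <: AnyRef <: Any, and Dog <: Animal.
      val parents = Map("Cat" -> "Animal", "Dog" -> "Animal", "Animal" -> "AnyRef", "AnyRef" -> "Any")
      def isSub(a: String, b: String): Boolean =
        a == b || parents.get(a).exists(isSub(_, b))

      // Same shape as GlbLubs.elimSuper: remove every element that is a supertype
      // of some other element of the list.
      def elimSuper(ts: List[String]): List[String] = ts match {
        case Nil => Nil
        case t :: ts1 =>
          val rest = elimSuper(ts1 filter (t1 => !isSub(t, t1)))
          if (rest exists (t1 => isSub(t1, t))) rest else t :: rest
      }

      def main(args: Array[String]): Unit =
        // "Animal" is a supertype of both "Cat" and "Dog", so it drops out.
        println(elimSuper(List("Cat", "Animal", "Dog"))) // List(Cat, Dog)
    }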
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
new file mode 100644
index 0000000000..82321f61c2
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
@@ -0,0 +1,617 @@
+package scala.reflect
+package internal
+package tpe
+
+import scala.collection.{ mutable }
+import Flags._
+import util.Statistics
+
+trait TypeComparers {
+ self: SymbolTable =>
+ import definitions._
+ import TypesStats._
+
+ private final val LogPendingSubTypesThreshold = DefaultLogThreshhold
+
+ private val pendingSubTypes = new mutable.HashSet[SubTypePair]
+
+ class SubTypePair(val tp1: Type, val tp2: Type) {
+ override def hashCode = tp1.hashCode * 41 + tp2.hashCode
+ override def equals(other: Any) = (this eq other.asInstanceOf[AnyRef]) || (other match {
+ // suspend TypeVars in types compared by =:=,
+ // since we don't want to mutate them simply to check whether a subtype test is pending
+ // in addition to making subtyping "more correct" for type vars,
+ // it should avoid the stackoverflow that's been plaguing us (https://groups.google.com/d/topic/scala-internals/2gHzNjtB4xA/discussion)
+ // this method is only called when subtyping hits a recursion threshold (subsametypeRecursions >= LogPendingSubTypesThreshold)
+ case stp: SubTypePair =>
+ val tvars = List(tp1, stp.tp1, tp2, stp.tp2) flatMap (t => if (t.isGround) Nil else typeVarsInType(t))
+ suspendingTypeVars(tvars)(tp1 =:= stp.tp1 && tp2 =:= stp.tp2)
+ case _ =>
+ false
+ })
+ override def toString = tp1+" <:<? "+tp2
+ }
+
+ private var subsametypeRecursions: Int = 0
+
+ private def isUnifiable(pre1: Type, pre2: Type) =
+ (beginsWithTypeVarOrIsRefined(pre1) || beginsWithTypeVarOrIsRefined(pre2)) && (pre1 =:= pre2)
+
+ /** Returns true iff we are past phase specialize,
+ * sym1 and sym2 are two existential skolems with equal names and bounds,
+ * and pre1 and pre2 are equal prefixes
+ */
+ private def isSameSpecializedSkolem(sym1: Symbol, sym2: Symbol, pre1: Type, pre2: Type) = {
+ sym1.isExistentialSkolem && sym2.isExistentialSkolem &&
+ sym1.name == sym2.name &&
+ phase.specialized &&
+ sym1.info =:= sym2.info &&
+ pre1 =:= pre2
+ }
+
+ private def isSubPre(pre1: Type, pre2: Type, sym: Symbol) =
+ if ((pre1 ne pre2) && (pre1 ne NoPrefix) && (pre2 ne NoPrefix) && pre1 <:< pre2) {
+ if (settings.debug.value) println(s"new isSubPre $sym: $pre1 <:< $pre2")
+ true
+ } else
+ false
+
+ private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean =
+ if (sym1 == sym2) sym1.hasPackageFlag || sym1.owner.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2
+ else (sym1.name == sym2.name) && isUnifiable(pre1, pre2)
+
+
+ def isDifferentType(tp1: Type, tp2: Type): Boolean = try {
+ subsametypeRecursions += 1
+ undoLog undo { // undo type constraints that arise from operations in this block
+ !isSameType1(tp1, tp2)
+ }
+ } finally {
+ subsametypeRecursions -= 1
+ // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
+ // it doesn't help to keep separate recursion counts for the three methods that now share it
+ // if (subsametypeRecursions == 0) undoLog.clear()
+ }
+
+ def isDifferentTypeConstructor(tp1: Type, tp2: Type): Boolean = tp1 match {
+ case TypeRef(pre1, sym1, _) =>
+ tp2 match {
+ case TypeRef(pre2, sym2, _) => sym1 != sym2 || isDifferentType(pre1, pre2)
+ case _ => true
+ }
+ case _ => true
+ }
+
+ /** Do `tp1` and `tp2` denote equivalent types? */
+ def isSameType(tp1: Type, tp2: Type): Boolean = try {
+ if (Statistics.canEnable) Statistics.incCounter(sametypeCount)
+ subsametypeRecursions += 1
+ //OPT cutdown on Function0 allocation
+ //was:
+ // undoLog undoUnless {
+ // isSameType1(tp1, tp2)
+ // }
+
+ undoLog.lock()
+ try {
+ val before = undoLog.log
+ var result = false
+ try {
+ result = isSameType1(tp1, tp2)
+ }
+ finally if (!result) undoLog.undoTo(before)
+ result
+ }
+ finally undoLog.unlock()
+ }
+ finally {
+ subsametypeRecursions -= 1
+ // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
+ // it doesn't help to keep separate recursion counts for the three methods that now share it
+ // if (subsametypeRecursions == 0) undoLog.clear()
+ }
+
+ private def isSameType1(tp1: Type, tp2: Type): Boolean = {
+ if ((tp1 eq tp2) ||
+ (tp1 eq ErrorType) || (tp1 eq WildcardType) ||
+ (tp2 eq ErrorType) || (tp2 eq WildcardType))
+ true
+ else if ((tp1 eq NoType) || (tp2 eq NoType))
+ false
+ else if (tp1 eq NoPrefix) // !! I do not see how this would be warranted by the spec
+ tp2.typeSymbol.isPackageClass
+ else if (tp2 eq NoPrefix) // !! I do not see how this would be warranted by the spec
+ tp1.typeSymbol.isPackageClass
+ else {
+ isSameType2(tp1, tp2) || {
+ val tp1n = normalizePlus(tp1)
+ val tp2n = normalizePlus(tp2)
+ ((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n)
+ }
+ }
+ }
+
+ def isSameType2(tp1: Type, tp2: Type): Boolean = {
+ tp1 match {
+ case tr1: TypeRef =>
+ tp2 match {
+ case tr2: TypeRef =>
+ return (equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre) &&
+ ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
+ isSameTypes(tr1.args, tr2.args))) ||
+ ((tr1.pre, tr2.pre) match {
+ case (tv @ TypeVar(_,_), _) => tv.registerTypeSelection(tr1.sym, tr2)
+ case (_, tv @ TypeVar(_,_)) => tv.registerTypeSelection(tr2.sym, tr1)
+ case _ => false
+ })
+ case _: SingleType =>
+ return isSameType2(tp2, tp1) // put singleton type on the left, caught below
+ case _ =>
+ }
+ case tt1: ThisType =>
+ tp2 match {
+ case tt2: ThisType =>
+ if (tt1.sym == tt2.sym) return true
+ case _ =>
+ }
+ case st1: SingleType =>
+ tp2 match {
+ case st2: SingleType =>
+ if (equalSymsAndPrefixes(st1.sym, st1.pre, st2.sym, st2.pre)) return true
+ case TypeRef(pre2, sym2, Nil) =>
+ if (sym2.isModuleClass && equalSymsAndPrefixes(st1.sym, st1.pre, sym2.sourceModule, pre2)) return true
+ case _ =>
+ }
+ case ct1: ConstantType =>
+ tp2 match {
+ case ct2: ConstantType =>
+ return (ct1.value == ct2.value)
+ case _ =>
+ }
+ case rt1: RefinedType =>
+ tp2 match {
+ case rt2: RefinedType => //
+ def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
+ sym2 =>
+ var e1 = s1.lookupEntry(sym2.name)
+ (e1 ne null) && {
+ val substSym = sym2.info.substThis(sym2.owner, e1.sym.owner)
+ var isEqual = false
+ while (!isEqual && (e1 ne null)) {
+ isEqual = e1.sym.info =:= substSym
+ e1 = s1.lookupNextEntry(e1)
+ }
+ isEqual
+ }
+ }
+ //Console.println("is same? " + tp1 + " " + tp2 + " " + tp1.typeSymbol.owner + " " + tp2.typeSymbol.owner)//DEBUG
+ return isSameTypes(rt1.parents, rt2.parents) && {
+ val decls1 = rt1.decls
+ val decls2 = rt2.decls
+ isSubScope(decls1, decls2) && isSubScope(decls2, decls1)
+ }
+ case _ =>
+ }
+ case mt1: MethodType =>
+ tp2 match {
+ case mt2: MethodType =>
+ return isSameTypes(mt1.paramTypes, mt2.paramTypes) &&
+ mt1.resultType =:= mt2.resultType.substSym(mt2.params, mt1.params) &&
+ mt1.isImplicit == mt2.isImplicit
+ // note: no case NullaryMethodType(restpe) => return mt1.params.isEmpty && mt1.resultType =:= restpe
+ case _ =>
+ }
+ case NullaryMethodType(restpe1) =>
+ tp2 match {
+ // note: no case mt2: MethodType => return mt2.params.isEmpty && restpe =:= mt2.resultType
+ case NullaryMethodType(restpe2) =>
+ return restpe1 =:= restpe2
+ case _ =>
+ }
+ case PolyType(tparams1, res1) =>
+ tp2 match {
+ case PolyType(tparams2, res2) =>
+ // assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
+ // @M looks like it might suffer from same problem as #2210
+ return (
+ (sameLength(tparams1, tparams2)) && // corresponds does not check length of two sequences before checking the predicate
+ (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
+ res1 =:= res2.substSym(tparams2, tparams1)
+ )
+ case _ =>
+ }
+ case ExistentialType(tparams1, res1) =>
+ tp2 match {
+ case ExistentialType(tparams2, res2) =>
+ // @M looks like it might suffer from same problem as #2210
+ return (
+ // corresponds does not check length of two sequences before checking the predicate -- faster & needed to avoid crasher in #2956
+ sameLength(tparams1, tparams2) &&
+ (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
+ res1 =:= res2.substSym(tparams2, tparams1)
+ )
+ case _ =>
+ }
+ case TypeBounds(lo1, hi1) =>
+ tp2 match {
+ case TypeBounds(lo2, hi2) =>
+ return lo1 =:= lo2 && hi1 =:= hi2
+ case _ =>
+ }
+ case BoundedWildcardType(bounds) =>
+ return bounds containsType tp2
+ case _ =>
+ }
+ tp2 match {
+ case BoundedWildcardType(bounds) =>
+ return bounds containsType tp1
+ case _ =>
+ }
+ tp1 match {
+ case tv @ TypeVar(_,_) =>
+ return tv.registerTypeEquality(tp2, typeVarLHS = true)
+ case _ =>
+ }
+ tp2 match {
+ case tv @ TypeVar(_,_) =>
+ return tv.registerTypeEquality(tp1, typeVarLHS = false)
+ case _ =>
+ }
+ tp1 match {
+ case _: AnnotatedType =>
+ return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
+ case _ =>
+ }
+ tp2 match {
+ case _: AnnotatedType =>
+ return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
+ case _ =>
+ }
+ tp1 match {
+ case _: SingletonType =>
+ tp2 match {
+ case _: SingletonType =>
+ def chaseDealiasedUnderlying(tp: Type): Type = {
+ var origin = tp
+ var next = origin.underlying.dealias
+ while (next.isInstanceOf[SingletonType]) {
+ assert(origin ne next, origin)
+ origin = next
+ next = origin.underlying.dealias
+ }
+ origin
+ }
+ val origin1 = chaseDealiasedUnderlying(tp1)
+ val origin2 = chaseDealiasedUnderlying(tp2)
+ ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2)
+ case _ =>
+ false
+ }
+ case _ =>
+ false
+ }
+ }
+
+ def isSubType(tp1: Type, tp2: Type): Boolean = isSubType(tp1, tp2, AnyDepth)
+
+ def isSubType(tp1: Type, tp2: Type, depth: Int): Boolean = try {
+ subsametypeRecursions += 1
+
+ //OPT cutdown on Function0 allocation
+ //was:
+ // undoLog undoUnless { // if subtype test fails, it should not affect constraints on typevars
+ // if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
+ // val p = new SubTypePair(tp1, tp2)
+ // if (pendingSubTypes(p))
+ // false
+ // else
+ // try {
+ // pendingSubTypes += p
+ // isSubType2(tp1, tp2, depth)
+ // } finally {
+ // pendingSubTypes -= p
+ // }
+ // } else {
+ // isSubType2(tp1, tp2, depth)
+ // }
+ // }
+
+ undoLog.lock()
+ try {
+ val before = undoLog.log
+ var result = false
+
+ try result = { // if subtype test fails, it should not affect constraints on typevars
+ if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
+ val p = new SubTypePair(tp1, tp2)
+ if (pendingSubTypes(p))
+ false
+ else
+ try {
+ pendingSubTypes += p
+ isSubType2(tp1, tp2, depth)
+ } finally {
+ pendingSubTypes -= p
+ }
+ } else {
+ isSubType2(tp1, tp2, depth)
+ }
+ } finally if (!result) undoLog.undoTo(before)
+
+ result
+ } finally undoLog.unlock()
+ } finally {
+ subsametypeRecursions -= 1
+ // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
+ // it doesn't help to keep separate recursion counts for the three methods that now share it
+ // if (subsametypeRecursions == 0) undoLog.clear()
+ }
+
+ private def isPolySubType(tp1: PolyType, tp2: PolyType): Boolean = {
+ val PolyType(tparams1, res1) = tp1
+ val PolyType(tparams2, res2) = tp2
+
+ sameLength(tparams1, tparams2) && {
+ // fast-path: polymorphic method type -- type params cannot be captured
+ val isMethod = tparams1.head.owner.isMethod
+ //@M for an example of why we need to generate fresh symbols otherwise, see neg/tcpoly_ticket2101.scala
+ val substitutes = if (isMethod) tparams1 else cloneSymbols(tparams1)
+ def sub1(tp: Type) = if (isMethod) tp else tp.substSym(tparams1, substitutes)
+ def sub2(tp: Type) = tp.substSym(tparams2, substitutes)
+ def cmp(p1: Symbol, p2: Symbol) = sub2(p2.info) <:< sub1(p1.info)
+
+ (tparams1 corresponds tparams2)(cmp) && (sub1(res1) <:< sub2(res2))
+ }
+ }
+
+ // @assume tp1.isHigherKinded || tp2.isHigherKinded
+ def isHKSubType(tp1: Type, tp2: Type, depth: Int): Boolean = {
+ def isSub(ntp1: Type, ntp2: Type) = (ntp1.withoutAnnotations, ntp2.withoutAnnotations) match {
+ case (TypeRef(_, AnyClass, _), _) => false // avoid some warnings when Nothing/Any are on the other side
+ case (_, TypeRef(_, NothingClass, _)) => false
+ case (pt1: PolyType, pt2: PolyType) => isPolySubType(pt1, pt2) // @assume both .isHigherKinded (both normalized to PolyType)
+ case (_: PolyType, MethodType(ps, _)) if ps exists (_.tpe.isWildcard) => false // don't warn on HasMethodMatching on right hand side
+ case _ => // @assume !(both .isHigherKinded) thus cannot be subtypes
+ def tp_s(tp: Type): String = f"$tp%-20s ${util.shortClassOfInstance(tp)}%s"
+ devWarning(s"HK subtype check on $tp1 and $tp2, but both don't normalize to polytypes:\n tp1=${tp_s(ntp1)}\n tp2=${tp_s(ntp2)}")
+ false
+ }
+
+ ( tp1.typeSymbol == NothingClass // @M Nothing is subtype of every well-kinded type
+ || tp2.typeSymbol == AnyClass // @M Any is supertype of every well-kinded type (@PP: is it? What about continuations plugin?)
+ || isSub(tp1.normalize, tp2.normalize) && annotationsConform(tp1, tp2) // @M! normalize reduces higher-kinded case to PolyType's
+ )
+ }
+
+ /** Does type `tp1` conform to `tp2`? */
+ private def isSubType2(tp1: Type, tp2: Type, depth: Int): Boolean = {
+ if ((tp1 eq tp2) || isErrorOrWildcard(tp1) || isErrorOrWildcard(tp2)) return true
+ if ((tp1 eq NoType) || (tp2 eq NoType)) return false
+ if (tp1 eq NoPrefix) return (tp2 eq NoPrefix) || tp2.typeSymbol.isPackageClass // !! I do not see how the "isPackageClass" would be warranted by the spec
+ if (tp2 eq NoPrefix) return tp1.typeSymbol.isPackageClass
+ if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2)) return tp1 =:= tp2
+ if (tp1.isHigherKinded || tp2.isHigherKinded) return isHKSubType(tp1, tp2, depth)
+
+ /** First try, on the right:
+ * - unwrap Annotated types, BoundedWildcardTypes,
+ * - bind TypeVars on the right, if lhs is not Annotated nor BoundedWildcard
+ * - handle common cases for first-kind TypeRefs on both sides as a fast path.
+ */
+ def firstTry = tp2 match {
+ // fast path: two typerefs, none of them HK
+ case tr2: TypeRef =>
+ tp1 match {
+ case tr1: TypeRef =>
+ val sym1 = tr1.sym
+ val sym2 = tr2.sym
+ val pre1 = tr1.pre
+ val pre2 = tr2.pre
+ (((if (sym1 == sym2) phase.erasedTypes || sym1.owner.hasPackageFlag || isSubType(pre1, pre2, depth)
+ else (sym1.name == sym2.name && !sym1.isModuleClass && !sym2.isModuleClass &&
+ (isUnifiable(pre1, pre2) ||
+ isSameSpecializedSkolem(sym1, sym2, pre1, pre2) ||
+ sym2.isAbstractType && isSubPre(pre1, pre2, sym2)))) &&
+ isSubArgs(tr1.args, tr2.args, sym1.typeParams, depth))
+ ||
+ sym2.isClass && {
+ val base = tr1 baseType sym2
+ (base ne tr1) && isSubType(base, tr2, depth)
+ }
+ ||
+ thirdTryRef(tr1, tr2))
+ case _ =>
+ secondTry
+ }
+ case AnnotatedType(_, _, _) =>
+ isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) &&
+ annotationsConform(tp1, tp2)
+ case BoundedWildcardType(bounds) =>
+ isSubType(tp1, bounds.hi, depth)
+ case tv2 @ TypeVar(_, constr2) =>
+ tp1 match {
+ case AnnotatedType(_, _, _) | BoundedWildcardType(_) =>
+ secondTry
+ case _ =>
+ tv2.registerBound(tp1, isLowerBound = true)
+ }
+ case _ =>
+ secondTry
+ }
+
+ /** Second try, on the left:
+ * - unwrap AnnotatedTypes, BoundedWildcardTypes,
+ * - bind typevars,
+ * - handle existential types by skolemization.
+ */
+ def secondTry = tp1 match {
+ case AnnotatedType(_, _, _) =>
+ isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) &&
+ annotationsConform(tp1, tp2)
+ case BoundedWildcardType(bounds) =>
+ isSubType(tp1.bounds.lo, tp2, depth)
+ case tv @ TypeVar(_,_) =>
+ tv.registerBound(tp2, isLowerBound = false)
+ case ExistentialType(_, _) =>
+ try {
+ skolemizationLevel += 1
+ isSubType(tp1.skolemizeExistential, tp2, depth)
+ } finally {
+ skolemizationLevel -= 1
+ }
+ case _ =>
+ thirdTry
+ }
+
+ def thirdTryRef(tp1: Type, tp2: TypeRef): Boolean = {
+ val sym2 = tp2.sym
+ sym2 match {
+ case NotNullClass => tp1.isNotNull
+ case SingletonClass => tp1.isStable || fourthTry
+ case _: ClassSymbol =>
+ if (isRawType(tp2))
+ isSubType(tp1, rawToExistential(tp2), depth)
+ else if (sym2.name == tpnme.REFINE_CLASS_NAME)
+ isSubType(tp1, sym2.info, depth)
+ else
+ fourthTry
+ case _: TypeSymbol =>
+ if (sym2 hasFlag DEFERRED) {
+ val tp2a = tp2.bounds.lo
+ isDifferentTypeConstructor(tp2, tp2a) &&
+ isSubType(tp1, tp2a, depth) ||
+ fourthTry
+ } else {
+ isSubType(tp1.normalize, tp2.normalize, depth)
+ }
+ case _ =>
+ fourthTry
+ }
+ }
+
+ /** Third try, on the right:
+ * - decompose refined types.
+ * - handle typerefs, existentials, and notnull types.
+ * - handle left+right method types, polytypes, typebounds
+ */
+ def thirdTry = tp2 match {
+ case tr2: TypeRef =>
+ thirdTryRef(tp1, tr2)
+ case rt2: RefinedType =>
+ (rt2.parents forall (isSubType(tp1, _, depth))) &&
+ (rt2.decls forall (specializesSym(tp1, _, depth)))
+ case et2: ExistentialType =>
+ et2.withTypeVars(isSubType(tp1, _, depth), depth) || fourthTry
+ case nn2: NotNullType =>
+ tp1.isNotNull && isSubType(tp1, nn2.underlying, depth)
+ case mt2: MethodType =>
+ tp1 match {
+ case mt1 @ MethodType(params1, res1) =>
+ val params2 = mt2.params
+ val res2 = mt2.resultType
+ (sameLength(params1, params2) &&
+ mt1.isImplicit == mt2.isImplicit &&
+ matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
+ isSubType(res1.substSym(params1, params2), res2, depth))
+ // TODO: if mt1.params.isEmpty, consider NullaryMethodType?
+ case _ =>
+ false
+ }
+ case pt2 @ NullaryMethodType(_) =>
+ tp1 match {
+ // TODO: consider MethodType mt for which mt.params.isEmpty??
+ case pt1 @ NullaryMethodType(_) =>
+ isSubType(pt1.resultType, pt2.resultType, depth)
+ case _ =>
+ false
+ }
+ case TypeBounds(lo2, hi2) =>
+ tp1 match {
+ case TypeBounds(lo1, hi1) =>
+ isSubType(lo2, lo1, depth) && isSubType(hi1, hi2, depth)
+ case _ =>
+ false
+ }
+ case _ =>
+ fourthTry
+ }
+
+ /** Fourth try, on the left:
+ * - handle typerefs, refined types, notnull and singleton types.
+ */
+ def fourthTry = tp1 match {
+ case tr1 @ TypeRef(pre1, sym1, _) =>
+ sym1 match {
+ case NothingClass => true
+ case NullClass =>
+ tp2 match {
+ case TypeRef(_, sym2, _) =>
+ containsNull(sym2)
+ case _ =>
+ isSingleType(tp2) && isSubType(tp1, tp2.widen, depth)
+ }
+ case _: ClassSymbol =>
+ if (isRawType(tp1))
+ isSubType(rawToExistential(tp1), tp2, depth)
+ else if (sym1.isModuleClass) tp2 match {
+ case SingleType(pre2, sym2) => equalSymsAndPrefixes(sym1.sourceModule, pre1, sym2, pre2)
+ case _ => false
+ }
+ else if (sym1.isRefinementClass)
+ isSubType(sym1.info, tp2, depth)
+ else false
+
+ case _: TypeSymbol =>
+ if (sym1 hasFlag DEFERRED) {
+ val tp1a = tp1.bounds.hi
+ isDifferentTypeConstructor(tp1, tp1a) && isSubType(tp1a, tp2, depth)
+ } else {
+ isSubType(tp1.normalize, tp2.normalize, depth)
+ }
+ case _ =>
+ false
+ }
+ case RefinedType(parents1, _) =>
+ parents1 exists (isSubType(_, tp2, depth))
+ case _: SingletonType | _: NotNullType =>
+ isSubType(tp1.underlying, tp2, depth)
+ case _ =>
+ false
+ }
+
+ firstTry
+ }
+
+
+ def isWeakSubType(tp1: Type, tp2: Type) =
+ tp1.deconst.normalize match {
+ case TypeRef(_, sym1, _) if isNumericValueClass(sym1) =>
+ tp2.deconst.normalize match {
+ case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
+ isNumericSubClass(sym1, sym2)
+ case tv2 @ TypeVar(_, _) =>
+ tv2.registerBound(tp1, isLowerBound = true, isNumericBound = true)
+ case _ =>
+ isSubType(tp1, tp2)
+ }
+ case tv1 @ TypeVar(_, _) =>
+ tp2.deconst.normalize match {
+ case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
+ tv1.registerBound(tp2, isLowerBound = false, isNumericBound = true)
+ case _ =>
+ isSubType(tp1, tp2)
+ }
+ case _ =>
+ isSubType(tp1, tp2)
+ }
+
+ /** The isNumericValueType tests appear redundant, but without them
+ * test/continuations-neg/function3.scala goes into an infinite loop.
+ * (Even if the calls are to typeSymbolDirect.)
+ */
+ def isNumericSubType(tp1: Type, tp2: Type): Boolean = (
+ isNumericValueType(tp1)
+ && isNumericValueType(tp2)
+ && isNumericSubClass(tp1.typeSymbol, tp2.typeSymbol)
+ )
+
+}
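Aside: both isSameType and isSubType above hand-inline the same pattern: snapshot the undo log, run
the check, and roll any type-variable constraints back if the check fails. A standalone sketch of
that snapshot-and-undo idea (illustrative only; ToyVar and UndoLog are made-up stand-ins for
TypeVar/TypeConstraint and the real undoLog, and locking is omitted):

    object UndoLogSketch {
      final class ToyVar(var constraint: Option[String] = None)

      final class UndoLog {
        private var log: List[(ToyVar, Option[String])] = Nil
        def snapshot: List[(ToyVar, Option[String])] = log
        // Save the current constraint before speculatively changing it.
        def record(tv: ToyVar): Unit = log = (tv, tv.constraint) :: log
        // Pop entries down to `limit`, restoring the saved constraints.
        def undoTo(limit: List[(ToyVar, Option[String])]): Unit =
          while ((log ne limit) && log.nonEmpty) {
            val (tv, saved) = log.head
            tv.constraint = saved
            log = log.tail
          }
      }

      // The inlined "undoUnless" shape: keep new constraints only if the check succeeds.
      def checkAndMaybeUndo(undoLog: UndoLog)(check: => Boolean): Boolean = {
        val before = undoLog.snapshot
        var result = false
        try result = check
        finally if (!result) undoLog.undoTo(before)
        result
      }

      def main(args: Array[String]): Unit = {
        val undoLog = new UndoLog
        val tv = new ToyVar
        checkAndMaybeUndo(undoLog) {
          undoLog.record(tv)
          tv.constraint = Some("Int") // speculative constraint
          false                       // pretend the subtype check failed
        }
        println(tv.constraint) // None -- the speculative constraint was rolled back
      }
    }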
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
new file mode 100644
index 0000000000..a002b01f70
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
@@ -0,0 +1,282 @@
+package scala.reflect
+package internal
+package tpe
+
+import scala.collection.{ generic }
+import generic.Clearable
+
+
+private[internal] trait TypeConstraints {
+ self: SymbolTable =>
+ import definitions._
+
+ /** A log of type variables with their original constraints. Used in order
+ * to undo constraints in the case of isSubType/isSameType failure.
+ */
+ lazy val undoLog = newUndoLog
+
+ protected def newUndoLog = new UndoLog
+
+ class UndoLog extends Clearable {
+ private type UndoPairs = List[(TypeVar, TypeConstraint)]
+ //OPT this method is public so we can do `manual inlining`
+ var log: UndoPairs = List()
+
+ /*
+ * These two methods provide explicit locking mechanism that is overridden in SynchronizedUndoLog.
+ *
+ * The idea behind the explicit locking mechanism is that all public methods that access mutable
+ * state have to obtain the lock for their entire execution, so both reads and writes are kept in
+ * the right order. Originally, that was achieved by overriding those public methods in
+ * `SynchronizedUndoLog`, which was fine but expensive. The reason is that those public methods take
+ * a thunk as argument, and if we keep them non-final there is no way to have them inlined, so the
+ * thunks cannot go away.
+ *
+ * By using explicit locking we can achieve inlining.
+ *
+ * NOTE: They are made public for now so we can apply 'manual inlining' (copy&pasting into hot
+ * places implementation of `undo` or `undoUnless`). This should be changed back to protected
+ * once inliner is fixed.
+ */
+ def lock(): Unit = ()
+ def unlock(): Unit = ()
+
+ // register with the auto-clearing cache manager
+ perRunCaches.recordCache(this)
+
+ /** Undo all changes to constraints on type variables up to `limit`. */
+ //OPT this method is public so we can do `manual inlining`
+ def undoTo(limit: UndoPairs) {
+ assertCorrectThread()
+ while ((log ne limit) && log.nonEmpty) {
+ val (tv, constr) = log.head
+ tv.constr = constr
+ log = log.tail
+ }
+ }
+
+ /** No sync necessary, because record should only
+ * be called from within an undo or undoUnless block,
+ * which is already synchronized.
+ */
+ private[reflect] def record(tv: TypeVar) = {
+ log ::= ((tv, tv.constr.cloneInternal))
+ }
+
+ def clear() {
+ lock()
+ try {
+ if (settings.debug.value)
+ self.log("Clearing " + log.size + " entries from the undoLog.")
+ log = Nil
+ } finally unlock()
+ }
+
+ // `block` should not affect constraints on typevars
+ def undo[T](block: => T): T = {
+ lock()
+ try {
+ val before = log
+
+ try block
+ finally undoTo(before)
+ } finally unlock()
+ }
+ }
+
+ /** @PP: Unable to see why these apparently constant types should need vals
+ * in every TypeConstraint, I lifted them out.
+ */
+ private lazy val numericLoBound = IntClass.tpe
+ private lazy val numericHiBound = intersectionType(List(ByteClass.tpe, CharClass.tpe), ScalaPackageClass)
+
+ /** A class expressing upper and lower bounds constraints of type variables,
+ * as well as their instantiations.
+ */
+ class TypeConstraint(lo0: List[Type], hi0: List[Type], numlo0: Type, numhi0: Type, avoidWidening0: Boolean = false) {
+ def this(lo0: List[Type], hi0: List[Type]) = this(lo0, hi0, NoType, NoType)
+ def this(bounds: TypeBounds) = this(List(bounds.lo), List(bounds.hi))
+ def this() = this(List(), List())
+
+ /* Syncnote: Type constraints are assumed to be used from only one
+ * thread. They are not exposed in api.Types and are used only locally
+ * in operations that are exposed from types. Hence, no syncing of any
+ * variables should be necessary.
+ */
+
+ /** Guard these lists against AnyClass and NothingClass appearing,
+ * else loBounds.isEmpty will have different results for an empty
+ * constraint and one with Nothing as a lower bound. [Actually
+ * guarding addLoBound/addHiBound somehow broke raw types so it
+ * only guards against being created with them.]
+ */
+ private var lobounds = lo0 filterNot typeIsNothing
+ private var hibounds = hi0 filterNot typeIsAny
+ private var numlo = numlo0
+ private var numhi = numhi0
+ private var avoidWidening = avoidWidening0
+
+ def loBounds: List[Type] = if (numlo == NoType) lobounds else numlo :: lobounds
+ def hiBounds: List[Type] = if (numhi == NoType) hibounds else numhi :: hibounds
+ def avoidWiden: Boolean = avoidWidening
+
+ def addLoBound(tp: Type, isNumericBound: Boolean = false) {
+ // For some reason which is still a bit fuzzy, we must let Nothing through as
+ // a lower bound despite the fact that Nothing is always a lower bound. My current
+ // supposition is that the side-effecting type constraint accumulation mechanism
+ // depends on these subtype tests being performed to make forward progress when
+ // there are mutually recursive type vars.
+ // See pos/t6367 and pos/t6499 for the competing test cases.
+ val mustConsider = tp.typeSymbol match {
+ case NothingClass => true
+ case _ => !(lobounds contains tp)
+ }
+ if (mustConsider) {
+ if (isNumericBound && isNumericValueType(tp)) {
+ if (numlo == NoType || isNumericSubType(numlo, tp))
+ numlo = tp
+ else if (!isNumericSubType(tp, numlo))
+ numlo = numericLoBound
+ }
+ else lobounds ::= tp
+ }
+ }
+
+ def checkWidening(tp: Type) {
+ if(tp.isStable) avoidWidening = true
+ else tp match {
+ case HasTypeMember(_, _) => avoidWidening = true
+ case _ =>
+ }
+ }
+
+ def addHiBound(tp: Type, isNumericBound: Boolean = false) {
+ // My current test case only demonstrates the need to let Nothing through as
+ // a lower bound, but I suspect the situation is symmetrical.
+ val mustConsider = tp.typeSymbol match {
+ case AnyClass => true
+ case _ => !(hibounds contains tp)
+ }
+ if (mustConsider) {
+ checkWidening(tp)
+ if (isNumericBound && isNumericValueType(tp)) {
+ if (numhi == NoType || isNumericSubType(tp, numhi))
+ numhi = tp
+ else if (!isNumericSubType(numhi, tp))
+ numhi = numericHiBound
+ }
+ else hibounds ::= tp
+ }
+ }
+
+ def isWithinBounds(tp: Type): Boolean =
+ lobounds.forall(_ <:< tp) &&
+ hibounds.forall(tp <:< _) &&
+ (numlo == NoType || (numlo weak_<:< tp)) &&
+ (numhi == NoType || (tp weak_<:< numhi))
+
+ var inst: Type = NoType // @M reduce visibility?
+
+ def instValid = (inst ne null) && (inst ne NoType)
+
+ def cloneInternal = {
+ val tc = new TypeConstraint(lobounds, hibounds, numlo, numhi, avoidWidening)
+ tc.inst = inst
+ tc
+ }
+
+ override def toString = {
+ val boundsStr = {
+ val lo = loBounds filterNot typeIsNothing
+ val hi = hiBounds filterNot typeIsAny
+ val lostr = if (lo.isEmpty) Nil else List(lo.mkString(" >: (", ", ", ")"))
+ val histr = if (hi.isEmpty) Nil else List(hi.mkString(" <: (", ", ", ")"))
+
+ lostr ++ histr mkString ("[", " | ", "]")
+ }
+ if (inst eq NoType) boundsStr
+ else boundsStr + " _= " + inst.safeToString
+ }
+ }
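+
+ // A rough sketch of how the bounds API above is typically exercised
+ // (illustrative only; IntClass/AnyValClass/StringClass come from definitions):
+ //
+ //   val tc = new TypeConstraint
+ //   tc.addLoBound(IntClass.tpe)          // require  Int <: ?T
+ //   tc.addHiBound(AnyValClass.tpe)       // require  ?T <: AnyVal
+ //   tc.isWithinBounds(IntClass.tpe)      // true:  Int <:< Int and Int <:< AnyVal
+ //   tc.isWithinBounds(StringClass.tpe)   // false: Int <:< String fails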
+
+ /** Solve constraint collected in types `tvars`.
+ *
+ * @param tvars All type variables to be instantiated.
+ * @param tparams The type parameters corresponding to `tvars`
+ * @param variances The variances of type parameters; need to reverse
+ * solution direction for all contravariant variables.
+ * @param upper When `true` search for max solution else min.
+ */
+ def solve(tvars: List[TypeVar], tparams: List[Symbol],
+ variances: List[Variance], upper: Boolean): Boolean =
+ solve(tvars, tparams, variances, upper, AnyDepth)
+
+ def solve(tvars: List[TypeVar], tparams: List[Symbol],
+ variances: List[Variance], upper: Boolean, depth: Int): Boolean = {
+
+ def solveOne(tvar: TypeVar, tparam: Symbol, variance: Variance) {
+ if (tvar.constr.inst == NoType) {
+ val up = if (variance.isContravariant) !upper else upper
+ tvar.constr.inst = null
+ val bound: Type = if (up) tparam.info.bounds.hi else tparam.info.bounds.lo
+ //Console.println("solveOne0(tv, tp, v, b)="+(tvar, tparam, variance, bound))
+ var cyclic = bound contains tparam
+ foreach3(tvars, tparams, variances)((tvar2, tparam2, variance2) => {
+ val ok = (tparam2 != tparam) && (
+ (bound contains tparam2)
+ || up && (tparam2.info.bounds.lo =:= tparam.tpeHK)
+ || !up && (tparam2.info.bounds.hi =:= tparam.tpeHK)
+ )
+ if (ok) {
+ if (tvar2.constr.inst eq null) cyclic = true
+ solveOne(tvar2, tparam2, variance2)
+ }
+ })
+ if (!cyclic) {
+ if (up) {
+ if (bound.typeSymbol != AnyClass) {
+ log(s"$tvar addHiBound $bound.instantiateTypeParams($tparams, $tvars)")
+ tvar addHiBound bound.instantiateTypeParams(tparams, tvars)
+ }
+ for (tparam2 <- tparams)
+ tparam2.info.bounds.lo.dealias match {
+ case TypeRef(_, `tparam`, _) =>
+ log(s"$tvar addHiBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
+ tvar addHiBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
+ case _ =>
+ }
+ } else {
+ if (bound.typeSymbol != NothingClass && bound.typeSymbol != tparam) {
+ log(s"$tvar addLoBound $bound.instantiateTypeParams($tparams, $tvars)")
+ tvar addLoBound bound.instantiateTypeParams(tparams, tvars)
+ }
+ for (tparam2 <- tparams)
+ tparam2.info.bounds.hi.dealias match {
+ case TypeRef(_, `tparam`, _) =>
+ log(s"$tvar addLoBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
+ tvar addLoBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
+ case _ =>
+ }
+ }
+ }
+ tvar.constr.inst = NoType // necessary because hibounds/lobounds may contain tvar
+
+ //println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen)))
+ val newInst = (
+ if (up) {
+ if (depth != AnyDepth) glb(tvar.constr.hiBounds, depth) else glb(tvar.constr.hiBounds)
+ } else {
+ if (depth != AnyDepth) lub(tvar.constr.loBounds, depth) else lub(tvar.constr.loBounds)
+ }
+ )
+ log(s"$tvar setInst $newInst")
+ tvar setInst newInst
+ //Console.println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen))+" = "+tvar.constr.inst)//@MDEBUG
+ }
+ }
+
+ // println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info)))
+ foreach3(tvars, tparams, variances)(solveOne)
+ tvars forall (tvar => tvar.constr.isWithinBounds(tvar.constr.inst))
+ }
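+
+ // Rough intuition for `solve` above, on a hypothetical inference problem:
+ // for `def f[T](x: T): T` applied as `f(1)`, the type variable ?T has
+ // accumulated the lower bound Int. Solving with upper = false picks the
+ // minimal solution lub(Int :: Nil) = Int and performs `?T setInst Int`;
+ // with upper = true the glb of the collected hi bounds would be used.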
+}
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
new file mode 100644
index 0000000000..51363c0f82
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
@@ -0,0 +1,1144 @@
+package scala.reflect
+package internal
+package tpe
+
+import scala.collection.{ mutable, immutable }
+import Flags._
+import scala.annotation.tailrec
+import Variance._
+
+private[internal] trait TypeMaps {
+ self: SymbolTable =>
+ import definitions._
+
+ /** Normalize any type aliases within this type (@see Type#normalize).
+ * Note that this depends very much on the call to "normalize", not "dealias",
+ * so it no longer carries the too-stealthy name "deAlias".
+ */
+ object normalizeAliases extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(_, sym, _) if sym.isAliasType =>
+ def msg = if (tp.isHigherKinded) s"Normalizing type alias function $tp" else s"Dealiasing type alias $tp"
+ mapOver(logResult(msg)(tp.normalize))
+ case _ => mapOver(tp)
+ }
+ }
+
+ /** Remove any occurrence of type <singleton> from this type and its parents */
+ object dropSingletonType extends TypeMap {
+ def apply(tp: Type): Type = {
+ tp match {
+ case TypeRef(_, SingletonClass, _) =>
+ AnyClass.tpe
+ case tp1 @ RefinedType(parents, decls) =>
+ parents filter (_.typeSymbol != SingletonClass) match {
+ case Nil => AnyClass.tpe
+ case p :: Nil if decls.isEmpty => mapOver(p)
+ case ps => mapOver(copyRefinedType(tp1, ps, decls))
+ }
+ case tp1 =>
+ mapOver(tp1)
+ }
+ }
+ }
+
+ /** Type with all top-level occurrences of abstract types replaced by their bounds */
+ object abstractTypesToBounds extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(_, sym, _) if sym.isAliasType => apply(tp.dealias)
+ case TypeRef(_, sym, _) if sym.isAbstractType => apply(tp.bounds.hi)
+ case rtp @ RefinedType(parents, decls) => copyRefinedType(rtp, parents mapConserve this, decls)
+ case AnnotatedType(_, _, _) => mapOver(tp)
+ case _ => tp // no recursion - top level only
+ }
+ }
+
+ // Set to true for A* => Seq[A]
+ // (And it will only rewrite A* in method result types.)
+ // This is the pre-existing behavior.
+ // Or false for Seq[A] => Seq[A]
+ // (It will rewrite A* everywhere but method parameters.)
+ // This is the specified behavior.
+ protected def etaExpandKeepsStar = false
+
+ /** Turn any T* types into Seq[T] except when
+ * in method parameter position.
+ */
+ object dropIllegalStarTypes extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case MethodType(params, restpe) =>
+ // Not mapping over params
+ val restpe1 = apply(restpe)
+ if (restpe eq restpe1) tp
+ else MethodType(params, restpe1)
+ case TypeRef(_, RepeatedParamClass, arg :: Nil) =>
+ seqType(arg)
+ case _ =>
+ if (etaExpandKeepsStar) tp else mapOver(tp)
+ }
+ }
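+
+ // For illustration (hypothetical input): applied to the method type
+ //   (xs: Int*)Int*
+ // the map leaves the parameter type alone but rewrites the result type,
+ // yielding (xs: Int*)Seq[Int]; with the default etaExpandKeepsStar = false,
+ // occurrences nested deeper inside the result type are rewritten as well.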
+
+ trait AnnotationFilter extends TypeMap {
+ def keepAnnotation(annot: AnnotationInfo): Boolean
+
+ override def mapOver(annot: AnnotationInfo) =
+ if (keepAnnotation(annot)) super.mapOver(annot)
+ else UnmappableAnnotation
+ }
+
+ trait KeepOnlyTypeConstraints extends AnnotationFilter {
+ // filter keeps only type constraint annotations
+ def keepAnnotation(annot: AnnotationInfo) = annot matches TypeConstraintClass
+ }
+
+ // todo. move these into scala.reflect.api
+
+ /** A prototype for mapping a function over all possible types
+ */
+ abstract class TypeMap(trackVariance: Boolean) extends (Type => Type) {
+ def this() = this(trackVariance = false)
+ def apply(tp: Type): Type
+
+ private[this] var _variance: Variance = if (trackVariance) Covariant else Invariant
+
+ def variance_=(x: Variance) = { assert(trackVariance, this) ; _variance = x }
+ def variance = _variance
+
+ /** Map this function over given type */
+ def mapOver(tp: Type): Type = tp match {
+ case tr @ TypeRef(pre, sym, args) =>
+ val pre1 = this(pre)
+ val args1 = (
+ if (trackVariance && args.nonEmpty && !variance.isInvariant && sym.typeParams.nonEmpty)
+ mapOverArgs(args, sym.typeParams)
+ else
+ args mapConserve this
+ )
+ if ((pre1 eq pre) && (args1 eq args)) tp
+ else copyTypeRef(tp, pre1, tr.coevolveSym(pre1), args1)
+ case ThisType(_) => tp
+ case SingleType(pre, sym) =>
+ if (sym.isPackageClass) tp // short path
+ else {
+ val pre1 = this(pre)
+ if (pre1 eq pre) tp
+ else singleType(pre1, sym)
+ }
+ case MethodType(params, result) =>
+ val params1 = flipped(mapOver(params))
+ val result1 = this(result)
+ if ((params1 eq params) && (result1 eq result)) tp
+ else copyMethodType(tp, params1, result1.substSym(params, params1))
+ case PolyType(tparams, result) =>
+ val tparams1 = flipped(mapOver(tparams))
+ val result1 = this(result)
+ if ((tparams1 eq tparams) && (result1 eq result)) tp
+ else PolyType(tparams1, result1.substSym(tparams, tparams1))
+ case NullaryMethodType(result) =>
+ val result1 = this(result)
+ if (result1 eq result) tp
+ else NullaryMethodType(result1)
+ case ConstantType(_) => tp
+ case SuperType(thistp, supertp) =>
+ val thistp1 = this(thistp)
+ val supertp1 = this(supertp)
+ if ((thistp1 eq thistp) && (supertp1 eq supertp)) tp
+ else SuperType(thistp1, supertp1)
+ case TypeBounds(lo, hi) =>
+ val lo1 = flipped(this(lo))
+ val hi1 = this(hi)
+ if ((lo1 eq lo) && (hi1 eq hi)) tp
+ else TypeBounds(lo1, hi1)
+ case BoundedWildcardType(bounds) =>
+ val bounds1 = this(bounds)
+ if (bounds1 eq bounds) tp
+ else BoundedWildcardType(bounds1.asInstanceOf[TypeBounds])
+ case rtp @ RefinedType(parents, decls) =>
+ val parents1 = parents mapConserve this
+ val decls1 = mapOver(decls)
+ copyRefinedType(rtp, parents1, decls1)
+ case ExistentialType(tparams, result) =>
+ val tparams1 = mapOver(tparams)
+ val result1 = this(result)
+ if ((tparams1 eq tparams) && (result1 eq result)) tp
+ else newExistentialType(tparams1, result1.substSym(tparams, tparams1))
+ case OverloadedType(pre, alts) =>
+ val pre1 = if (pre.isInstanceOf[ClassInfoType]) pre else this(pre)
+ if (pre1 eq pre) tp
+ else OverloadedType(pre1, alts)
+ case AntiPolyType(pre, args) =>
+ val pre1 = this(pre)
+ val args1 = args mapConserve this
+ if ((pre1 eq pre) && (args1 eq args)) tp
+ else AntiPolyType(pre1, args1)
+ case tv@TypeVar(_, constr) =>
+ if (constr.instValid) this(constr.inst)
+ else tv.applyArgs(mapOverArgs(tv.typeArgs, tv.params)) //@M !args.isEmpty implies !typeParams.isEmpty
+ case NotNullType(tp) =>
+ val tp1 = this(tp)
+ if (tp1 eq tp) tp
+ else NotNullType(tp1)
+ case AnnotatedType(annots, atp, selfsym) =>
+ val annots1 = mapOverAnnotations(annots)
+ val atp1 = this(atp)
+ if ((annots1 eq annots) && (atp1 eq atp)) tp
+ else if (annots1.isEmpty) atp1
+ else AnnotatedType(annots1, atp1, selfsym)
+ /*
+ case ErrorType => tp
+ case WildcardType => tp
+ case NoType => tp
+ case NoPrefix => tp
+ case ErasedSingleType(sym) => tp
+ */
+ case _ =>
+ tp
+ // throw new Error("mapOver inapplicable for " + tp);
+ }
+
+ def withVariance[T](v: Variance)(body: => T): T = {
+ val saved = variance
+ variance = v
+ try body finally variance = saved
+ }
+ @inline final def flipped[T](body: => T): T = {
+ if (trackVariance) variance = variance.flip
+ try body
+ finally if (trackVariance) variance = variance.flip
+ }
+ protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] = (
+ if (trackVariance)
+ map2Conserve(args, tparams)((arg, tparam) => withVariance(variance * tparam.variance)(this(arg)))
+ else
+ args mapConserve this
+ )
+ /** Applies this map to the symbol's info, setting variance = Invariant
+ * if necessary when the symbol is an alias.
+ */
+ private def applyToSymbolInfo(sym: Symbol): Type = {
+ if (trackVariance && !variance.isInvariant && sym.isAliasType)
+ withVariance(Invariant)(this(sym.info))
+ else
+ this(sym.info)
+ }
+
+ /** Called by mapOver to determine whether the original symbols can
+ * be returned, or whether they must be cloned.
+ */
+ protected def noChangeToSymbols(origSyms: List[Symbol]): Boolean = {
+ @tailrec def loop(syms: List[Symbol]): Boolean = syms match {
+ case Nil => true
+ case x :: xs => (x.info eq applyToSymbolInfo(x)) && loop(xs)
+ }
+ loop(origSyms)
+ }
+
+ /** Map this function over given scope */
+ def mapOver(scope: Scope): Scope = {
+ val elems = scope.toList
+ val elems1 = mapOver(elems)
+ if (elems1 eq elems) scope
+ else newScopeWith(elems1: _*)
+ }
+
+ /** Map this function over given list of symbols */
+ def mapOver(origSyms: List[Symbol]): List[Symbol] = {
+ // fast path in case nothing changes due to map
+ if (noChangeToSymbols(origSyms)) origSyms
+ // map is not the identity --> do cloning properly
+ else cloneSymbolsAndModify(origSyms, TypeMap.this)
+ }
+
+ def mapOver(annot: AnnotationInfo): AnnotationInfo = {
+ val AnnotationInfo(atp, args, assocs) = annot
+ val atp1 = mapOver(atp)
+ val args1 = mapOverAnnotArgs(args)
+ // there is no need to rewrite assocs, as they are constants
+
+ if ((args eq args1) && (atp eq atp1)) annot
+ else if (args1.isEmpty && args.nonEmpty) UnmappableAnnotation // some annotation arg was unmappable
+ else AnnotationInfo(atp1, args1, assocs) setPos annot.pos
+ }
+
+ def mapOverAnnotations(annots: List[AnnotationInfo]): List[AnnotationInfo] = {
+ val annots1 = annots mapConserve mapOver
+ if (annots1 eq annots) annots
+ else annots1 filterNot (_ eq UnmappableAnnotation)
+ }
+
+ /** Map over a set of annotation arguments. If any
+ * of the arguments cannot be mapped, then return Nil. */
+ def mapOverAnnotArgs(args: List[Tree]): List[Tree] = {
+ val args1 = args mapConserve mapOver
+ if (args1 contains UnmappableTree) Nil
+ else args1
+ }
+
+ def mapOver(tree: Tree): Tree =
+ mapOver(tree, () => return UnmappableTree)
+
+ /** Map a tree that is part of an annotation argument.
+ * If the tree cannot be mapped, then invoke giveup().
+ * The default is to transform the tree with
+ * TypeMapTransformer.
+ */
+ def mapOver(tree: Tree, giveup: ()=>Nothing): Tree =
+ (new TypeMapTransformer).transform(tree)
+
+ /** This transformer leaves the tree alone except to remap
+ * its types. */
+ class TypeMapTransformer extends Transformer {
+ override def transform(tree: Tree) = {
+ val tree1 = super.transform(tree)
+ val tpe1 = TypeMap.this(tree1.tpe)
+ if ((tree eq tree1) && (tree.tpe eq tpe1))
+ tree
+ else
+ tree1.shallowDuplicate.setType(tpe1)
+ }
+ }
+ }
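+
+ // Minimal sketch of a concrete TypeMap (hypothetical, for illustration):
+ // rewrite every occurrence of Int to Long, recursing everywhere else via
+ // mapOver, so that e.g. List[Int] maps to List[Long].
+ //
+ //   object intToLong extends TypeMap {
+ //     def apply(tp: Type): Type = tp match {
+ //       case TypeRef(_, IntClass, _) => LongClass.tpe
+ //       case _                       => mapOver(tp)
+ //     }
+ //   }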
+
+ abstract class TypeTraverser extends TypeMap {
+ def traverse(tp: Type): Unit
+ def apply(tp: Type): Type = { traverse(tp); tp }
+ }
+
+ abstract class TypeTraverserWithResult[T] extends TypeTraverser {
+ def result: T
+ def clear(): Unit
+ }
+
+ abstract class TypeCollector[T](initial: T) extends TypeTraverser {
+ var result: T = _
+ def collect(tp: Type) = {
+ result = initial
+ traverse(tp)
+ result
+ }
+ }
+
+ /** The raw to existential map converts a ''raw type'' to an existential type.
+ * It is necessary because we might have read a raw type of a
+ * parameterized Java class from a class file. At the time we read the type
+ * the corresponding class file might still not be read, so we do not
+ * know what the type parameters of the type are. Therefore
+ * the conversion of raw types to existential types might not have taken place
+ * in ClassfileParser.sigToType (where it is usually done).
+ */
+ def rawToExistential = new TypeMap {
+ private var expanded = immutable.Set[Symbol]()
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(pre, sym, List()) if isRawIfWithoutArgs(sym) =>
+ if (expanded contains sym) AnyRefClass.tpe
+ else try {
+ expanded += sym
+ val eparams = mapOver(typeParamsToExistentials(sym))
+ existentialAbstraction(eparams, typeRef(apply(pre), sym, eparams map (_.tpe)))
+ } finally {
+ expanded -= sym
+ }
+ case _ =>
+ mapOver(tp)
+ }
+ }
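+
+ // For illustration (hypothetical raw type read from a classfile): the raw
+ // type java.util.Map becomes roughly
+ //   java.util.Map[K, V] forSome { type K; type V }
+ // i.e. the missing type arguments are replaced by fresh existentials with
+ // the bounds of the corresponding Java type parameters.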
+ /***
+ *@M: I think this is more desirable, but Martin prefers to leave raw-types as-is as much as possible
+ object rawToExistentialInJava extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ // any symbol that occurs in a java sig, not just java symbols
+ // see http://lampsvn.epfl.ch/trac/scala/ticket/2454#comment:14
+ case TypeRef(pre, sym, List()) if !sym.typeParams.isEmpty =>
+ val eparams = typeParamsToExistentials(sym, sym.typeParams)
+ existentialAbstraction(eparams, TypeRef(pre, sym, eparams map (_.tpe)))
+ case _ =>
+ mapOver(tp)
+ }
+ }
+ */
+
+ /** Used by existentialAbstraction.
+ */
+ class ExistentialExtrapolation(tparams: List[Symbol]) extends TypeMap(trackVariance = true) {
+ private val occurCount = mutable.HashMap[Symbol, Int]()
+ private def countOccs(tp: Type) = {
+ tp foreach {
+ case TypeRef(_, sym, _) =>
+ if (tparams contains sym)
+ occurCount(sym) += 1
+ case _ => ()
+ }
+ }
+ def extrapolate(tpe: Type): Type = {
+ tparams foreach (t => occurCount(t) = 0)
+ countOccs(tpe)
+ for (tparam <- tparams)
+ countOccs(tparam.info)
+
+ apply(tpe)
+ }
+
+ /** If these conditions all hold:
+ * 1) we are in covariant (or contravariant) position
+ * 2) this type occurs exactly once in the existential scope
+ * 3) the widened upper (or lower) bound of this type contains no references to tparams
+ * Then we replace this lone occurrence of the type with the widened upper (or lower) bound.
+ * All other types pass through unchanged.
+ */
+ def apply(tp: Type): Type = {
+ val tp1 = mapOver(tp)
+ if (variance.isInvariant) tp1
+ else tp1 match {
+ case TypeRef(pre, sym, args) if tparams contains sym =>
+ val repl = if (variance.isPositive) dropSingletonType(tp1.bounds.hi) else tp1.bounds.lo
+ val count = occurCount(sym)
+ val containsTypeParam = tparams exists (repl contains _)
+ def msg = {
+ val word = if (variance.isPositive) "upper" else "lower"
+ s"Widened lone occurrence of $tp1 inside existential to $word bound"
+ }
+ if (!repl.typeSymbol.isBottomClass && count == 1 && !containsTypeParam)
+ logResult(msg)(repl)
+ else
+ tp1
+ case _ =>
+ tp1
+ }
+ }
+ override def mapOver(tp: Type): Type = tp match {
+ case SingleType(pre, sym) =>
+ if (sym.isPackageClass) tp // short path
+ else {
+ val pre1 = this(pre)
+ if ((pre1 eq pre) || !pre1.isStable) tp
+ else singleType(pre1, sym)
+ }
+ case _ => super.mapOver(tp)
+ }
+
+ // Do not discard the types of existential ident's. The
+ // symbol of the Ident itself cannot be listed in the
+ // existential's parameters, so the resulting existential
+ // type would be ill-formed.
+ override def mapOver(tree: Tree) = tree match {
+ case Ident(_) if tree.tpe.isStable => tree
+ case _ => super.mapOver(tree)
+ }
+ }
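+
+ // Rough illustration (hypothetical types): extrapolating
+ //   List[T] forSome { type T <: Number }
+ // finds a single covariant occurrence of T whose upper bound mentions no
+ // other quantified parameter, so that occurrence is replaced by the bound,
+ // giving List[Number]. In Map[T, T] forSome { type T } the parameter
+ // occurs twice and the existential is left untouched.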
+
+ /** Might the given symbol be important when calculating the prefix
+ * of a type? When tp.asSeenFrom(pre, clazz) is called on `tp`,
+ * the result will be `tp` unchanged if `pre` is trivial and `clazz`
+ * is a symbol such that isPossiblePrefix(clazz) == false.
+ */
+ def isPossiblePrefix(clazz: Symbol) = clazz.isClass && !clazz.isPackageClass
+
+ protected[internal] def skipPrefixOf(pre: Type, clazz: Symbol) = (
+ (pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz)
+ )
+
+ def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap =
+ new AsSeenFromMap(pre, clazz)
+
+ /** A map to compute the asSeenFrom method.
+ */
+ class AsSeenFromMap(seenFromPrefix: Type, seenFromClass: Symbol) extends TypeMap with KeepOnlyTypeConstraints {
+ // Some example source constructs relevant in asSeenFrom:
+ //
+ // object CaptureThis {
+ // trait X[A] { def f: this.type = this }
+ // class Y[A] { def f: this.type = this }
+ // // Created new existential to represent This(CaptureThis.X) seen from CaptureThis.X[B]: type _1.type <: CaptureThis.X[B] with Singleton
+ // def f1[B] = new X[B] { }
+ // // TODO - why is the behavior different when it's a class?
+ // def f2[B] = new Y[B] { }
+ // }
+ // class CaptureVal[T] {
+ // val f: java.util.List[_ <: T] = null
+ // // Captured existential skolem for type _$1 seen from CaptureVal.this.f.type: type _$1
+ // def g = f get 0
+ // }
+ // class ClassParam[T] {
+ // // AsSeenFromMap(Inner.this.type, class Inner)/classParameterAsSeen(T)#loop(ClassParam.this.type, class ClassParam)
+ // class Inner(lhs: T) { def f = lhs }
+ // }
+ def capturedParams: List[Symbol] = _capturedParams
+ def capturedSkolems: List[Symbol] = _capturedSkolems
+
+ def apply(tp: Type): Type = tp match {
+ case tp @ ThisType(_) => thisTypeAsSeen(tp)
+ case tp @ SingleType(_, sym) => if (sym.isPackageClass) tp else singleTypeAsSeen(tp)
+ case tp @ TypeRef(_, sym, _) if isTypeParamOfEnclosingClass(sym) => classParameterAsSeen(tp)
+ case _ => mapOver(tp)
+ }
+
+ private var _capturedSkolems: List[Symbol] = Nil
+ private var _capturedParams: List[Symbol] = Nil
+ private val isStablePrefix = seenFromPrefix.isStable
+
+ // isBaseClassOfEnclosingClassOrInfoIsNotYetComplete would be a more accurate
+ // but less succinct name.
+ private def isBaseClassOfEnclosingClass(base: Symbol) = {
+ def loop(encl: Symbol): Boolean = (
+ isPossiblePrefix(encl)
+ && ((encl isSubClass base) || loop(encl.owner.enclClass))
+ )
+ // The hasCompleteInfo guard is necessary to avoid cycles during the typing
+ // of certain classes, notably ones defined inside package objects.
+ !base.hasCompleteInfo || loop(seenFromClass)
+ }
+
+ /** Is the symbol a class type parameter from one of the enclosing
+ * classes, or a base class of one of them?
+ */
+ private def isTypeParamOfEnclosingClass(sym: Symbol): Boolean = (
+ sym.isTypeParameter
+ && sym.owner.isClass
+ && isBaseClassOfEnclosingClass(sym.owner)
+ )
+
+ /** Creates an existential representing a type parameter which appears
+ * in the prefix of a ThisType.
+ */
+ protected def captureThis(pre: Type, clazz: Symbol): Type = {
+ capturedParams find (_.owner == clazz) match {
+ case Some(p) => p.tpe
+ case _ =>
+ val qvar = clazz freshExistential nme.SINGLETON_SUFFIX setInfo singletonBounds(pre)
+ _capturedParams ::= qvar
+ debuglog(s"Captured This(${clazz.fullNameString}) seen from $seenFromPrefix: ${qvar.defString}")
+ qvar.tpe
+ }
+ }
+ protected def captureSkolems(skolems: List[Symbol]) {
+ for (p <- skolems; if !(capturedSkolems contains p)) {
+ debuglog(s"Captured $p seen from $seenFromPrefix")
+ _capturedSkolems ::= p
+ }
+ }
+
+ /** Find the type argument in an applied type which corresponds to a type parameter.
+ * The arguments are required to be related as follows, through intermediary `clazz`.
+ * An exception will be thrown if this is violated.
+ *
+ * @param lhs its symbol is a type parameter of `clazz`
+ * @param rhs a type application constructed from `clazz`
+ */
+ private def correspondingTypeArgument(lhs: Type, rhs: Type): Type = {
+ val TypeRef(_, lhsSym, lhsArgs) = lhs
+ val TypeRef(_, rhsSym, rhsArgs) = rhs
+ require(lhsSym.safeOwner == rhsSym, s"$lhsSym is not a type parameter of $rhsSym")
+
+ // Find the type parameter position; we'll use the corresponding argument
+ val argIndex = rhsSym.typeParams indexOf lhsSym
+
+ if (argIndex >= 0 && argIndex < rhsArgs.length) // @M! don't just replace the whole thing, might be followed by type application
+ appliedType(rhsArgs(argIndex), lhsArgs mapConserve this)
+ else if (rhsSym.tpe_*.parents exists typeIsErroneous) // don't be too zealous with the exceptions, see #2641
+ ErrorType
+ else
+ abort(s"something is wrong: cannot make sense of type application\n $lhs\n $rhs")
+ }
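+
+ // For example (hypothetical): with class C[A, B], lhs = A (a type
+ // parameter of C) and rhs = C[Int, String], A sits at parameter
+ // position 0, so the corresponding argument Int is returned.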
+
+ // 0) @pre: `classParam` is a class type parameter
+ // 1) Walk the owner chain of `seenFromClass` until we find the class which owns `classParam`
+ // 2) Take the base type of the prefix at that point with respect to the owning class
+ // 3) Solve for the type parameters through correspondence with the type args of the base type
+ //
+ // Only class type parameters (and not skolems) are considered, because other type parameters
+ // are not influenced by the prefix through which they are seen. Note that type params of
+ // anonymous type functions, which currently can only arise from normalising type aliases, are
+ // owned by the type alias of which they are the eta-expansion.
+ private def classParameterAsSeen(classParam: Type): Type = {
+ val TypeRef(_, tparam, _) = classParam
+
+ def loop(pre: Type, clazz: Symbol): Type = {
+ // have to deconst because it may be a Class[T]
+ def nextBase = (pre baseType clazz).deconst
+ //@M! see test pos/tcpoly_return_overriding.scala why mapOver is necessary
+ if (skipPrefixOf(pre, clazz))
+ mapOver(classParam)
+ else if (!matchesPrefixAndClass(pre, clazz)(tparam.owner))
+ loop(nextBase.prefix, clazz.owner)
+ else nextBase match {
+ case applied @ TypeRef(_, _, _) => correspondingTypeArgument(classParam, applied)
+ case ExistentialType(eparams, qtpe) => captureSkolems(eparams) ; loop(qtpe, clazz)
+ case t => abort(s"$tparam in ${tparam.owner} cannot be instantiated from ${seenFromPrefix.widen}")
+ }
+ }
+ loop(seenFromPrefix, seenFromClass)
+ }
+
+ // Does the candidate symbol match the given prefix and class?
+ // Since pre may be something like ThisType(A) where trait A { self: B => },
+ // we have to test the typeSymbol of the widened type, not pre.typeSymbol, or
+ // B will not be considered.
+ private def matchesPrefixAndClass(pre: Type, clazz: Symbol)(candidate: Symbol) = pre.widen match {
+ case _: TypeVar => false
+ case wide => (clazz == candidate) && (wide.typeSymbol isSubClass clazz)
+ }
+
+ // Whether the annotation tree currently being mapped over has had a This(_) node rewritten.
+ private[this] var wroteAnnotation = false
+ private object annotationArgRewriter extends TypeMapTransformer {
+ private def matchesThis(thiz: Symbol) = matchesPrefixAndClass(seenFromPrefix, seenFromClass)(thiz)
+
+ // what symbol should really be used?
+ private def newThis(): Tree = {
+ wroteAnnotation = true
+ val presym = seenFromPrefix.widen.typeSymbol
+ val thisSym = presym.owner.newValue(presym.name.toTermName, presym.pos) setInfo seenFromPrefix
+ gen.mkAttributedQualifier(seenFromPrefix, thisSym)
+ }
+
+ /** Rewrite `This` trees in annotation argument trees */
+ override def transform(tree: Tree): Tree = super.transform(tree) match {
+ case This(_) if matchesThis(tree.symbol) => newThis()
+ case tree => tree
+ }
+ }
+
+ // This becomes considerably cheaper if we optimize for the common cases:
+ // where the prefix is stable and where no This nodes are rewritten. If
+ // either is true, then we don't need to worry about calling giveup. So if
+ // the prefix is unstable, use a stack variable to indicate whether the tree
+ // was touched. This takes us to one allocation per AsSeenFromMap rather
+ // than an allocation on every call to mapOver, and no extra work when the
+ // tree only has its types remapped.
+ override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
+ if (isStablePrefix)
+ annotationArgRewriter transform tree
+ else {
+ val saved = wroteAnnotation
+ wroteAnnotation = false
+ try annotationArgRewriter transform tree
+ finally if (wroteAnnotation) giveup() else wroteAnnotation = saved
+ }
+ }
+
+ private def thisTypeAsSeen(tp: ThisType): Type = {
+ def loop(pre: Type, clazz: Symbol): Type = {
+ val pre1 = pre match {
+ case SuperType(thistpe, _) => thistpe
+ case _ => pre
+ }
+ if (skipPrefixOf(pre, clazz))
+ mapOver(tp) // TODO - is mapOver necessary here?
+ else if (!matchesPrefixAndClass(pre, clazz)(tp.sym))
+ loop((pre baseType clazz).prefix, clazz.owner)
+ else if (pre1.isStable)
+ pre1
+ else
+ captureThis(pre1, clazz)
+ }
+ loop(seenFromPrefix, seenFromClass)
+ }
+
+ private def singleTypeAsSeen(tp: SingleType): Type = {
+ val SingleType(pre, sym) = tp
+
+ val pre1 = this(pre)
+ if (pre1 eq pre) tp
+ else if (pre1.isStable) singleType(pre1, sym)
+ else pre1.memberType(sym).resultType //todo: this should be rolled into existential abstraction
+ }
+
+ override def toString = s"AsSeenFromMap($seenFromPrefix, $seenFromClass)"
+ }
+
+ /** A base class to compute all substitutions */
+ abstract class SubstMap[T](from: List[Symbol], to: List[T]) extends TypeMap {
+ assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to)
+
+ /** Are `sym` and `sym1` the same? Can be tuned by subclasses. */
+ protected def matches(sym: Symbol, sym1: Symbol): Boolean = sym eq sym1
+
+ /** Map target to type, can be tuned by subclasses */
+ protected def toType(fromtp: Type, tp: T): Type
+
+ protected def renameBoundSyms(tp: Type): Type = tp match {
+ case MethodType(ps, restp) =>
+ createFromClonedSymbols(ps, restp)((ps1, tp1) => copyMethodType(tp, ps1, renameBoundSyms(tp1)))
+ case PolyType(bs, restp) =>
+ createFromClonedSymbols(bs, restp)((ps1, tp1) => PolyType(ps1, renameBoundSyms(tp1)))
+ case ExistentialType(bs, restp) =>
+ createFromClonedSymbols(bs, restp)(newExistentialType)
+ case _ =>
+ tp
+ }
+
+ @tailrec private def subst(tp: Type, sym: Symbol, from: List[Symbol], to: List[T]): Type = (
+ if (from.isEmpty) tp
+ // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(tp, from))
+ else if (matches(from.head, sym)) toType(tp, to.head)
+ else subst(tp, sym, from.tail, to.tail)
+ )
+
+ def apply(tp0: Type): Type = if (from.isEmpty) tp0 else {
+ val boundSyms = tp0.boundSyms
+ val tp1 = if (boundSyms.nonEmpty && (boundSyms exists from.contains)) renameBoundSyms(tp0) else tp0
+ val tp = mapOver(tp1)
+ def substFor(sym: Symbol) = subst(tp, sym, from, to)
+
+ tp match {
+ // @M
+ // 1) arguments must also be substituted (even when the "head" of the
+ // applied type has already been substituted)
+ // example: (subst RBound[RT] from [type RT,type RBound] to
+ // [type RT&,type RBound&]) = RBound&[RT&]
+ // 2) avoid loops (which occur because alpha-conversion is
+ // not performed properly imo)
+ // e.g. if in class Iterable[a] there is a new Iterable[(a,b)],
+ // we must replace the a in Iterable[a] by (a,b)
+ // (must not recurse --> loops)
+ // 3) replacing m by List in m[Int] should yield List[Int], not just List
+ case TypeRef(NoPrefix, sym, args) =>
+ val tcon = substFor(sym)
+ if ((tp eq tcon) || args.isEmpty) tcon
+ else appliedType(tcon.typeConstructor, args)
+ case SingleType(NoPrefix, sym) =>
+ substFor(sym)
+ case _ =>
+ tp
+ }
+ }
+ }
+
+ /** A map to implement the `substSym` method. */
+ class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) {
+ def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2))
+
+ protected def toType(fromtp: Type, sym: Symbol) = fromtp match {
+ case TypeRef(pre, _, args) => copyTypeRef(fromtp, pre, sym, args)
+ case SingleType(pre, _) => singleType(pre, sym)
+ }
+ @tailrec private def subst(sym: Symbol, from: List[Symbol], to: List[Symbol]): Symbol = (
+ if (from.isEmpty) sym
+ // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(sym, from))
+ else if (matches(from.head, sym)) to.head
+ else subst(sym, from.tail, to.tail)
+ )
+ private def substFor(sym: Symbol) = subst(sym, from, to)
+
+ override def apply(tp: Type): Type = (
+ if (from.isEmpty) tp
+ else tp match {
+ case TypeRef(pre, sym, args) if pre ne NoPrefix =>
+ val newSym = substFor(sym)
+ // mapOver takes care of subst'ing in args
+ mapOver ( if (sym eq newSym) tp else copyTypeRef(tp, pre, newSym, args) )
+ // assert(newSym.typeParams.length == sym.typeParams.length, "typars mismatch in SubstSymMap: "+(sym, sym.typeParams, newSym, newSym.typeParams))
+ case SingleType(pre, sym) if pre ne NoPrefix =>
+ val newSym = substFor(sym)
+ mapOver( if (sym eq newSym) tp else singleType(pre, newSym) )
+ case _ =>
+ super.apply(tp)
+ }
+ )
+
+ object mapTreeSymbols extends TypeMapTransformer {
+ val strictCopy = newStrictTreeCopier
+
+ def termMapsTo(sym: Symbol) = from indexOf sym match {
+ case -1 => None
+ case idx => Some(to(idx))
+ }
+
+ // if tree.symbol is mapped to another symbol, passes the new symbol into the
+ // constructor `trans` and sets the symbol and the type on the resulting tree.
+ def transformIfMapped(tree: Tree)(trans: Symbol => Tree) = termMapsTo(tree.symbol) match {
+ case Some(toSym) => trans(toSym) setSymbol toSym setType tree.tpe
+ case None => tree
+ }
+
+ // changes trees which refer to one of the mapped symbols. trees are copied before attributes are modified.
+ override def transform(tree: Tree) = {
+ // super.transform maps symbol references in the types of `tree`. it also copies trees where necessary.
+ super.transform(tree) match {
+ case id @ Ident(_) =>
+ transformIfMapped(id)(toSym =>
+ strictCopy.Ident(id, toSym.name))
+
+ case sel @ Select(qual, name) =>
+ transformIfMapped(sel)(toSym =>
+ strictCopy.Select(sel, qual, toSym.name))
+
+ case tree => tree
+ }
+ }
+ }
+ override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
+ mapTreeSymbols.transform(tree)
+ }
+ }
+
+ /** A map to implement the `subst` method. */
+ class SubstTypeMap(from: List[Symbol], to: List[Type])
+ extends SubstMap(from, to) {
+ protected def toType(fromtp: Type, tp: Type) = tp
+
+ override def mapOver(tree: Tree, giveup: () => Nothing): Tree = {
+ object trans extends TypeMapTransformer {
+ override def transform(tree: Tree) = tree match {
+ case Ident(name) =>
+ from indexOf tree.symbol match {
+ case -1 => super.transform(tree)
+ case idx =>
+ val totpe = to(idx)
+ if (totpe.isStable) tree.duplicate setType totpe
+ else giveup()
+ }
+ case _ =>
+ super.transform(tree)
+ }
+ }
+ trans.transform(tree)
+ }
+ }
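+
+ // Rough usage sketch (hypothetical symbols): given the type parameter
+ // symbol A of class Box[A] { def get: A }, applying
+ //   new SubstTypeMap(List(A), List(IntClass.tpe))
+ // to the method type => A yields => Int; every free occurrence of A is
+ // replaced, and bound symbols are renamed first when they would clash
+ // with the `from` list.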
+
+ /** A map to implement the `substThis` method. */
+ class SubstThisMap(from: Symbol, to: Type) extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case ThisType(sym) if (sym == from) => to
+ case _ => mapOver(tp)
+ }
+ }
+
+ class SubstWildcardMap(from: List[Symbol]) extends TypeMap {
+ def apply(tp: Type): Type = try {
+ tp match {
+ case TypeRef(_, sym, _) if from contains sym =>
+ BoundedWildcardType(sym.info.bounds)
+ case _ =>
+ mapOver(tp)
+ }
+ } catch {
+ case ex: MalformedType =>
+ WildcardType
+ }
+ }
+
+ // dependent method types
+ object IsDependentCollector extends TypeCollector(false) {
+ def traverse(tp: Type) {
+ if (tp.isImmediatelyDependent) result = true
+ else if (!result) mapOver(tp)
+ }
+ }
+
+ object ApproximateDependentMap extends TypeMap {
+ def apply(tp: Type): Type =
+ if (tp.isImmediatelyDependent) WildcardType
+ else mapOver(tp)
+ }
+
+ /** Note: This map is needed even for non-dependent method types, despite what the name might imply.
+ */
+ class InstantiateDependentMap(params: List[Symbol], actuals0: List[Type]) extends TypeMap with KeepOnlyTypeConstraints {
+ private val actuals = actuals0.toIndexedSeq
+ private val existentials = new Array[Symbol](actuals.size)
+ def existentialsNeeded: List[Symbol] = existentials.filter(_ ne null).toList
+
+ private object StableArg {
+ def unapply(param: Symbol) = Arg unapply param map actuals filter (tp =>
+ tp.isStable && (tp.typeSymbol != NothingClass)
+ )
+ }
+ private object Arg {
+ def unapply(param: Symbol) = Some(params indexOf param) filter (_ >= 0)
+ }
+
+ def apply(tp: Type): Type = mapOver(tp) match {
+ // unsound to replace args by unstable actual #3873
+ case SingleType(NoPrefix, StableArg(arg)) => arg
+ // (soundly) expand type alias selections on implicit arguments,
+ // see depmet_implicit_oopsla* test cases -- typically, `param.isImplicit`
+ case tp1 @ TypeRef(SingleType(NoPrefix, Arg(pid)), sym, targs) =>
+ val arg = actuals(pid)
+ val res = typeRef(arg, sym, targs)
+ if (res.typeSymbolDirect.isAliasType) res.dealias else tp1
+ // don't return the original `tp`, which may be different from `tp1`,
+ // due to dropping annotations
+ case tp1 => tp1
+ }
+
+ /* Return the type symbol for referencing a parameter inside the existential quantifier.
+ * (Only needed if the actual is unstable.)
+ */
+ private def existentialFor(pid: Int) = {
+ if (existentials(pid) eq null) {
+ val param = params(pid)
+ existentials(pid) = (
+ param.owner.newExistential(param.name.toTypeName append nme.SINGLETON_SUFFIX, param.pos, param.flags)
+ setInfo singletonBounds(actuals(pid))
+ )
+ }
+ existentials(pid)
+ }
+
+ //AM propagate more info to annotations -- this seems a bit ad-hoc... (based on code by spoon)
+ override def mapOver(arg: Tree, giveup: ()=>Nothing): Tree = {
+ // TODO: this should be simplified; in the stable case, one can
+ // probably just use an Ident to the tree.symbol.
+ //
+ // @PP: That leads to failure here, where stuff no longer has type
+ // 'String @Annot("stuff")' but 'String @Annot(x)'.
+ //
+ // def m(x: String): String @Annot(x) = x
+ // val stuff = m("stuff")
+ //
+ // (TODO cont.) Why an existential in the non-stable case?
+ //
+ // @PP: In the following:
+ //
+ // def m = { val x = "three" ; val y: String @Annot(x) = x; y }
+ //
+ // m is typed as 'String @Annot(x) forSome { val x: String }'.
+ //
+ // Both examples are from run/constrained-types.scala.
+ object treeTrans extends Transformer {
+ override def transform(tree: Tree): Tree = tree.symbol match {
+ case StableArg(actual) =>
+ gen.mkAttributedQualifier(actual, tree.symbol)
+ case Arg(pid) =>
+ val sym = existentialFor(pid)
+ Ident(sym) copyAttrs tree setType typeRef(NoPrefix, sym, Nil)
+ case _ =>
+ super.transform(tree)
+ }
+ }
+ treeTrans transform arg
+ }
+ }
+
+ /** A map to convert every occurrence of a wildcard type to a fresh
+ * type variable */
+ object wildcardToTypeVarMap extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case WildcardType =>
+ TypeVar(tp, new TypeConstraint)
+ case BoundedWildcardType(bounds) =>
+ TypeVar(tp, new TypeConstraint(bounds))
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
+ /** A map to convert every occurrence of a type variable to a wildcard type. */
+ object typeVarToOriginMap extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeVar(origin, _) => origin
+ case _ => mapOver(tp)
+ }
+ }
+
+ /** A map to implement the `contains` method. */
+ class ContainsCollector(sym: Symbol) extends TypeCollector(false) {
+ def traverse(tp: Type) {
+ if (!result) {
+ tp.normalize match {
+ case TypeRef(_, sym1, _) if (sym == sym1) => result = true
+ case SingleType(_, sym1) if (sym == sym1) => result = true
+ case _ => mapOver(tp)
+ }
+ }
+ }
+
+ override def mapOver(arg: Tree) = {
+ for (t <- arg) {
+ traverse(t.tpe)
+ if (t.symbol == sym)
+ result = true
+ }
+ arg
+ }
+ }
+
+ /** A map to implement the `contains` method. */
+ class ContainsTypeCollector(t: Type) extends TypeCollector(false) {
+ def traverse(tp: Type) {
+ if (!result) {
+ if (tp eq t) result = true
+ else mapOver(tp)
+ }
+ }
+ override def mapOver(arg: Tree) = {
+ for (t <- arg)
+ traverse(t.tpe)
+
+ arg
+ }
+ }
+
+ /** A map to implement the `filter` method. */
+ class FilterTypeCollector(p: Type => Boolean) extends TypeCollector[List[Type]](Nil) {
+ override def collect(tp: Type) = super.collect(tp).reverse
+
+ def traverse(tp: Type) {
+ if (p(tp)) result ::= tp
+ mapOver(tp)
+ }
+ }
+
+ /** A map to implement the `collect` method. */
+ class CollectTypeCollector[T](pf: PartialFunction[Type, T]) extends TypeCollector[List[T]](Nil) {
+ override def collect(tp: Type) = super.collect(tp).reverse
+
+ def traverse(tp: Type) {
+ if (pf.isDefinedAt(tp)) result ::= pf(tp)
+ mapOver(tp)
+ }
+ }
+
+ class ForEachTypeTraverser(f: Type => Unit) extends TypeTraverser {
+ def traverse(tp: Type) {
+ f(tp)
+ mapOver(tp)
+ }
+ }
+
+ /** A map to implement the `find` method. */
+ class FindTypeCollector(p: Type => Boolean) extends TypeCollector[Option[Type]](None) {
+ def traverse(tp: Type) {
+ if (result.isEmpty) {
+ if (p(tp)) result = Some(tp)
+ mapOver(tp)
+ }
+ }
+ }
+
+ /** A collector that determines whether a type contains any erroneous types. */
+ object ErroneousCollector extends TypeCollector(false) {
+ def traverse(tp: Type) {
+ if (!result) {
+ result = tp.isError
+ mapOver(tp)
+ }
+ }
+ }
+
+ object adaptToNewRunMap extends TypeMap {
+
+ private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = {
+ if (phase.flatClasses || sym.isRootSymbol || (pre eq NoPrefix) || (pre eq NoType) || sym.isPackageClass)
+ sym
+ else if (sym.isModuleClass) {
+ val sourceModule1 = adaptToNewRun(pre, sym.sourceModule)
+
+ sourceModule1.moduleClass orElse sourceModule1.initialize.moduleClass orElse {
+ val msg = "Cannot adapt module class; sym = %s, sourceModule = %s, sourceModule.moduleClass = %s => sourceModule1 = %s, sourceModule1.moduleClass = %s"
+ debuglog(msg.format(sym, sym.sourceModule, sym.sourceModule.moduleClass, sourceModule1, sourceModule1.moduleClass))
+ sym
+ }
+ }
+ else {
+ var rebind0 = pre.findMember(sym.name, BRIDGE, 0, stableOnly = true) orElse {
+ if (sym.isAliasType) throw missingAliasException
+ devWarning(s"$pre.$sym no longer exists at phase $phase")
+ throw new MissingTypeControl // For build manager and presentation compiler purposes
+ }
+ /** The two symbols have the same fully qualified name */
+ def corresponds(sym1: Symbol, sym2: Symbol): Boolean =
+ sym1.name == sym2.name && (sym1.isPackageClass || corresponds(sym1.owner, sym2.owner))
+ if (!corresponds(sym.owner, rebind0.owner)) {
+ debuglog("ADAPT1 pre = "+pre+", sym = "+sym.fullLocationString+", rebind = "+rebind0.fullLocationString)
+ val bcs = pre.baseClasses.dropWhile(bc => !corresponds(bc, sym.owner))
+ if (bcs.isEmpty)
+ assert(pre.typeSymbol.isRefinementClass, pre) // if pre is a refinementclass it might be a structural type => OK to leave it in.
+ else
+ rebind0 = pre.baseType(bcs.head).member(sym.name)
+ debuglog(
+ "ADAPT2 pre = " + pre +
+ ", bcs.head = " + bcs.head +
+ ", sym = " + sym.fullLocationString +
+ ", rebind = " + rebind0.fullLocationString
+ )
+ }
+ rebind0.suchThat(sym => sym.isType || sym.isStable) orElse {
+ debuglog("" + phase + " " +phase.flatClasses+sym.owner+sym.name+" "+sym.isType)
+ throw new MalformedType(pre, sym.nameString)
+ }
+ }
+ }
+ def apply(tp: Type): Type = tp match {
+ case ThisType(sym) =>
+ try {
+ val sym1 = adaptToNewRun(sym.owner.thisType, sym)
+ if (sym1 == sym) tp else ThisType(sym1)
+ } catch {
+ case ex: MissingTypeControl =>
+ tp
+ }
+ case SingleType(pre, sym) =>
+ if (sym.isPackage) tp
+ else {
+ val pre1 = this(pre)
+ try {
+ val sym1 = adaptToNewRun(pre1, sym)
+ if ((pre1 eq pre) && (sym1 eq sym)) tp
+ else singleType(pre1, sym1)
+ } catch {
+ case _: MissingTypeControl =>
+ tp
+ }
+ }
+ case TypeRef(pre, sym, args) =>
+ if (sym.isPackageClass) tp
+ else {
+ val pre1 = this(pre)
+ val args1 = args mapConserve (this)
+ try {
+ val sym1 = adaptToNewRun(pre1, sym)
+ if ((pre1 eq pre) && (sym1 eq sym) && (args1 eq args)/* && sym.isExternal*/) {
+ tp
+ } else if (sym1 == NoSymbol) {
+ devWarning(s"adapt to new run failed: pre=$pre pre1=$pre1 sym=$sym")
+ tp
+ } else {
+ copyTypeRef(tp, pre1, sym1, args1)
+ }
+ } catch {
+ case ex: MissingAliasControl =>
+ apply(tp.dealias)
+ case _: MissingTypeControl =>
+ tp
+ }
+ }
+ case MethodType(params, restp) =>
+ val restp1 = this(restp)
+ if (restp1 eq restp) tp
+ else copyMethodType(tp, params, restp1)
+ case NullaryMethodType(restp) =>
+ val restp1 = this(restp)
+ if (restp1 eq restp) tp
+ else NullaryMethodType(restp1)
+ case PolyType(tparams, restp) =>
+ val restp1 = this(restp)
+ if (restp1 eq restp) tp
+ else PolyType(tparams, restp1)
+
+ // Lukas: we need to check (together) whether we should also include parameter types
+ // of PolyType and MethodType in adaptToNewRun
+
+ case ClassInfoType(parents, decls, clazz) =>
+ if (clazz.isPackageClass) tp
+ else {
+ val parents1 = parents mapConserve (this)
+ if (parents1 eq parents) tp
+ else ClassInfoType(parents1, decls, clazz)
+ }
+ case RefinedType(parents, decls) =>
+ val parents1 = parents mapConserve (this)
+ if (parents1 eq parents) tp
+ else refinedType(parents1, tp.typeSymbol.owner, decls, tp.typeSymbol.owner.pos)
+ case SuperType(_, _) => mapOver(tp)
+ case TypeBounds(_, _) => mapOver(tp)
+ case TypeVar(_, _) => mapOver(tp)
+ case AnnotatedType(_,_,_) => mapOver(tp)
+ case NotNullType(_) => mapOver(tp)
+ case ExistentialType(_, _) => mapOver(tp)
+ case _ => tp
+ }
+ }
+
+}
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala
new file mode 100644
index 0000000000..263b0f5a3e
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala
@@ -0,0 +1,29 @@
+package scala.reflect
+package internal
+package tpe
+
+private[internal] trait TypeToStrings {
+ self: SymbolTable =>
+
+ /** The maximum number of recursions allowed in toString
+ */
+ final val maxTostringRecursions = 50
+
+ private var tostringRecursions = 0
+
+ protected def typeToString(tpe: Type): String =
+ if (tostringRecursions >= maxTostringRecursions) {
+ devWarning("Exceeded recursion depth attempting to print " + util.shortClassOfInstance(tpe))
+ if (settings.debug.value)
+ (new Throwable).printStackTrace
+
+ "..."
+ }
+ else
+ try {
+ tostringRecursions += 1
+ tpe.safeToString
+ } finally {
+ tostringRecursions -= 1
+ }
+}
diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala
index 52d1657dc3..d83b4d71d9 100644
--- a/src/reflect/scala/reflect/internal/transform/Erasure.scala
+++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala
@@ -16,7 +16,7 @@ trait Erasure {
/** Is `tp` an unbounded generic type (i.e. which could be instantiated
* with primitive as well as class types)?.
*/
- private def genericCore(tp: Type): Type = tp.normalize match {
+ private def genericCore(tp: Type): Type = tp.dealiasWiden match {
/* A Java Array<T> is erased to Array[Object] (T can only be a reference type), where as a Scala Array[T] is
* erased to Object. However, there is only symbol for the Array class. So to make the distinction between
* a Java and a Scala array, we check if the owner of T comes from a Java class.
@@ -36,7 +36,7 @@ trait Erasure {
* then Some((N, T)) where N is the number of Array constructors enclosing `T`,
* otherwise None. Existentials on any level are ignored.
*/
- def unapply(tp: Type): Option[(Int, Type)] = tp.normalize match {
+ def unapply(tp: Type): Option[(Int, Type)] = tp.dealiasWiden match {
case TypeRef(_, ArrayClass, List(arg)) =>
genericCore(arg) match {
case NoType =>
@@ -69,7 +69,7 @@ trait Erasure {
//
// This requires that cls.isClass.
protected def rebindInnerClass(pre: Type, cls: Symbol): Type = {
- if (cls.owner.isClass) cls.owner.tpe else pre // why not cls.isNestedClass?
+ if (cls.owner.isClass) cls.owner.tpe_* else pre // why not cls.isNestedClass?
}
def unboxDerivedValueClassMethod(clazz: Symbol): Symbol =
@@ -101,7 +101,7 @@ trait Erasure {
def valueClassIsParametric(clazz: Symbol): Boolean = {
assert(!phase.erasedTypes)
clazz.typeParams contains
- clazz.derivedValueClassUnbox.tpe.resultType.normalize.typeSymbol
+ clazz.derivedValueClassUnbox.tpe.resultType.typeSymbol
}
abstract class ErasureMap extends TypeMap {
@@ -130,7 +130,7 @@ trait Erasure {
else if (sym.isRefinementClass) apply(mergeParents(tp.parents))
else if (sym.isDerivedValueClass) eraseDerivedValueClassRef(tref)
else if (sym.isClass) eraseNormalClassRef(pre, sym)
- else apply(sym.info) // alias type or abstract type
+ else apply(sym.info asSeenFrom (pre, sym.owner)) // alias type or abstract type
case PolyType(tparams, restpe) =>
apply(restpe)
case ExistentialType(tparams, restpe) =>
@@ -214,9 +214,6 @@ trait Erasure {
specialConstructorErasure(clazz, restpe)
case ExistentialType(tparams, restpe) =>
specialConstructorErasure(clazz, restpe)
- case RefinedType(parents, decls) =>
- specialConstructorErasure(
- clazz, specialScalaErasure.mergeParents(parents))
case mt @ MethodType(params, restpe) =>
MethodType(
cloneSymbolsAndModify(params, specialScalaErasure),
@@ -225,15 +222,7 @@ trait Erasure {
typeRef(pre, clazz, List())
case tp =>
if (!(clazz == ArrayClass || tp.isError))
- // See SI-6556. It seems in some cases the result constructor
- // type of an anonymous class is a different version of the class.
- // This has nothing to do with value classes per se.
- // We simply used a less discriminating transform before, that
- // did not look at the cases in detail.
- // It seems there is a deeper problem here, which needs
- // following up to. But we will not risk regressions
- // in 2.10 because of it.
- log(s"!!! unexpected constructor erasure $tp for $clazz")
+ assert(clazz == ArrayClass || tp.isError, s"!!! unexpected constructor erasure $tp for $clazz")
specialScalaErasure(tp)
}
}
diff --git a/src/reflect/scala/reflect/internal/transform/UnCurry.scala b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
index 6dc6a0f7b8..32d3171b26 100644
--- a/src/reflect/scala/reflect/internal/transform/UnCurry.scala
+++ b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
@@ -10,6 +10,14 @@ trait UnCurry {
import global._
import definitions._
+ /** Note: changing tp.normalize to tp.dealias in this method leads to a single
+ * test failure: run/t5688.scala, where instead of the expected output
+ * Vector(ta, tb, tab)
+ * we instead get
+ * Vector(tab, tb, tab)
+ * I think that difference is not the product of sentience but of randomness.
+ * Let us figure out why it is and then change this method.
+ */
private def expandAlias(tp: Type): Type = if (!tp.isHigherKinded) tp.normalize else tp
val uncurry: TypeMap = new TypeMap {
@@ -60,4 +68,4 @@ trait UnCurry {
*/
def transformInfo(sym: Symbol, tp: Type): Type =
if (sym.isType) uncurryType(tp) else uncurry(tp)
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala
index 2ba15e0776..63b7f73386 100644
--- a/src/reflect/scala/reflect/internal/util/Collections.scala
+++ b/src/reflect/scala/reflect/internal/util/Collections.scala
@@ -40,8 +40,6 @@ trait Collections {
mforeach(xss)(x => if ((res eq null) && p(x)) res = Some(x))
if (res eq null) None else res
}
- final def mfilter[A](xss: List[List[A]])(p: A => Boolean) =
- for (xs <- xss; x <- xs; if p(x)) yield x
final def map2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => C): List[C] = {
val lb = new ListBuffer[C]
@@ -78,19 +76,6 @@ trait Collections {
lb.toList
}
- final def distinctBy[A, B](xs: List[A])(f: A => B): List[A] = {
- val buf = new ListBuffer[A]
- val seen = mutable.Set[B]()
- xs foreach { x =>
- val y = f(x)
- if (!seen(y)) {
- buf += x
- seen += y
- }
- }
- buf.toList
- }
-
@tailrec final def flattensToEmpty(xss: Seq[Seq[_]]): Boolean = {
xss.isEmpty || xss.head.isEmpty && flattensToEmpty(xss.tail)
}
@@ -141,7 +126,7 @@ trait Collections {
ys1 = ys1.tail
ys2 = ys2.tail
}
- buf.result
+ buf.result()
}
final def foreach2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Unit): Unit = {
var ys1 = xs1
@@ -189,18 +174,6 @@ trait Collections {
}
false
}
- final def forall2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Boolean): Boolean = {
- var ys1 = xs1
- var ys2 = xs2
- while (!ys1.isEmpty && !ys2.isEmpty) {
- if (!f(ys1.head, ys2.head))
- return false
-
- ys1 = ys1.tail
- ys2 = ys2.tail
- }
- true
- }
final def forall3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => Boolean): Boolean = {
var ys1 = xs1
var ys2 = xs2
@@ -222,6 +195,3 @@ trait Collections {
case _: IllegalArgumentException => None
}
}
-
-object Collections extends Collections { }
-
diff --git a/src/reflect/scala/reflect/internal/util/HashSet.scala b/src/reflect/scala/reflect/internal/util/HashSet.scala
index 4135f3c469..74b6a54c6e 100644
--- a/src/reflect/scala/reflect/internal/util/HashSet.scala
+++ b/src/reflect/scala/reflect/internal/util/HashSet.scala
@@ -6,8 +6,6 @@
package scala.reflect.internal.util
object HashSet {
- def apply[T >: Null <: AnyRef](): HashSet[T] = this(16)
- def apply[T >: Null <: AnyRef](label: String): HashSet[T] = this(label, 16)
def apply[T >: Null <: AnyRef](initialCapacity: Int): HashSet[T] = this("No Label", initialCapacity)
def apply[T >: Null <: AnyRef](label: String, initialCapacity: Int): HashSet[T] =
new HashSet[T](label, initialCapacity)
diff --git a/src/reflect/scala/reflect/internal/util/Origins.scala b/src/reflect/scala/reflect/internal/util/Origins.scala
index 3259a12163..a2b9e24ebc 100644
--- a/src/reflect/scala/reflect/internal/util/Origins.scala
+++ b/src/reflect/scala/reflect/internal/util/Origins.scala
@@ -6,9 +6,7 @@
package scala.reflect
package internal.util
-import NameTransformer._
import scala.collection.{ mutable, immutable }
-import Origins._
/** A debugging class for logging from whence a method is being called.
* Say you wanted to discover who was calling phase_= in SymbolTable.
diff --git a/src/reflect/scala/reflect/internal/util/Position.scala b/src/reflect/scala/reflect/internal/util/Position.scala
index 8f287a1640..bb8c9e9b26 100644
--- a/src/reflect/scala/reflect/internal/util/Position.scala
+++ b/src/reflect/scala/reflect/internal/util/Position.scala
@@ -129,7 +129,7 @@ abstract class Position extends scala.reflect.api.Position { self =>
def endOrPoint: Int = point
@deprecated("use point instead", "2.9.0")
- def offset: Option[Int] = if (isDefined) Some(point) else None
+ def offset: Option[Int] = if (isDefined) Some(point) else None // used by sbt
/** The same position with a different start value (if a range) */
def withStart(off: Int): Position = this
@@ -266,46 +266,3 @@ class OffsetPosition(override val source: SourceFile, override val point: Int) e
}
override def show = "["+point+"]"
}
-
-/** new for position ranges */
-class RangePosition(source: SourceFile, override val start: Int, point: Int, override val end: Int)
-extends OffsetPosition(source, point) {
- if (start > end) sys.error("bad position: "+show)
- override def isRange: Boolean = true
- override def isOpaqueRange: Boolean = true
- override def startOrPoint: Int = start
- override def endOrPoint: Int = end
- override def withStart(off: Int) = new RangePosition(source, off, point, end)
- override def withEnd(off: Int) = new RangePosition(source, start, point, off)
- override def withPoint(off: Int) = new RangePosition(source, start, off, end)
- override def withSource(source: SourceFile, shift: Int) = new RangePosition(source, start + shift, point + shift, end + shift)
- override def focusStart = new OffsetPosition(source, start)
- override def focus = {
- if (focusCache eq NoPosition) focusCache = new OffsetPosition(source, point)
- focusCache
- }
- override def focusEnd = new OffsetPosition(source, end)
- override def makeTransparent = new TransparentPosition(source, start, point, end)
- override def includes(pos: Position) = pos.isDefined && start <= pos.startOrPoint && pos.endOrPoint <= end
- override def union(pos: Position): Position =
- if (pos.isRange) new RangePosition(source, start min pos.start, point, end max pos.end) else this
-
- override def toSingleLine: Position = source match {
- case bs: BatchSourceFile
- if end > 0 && bs.offsetToLine(start) < bs.offsetToLine(end - 1) =>
- val pointLine = bs.offsetToLine(point)
- new RangePosition(source, bs.lineToOffset(pointLine), point, bs.lineToOffset(pointLine + 1))
- case _ => this
- }
-
- override def toString = "RangePosition("+source.file.canonicalPath+", "+start+", "+point+", "+end+")"
- override def show = "["+start+":"+end+"]"
- private var focusCache: Position = NoPosition
-}
-
-class TransparentPosition(source: SourceFile, start: Int, point: Int, end: Int) extends RangePosition(source, start, point, end) {
- override def isOpaqueRange: Boolean = false
- override def isTransparent = true
- override def makeTransparent = this
- override def show = "<"+start+":"+end+">"
-}
diff --git a/src/reflect/scala/reflect/internal/util/RangePosition.scala b/src/reflect/scala/reflect/internal/util/RangePosition.scala
new file mode 100644
index 0000000000..3712aa0a52
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/RangePosition.scala
@@ -0,0 +1,49 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.reflect.internal.util
+
+/** new for position ranges */
+class RangePosition(source: SourceFile, override val start: Int, point: Int, override val end: Int)
+extends OffsetPosition(source, point) {
+ if (start > end) sys.error("bad position: "+show)
+ override def isRange: Boolean = true
+ override def isOpaqueRange: Boolean = true
+ override def startOrPoint: Int = start
+ override def endOrPoint: Int = end
+ override def withStart(off: Int) = new RangePosition(source, off, point, end)
+ override def withEnd(off: Int) = new RangePosition(source, start, point, off)
+ override def withPoint(off: Int) = new RangePosition(source, start, off, end)
+ override def withSource(source: SourceFile, shift: Int) = new RangePosition(source, start + shift, point + shift, end + shift)
+ override def focusStart = new OffsetPosition(source, start)
+ override def focus = {
+ if (focusCache eq NoPosition) focusCache = new OffsetPosition(source, point)
+ focusCache
+ }
+ override def focusEnd = new OffsetPosition(source, end)
+ override def makeTransparent = new TransparentPosition(source, start, point, end)
+ override def includes(pos: Position) = pos.isDefined && start <= pos.startOrPoint && pos.endOrPoint <= end
+ override def union(pos: Position): Position =
+ if (pos.isRange) new RangePosition(source, start min pos.start, point, end max pos.end) else this
+
+ override def toSingleLine: Position = source match {
+ case bs: BatchSourceFile
+ if end > 0 && bs.offsetToLine(start) < bs.offsetToLine(end - 1) =>
+ val pointLine = bs.offsetToLine(point)
+ new RangePosition(source, bs.lineToOffset(pointLine), point, bs.lineToOffset(pointLine + 1))
+ case _ => this
+ }
+
+ override def toString = "RangePosition("+source.file.canonicalPath+", "+start+", "+point+", "+end+")"
+ override def show = "["+start+":"+end+"]"
+ private var focusCache: Position = NoPosition
+}
+
+class TransparentPosition(source: SourceFile, start: Int, point: Int, end: Int) extends RangePosition(source, start, point, end) {
+ override def isOpaqueRange: Boolean = false
+ override def isTransparent = true
+ override def makeTransparent = this
+ override def show = "<"+start+":"+end+">"
+}
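
Editor's note: the hunk above moves RangePosition and TransparentPosition verbatim into their own source file; the part most consumers rely on is the interval arithmetic in `includes` and `union`. Below is a minimal standalone sketch of that arithmetic over a plain case class (`SimpleRange` is an illustrative stand-in, not the compiler's Position hierarchy).

// Illustrative stand-in for RangePosition's interval logic; not the internal API.
case class SimpleRange(start: Int, point: Int, end: Int) {
  require(start <= end, s"bad position: [$start:$end]")

  // A range includes another if it covers it completely.
  def includes(other: SimpleRange): Boolean =
    start <= other.start && other.end <= end

  // Union keeps this range's point but widens start/end to cover both.
  def union(other: SimpleRange): SimpleRange =
    SimpleRange(start min other.start, point, end max other.end)
}

object RangeDemo extends App {
  val a = SimpleRange(10, 12, 20)
  val b = SimpleRange(15, 16, 30)
  println(a includes b)  // false: b ends past a
  println(a union b)     // SimpleRange(10,12,30)
}
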
diff --git a/src/reflect/scala/reflect/internal/util/Set.scala b/src/reflect/scala/reflect/internal/util/Set.scala
index 36bdb8174a..57e5e0c0b9 100644
--- a/src/reflect/scala/reflect/internal/util/Set.scala
+++ b/src/reflect/scala/reflect/internal/util/Set.scala
@@ -18,8 +18,6 @@ abstract class Set[T <: AnyRef] {
def apply(x: T): Boolean = contains(x)
- @deprecated("use `iterator` instead", "2.9.0") def elements = iterator
-
def contains(x: T): Boolean =
findEntry(x) ne null
diff --git a/src/reflect/scala/reflect/internal/util/SourceFile.scala b/src/reflect/scala/reflect/internal/util/SourceFile.scala
index bc2d0ee4db..dd2a6e21f1 100644
--- a/src/reflect/scala/reflect/internal/util/SourceFile.scala
+++ b/src/reflect/scala/reflect/internal/util/SourceFile.scala
@@ -24,7 +24,6 @@ abstract class SourceFile {
assert(offset < length, file + ": " + offset + " >= " + length)
new OffsetPosition(this, offset)
}
- def position(line: Int, column: Int) : Position = new OffsetPosition(this, lineToOffset(line) + column)
def offsetToLine(offset: Int): Int
def lineToOffset(index : Int): Int
@@ -37,9 +36,6 @@ abstract class SourceFile {
def dbg(offset: Int) = (new OffsetPosition(this, offset)).dbgString
def path = file.path
- def beginsWith(offset: Int, text: String): Boolean =
- (content drop offset) startsWith text
-
def lineToString(index: Int): String =
content drop lineToOffset(index) takeWhile (c => !isLineBreakChar(c.toChar)) mkString ""
@@ -81,7 +77,6 @@ object ScriptSourceFile {
}
else 0
}
- def stripHeader(cs: Array[Char]): Array[Char] = cs drop headerLength(cs)
def apply(file: AbstractFile, content: Array[Char]) = {
val underlying = new BatchSourceFile(file, content)
@@ -91,7 +86,6 @@ object ScriptSourceFile {
stripped
}
}
-import ScriptSourceFile._
class ScriptSourceFile(underlying: BatchSourceFile, content: Array[Char], override val start: Int) extends BatchSourceFile(underlying.file, content) {
override def isSelfContained = false
diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala
index af4a0263ec..0fa798058d 100644
--- a/src/reflect/scala/reflect/internal/util/Statistics.scala
+++ b/src/reflect/scala/reflect/internal/util/Statistics.scala
@@ -257,7 +257,6 @@ quant)
def enabled = _enabled
def enabled_=(cond: Boolean) = {
if (cond && !_enabled) {
- val test = new Timer("", Nil)
val start = System.nanoTime()
var total = 0L
for (i <- 1 to 10000) {
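
Editor's note: the hunk above only drops an unused Timer, but the surrounding code is the timer self-calibration that runs when statistics are switched on: time a tight loop of System.nanoTime calls to estimate the overhead each timed sample adds. A rough sketch of that idea (the exact internal code is cut off in this hunk):

// Rough sketch of nanoTime self-calibration: estimate the cost of one timed sample.
object NanoOverheadDemo extends App {
  val iterations = 10000
  val start = System.nanoTime()
  var total = 0L
  for (i <- 1 to iterations) {
    val t = System.nanoTime()
    total += System.nanoTime() - t   // cost of a back-to-back nanoTime pair
  }
  val elapsed = System.nanoTime() - start
  println("avg per sample: " + total / iterations + " ns (loop total " + elapsed + " ns)")
}
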
diff --git a/src/reflect/scala/reflect/internal/util/StringOps.scala b/src/reflect/scala/reflect/internal/util/StringOps.scala
index 8f6c409e0b..93bbfdd273 100644
--- a/src/reflect/scala/reflect/internal/util/StringOps.scala
+++ b/src/reflect/scala/reflect/internal/util/StringOps.scala
@@ -6,7 +6,6 @@
** |/ **
\* */
-
package scala.reflect.internal.util
/** This object provides utility methods to extract elements
@@ -16,22 +15,14 @@ package scala.reflect.internal.util
* @version 1.0
*/
trait StringOps {
- def onull(s: String) = if (s == null) "" else s
- def oempty(xs: String*) = xs filterNot (x => x == null || x == "")
- def ojoin(xs: String*): String = oempty(xs: _*) mkString " "
- def ojoin(xs: Seq[String], sep: String): String = oempty(xs: _*) mkString sep
- def ojoinOr(xs: Seq[String], sep: String, orElse: String) = {
- val ys = oempty(xs: _*)
- if (ys.isEmpty) orElse else ys mkString sep
- }
- def trimTrailingSpace(s: String) = {
- if (s.length == 0 || !s.charAt(s.length - 1).isWhitespace) s
- else {
- var idx = s.length - 1
- while (idx >= 0 && s.charAt(idx).isWhitespace)
- idx -= 1
-
- s.substring(0, idx + 1)
+ def oempty(xs: String*) = xs filterNot (x => x == null || x == "")
+ def ojoin(xs: String*): String = oempty(xs: _*) mkString " "
+ def longestCommonPrefix(xs: List[String]): String = {
+ if (xs.isEmpty || xs.contains("")) ""
+ else xs.head.head match {
+ case ch =>
+ if (xs.tail forall (_.head == ch)) "" + ch + longestCommonPrefix(xs map (_.tail))
+ else ""
}
}
@@ -49,14 +40,6 @@ trait StringOps {
def words(str: String): List[String] = decompose(str, ' ')
- def stripPrefixOpt(str: String, prefix: String): Option[String] =
- if (str startsWith prefix) Some(str drop prefix.length)
- else None
-
- def stripSuffixOpt(str: String, suffix: String): Option[String] =
- if (str endsWith suffix) Some(str dropRight suffix.length)
- else None
-
def splitWhere(str: String, f: Char => Boolean, doDropIndex: Boolean = false): Option[(String, String)] =
splitAt(str, str indexWhere f, doDropIndex)
@@ -65,10 +48,6 @@ trait StringOps {
else Some((str take idx, str drop (if (doDropIndex) idx + 1 else idx)))
/** Returns a string meaning "n elements".
- *
- * @param n ...
- * @param elements ...
- * @return ...
*/
def countElementsAsString(n: Int, elements: String): String =
n match {
@@ -81,9 +60,6 @@ trait StringOps {
}
/** Turns a count into a friendly English description if n<=4.
- *
- * @param n ...
- * @return ...
*/
def countAsString(n: Int): String =
n match {
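
Editor's note: the only genuinely new method in this StringOps hunk is `longestCommonPrefix`, which peels off one character at a time as long as every string agrees on it. A free-standing copy of the same algorithm, so it can be tried without the internal trait:

// Free-standing copy of the longestCommonPrefix algorithm added above.
object LcpDemo extends App {
  def longestCommonPrefix(xs: List[String]): String =
    if (xs.isEmpty || xs.contains("")) ""
    else {
      val ch = xs.head.head
      if (xs.tail forall (_.head == ch)) "" + ch + longestCommonPrefix(xs map (_.tail))
      else ""
    }

  println(longestCommonPrefix(List("scala.reflect.io", "scala.reflect.internal")))  // scala.reflect.i
  println(longestCommonPrefix(List("abc", "")))                                     // "" (an empty string short-circuits)
}
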
diff --git a/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala b/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala
index e7579229b2..9259c5abf1 100644
--- a/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala
+++ b/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala
@@ -6,7 +6,7 @@ trait StripMarginInterpolator {
def stringContext: StringContext
/**
- * A safe combination of `[[scala.collection.immutable.StringLike#stripMargin]]
+ * A safe combination of [[scala.collection.immutable.StringLike#stripMargin]]
* and [[scala.StringContext#raw]].
*
* The margin of each line is defined by whitespace leading up to a '|' character.
diff --git a/src/reflect/scala/reflect/internal/util/TableDef.scala b/src/reflect/scala/reflect/internal/util/TableDef.scala
index 8e2bcc2ff7..d57c59757d 100644
--- a/src/reflect/scala/reflect/internal/util/TableDef.scala
+++ b/src/reflect/scala/reflect/internal/util/TableDef.scala
@@ -19,8 +19,8 @@ class TableDef[T](_cols: Column[T]*) {
* if none is specified, a space is used.
*/
def ~(next: Column[T]) = retThis(cols :+= next)
- def >>(pair: (String, T => Any)) = this ~ Column(pair._1, pair._2, false)
- def <<(pair: (String, T => Any)) = this ~ Column(pair._1, pair._2, true)
+ def >>(pair: (String, T => Any)) = this ~ Column(pair._1, pair._2, left = false)
+ def <<(pair: (String, T => Any)) = this ~ Column(pair._1, pair._2, left = true)
def >+(sep: String) = retThis(separators += ((cols.size - 1, sep)))
/** Below this point should all be considered private/internal.
@@ -67,12 +67,6 @@ class TableDef[T](_cols: Column[T]*) {
override def toString = allToSeq mkString "\n"
}
- def formatterFor(rows: Seq[T]): T => String = {
- val formatStr = new Table(rows).rowFormat
-
- x => formatStr.format(colApply(x) : _*)
- }
-
def table(rows: Seq[T]) = new Table(rows)
override def toString = cols.mkString("TableDef(", ", ", ")")
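
Editor's note: the `>>`/`<<` change is purely about call-site readability, naming the boolean argument. A trivial illustration with a hypothetical Column stand-in:

// Hypothetical stand-in for the internal Column, to show why `left = true`
// reads better at the call site than a bare boolean literal.
case class Column[T](name: String, f: T => Any, left: Boolean)

object NamedArgDemo extends App {
  val named = Column("name", (s: String) => s, left = true)   // self-documenting
  val bare  = Column("len", (s: String) => s.length, false)   // what does `false` mean here?
  println(Seq(named, bare) map (_.name))
}
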
diff --git a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
index fa83f70f3a..632890d600 100644
--- a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
+++ b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
@@ -12,13 +12,9 @@ trait TraceSymbolActivity {
if (enabled && global.isCompilerUniverse)
scala.sys addShutdownHook showAllSymbols()
- private type Set[T] = scala.collection.immutable.Set[T]
- private val Set = scala.collection.immutable.Set
-
val allSymbols = mutable.Map[Int, Symbol]()
val allChildren = mutable.Map[Int, List[Int]]() withDefaultValue Nil
val prevOwners = mutable.Map[Int, List[(Int, Phase)]]() withDefaultValue Nil
- val symsCaused = mutable.Map[Int, Int]() withDefaultValue 0
val allTrees = mutable.Set[Tree]()
def recordSymbolsInTree(tree: Tree) {
@@ -44,38 +40,6 @@ trait TraceSymbolActivity {
}
}
- /** TODO.
- */
- private def reachableDirectlyFromSymbol(sym: Symbol): List[Symbol] = (
- List(sym.owner, sym.alias, sym.thisSym)
- ++ sym.children
- ++ sym.info.parents.map(_.typeSymbol)
- ++ sym.typeParams
- ++ sym.paramss.flatten
- )
- private def reachable[T](inputs: Traversable[T], mkSymbol: T => Symbol): Set[Symbol] = {
- def loop(seen: Set[Symbol], remaining: List[Symbol]): Set[Symbol] = {
- remaining match {
- case Nil => seen
- case head :: rest =>
- if ((head eq null) || (head eq NoSymbol) || seen(head)) loop(seen, rest)
- else loop(seen + head, rest ++ reachableDirectlyFromSymbol(head).filterNot(seen))
- }
- }
- loop(immutable.Set(), inputs.toList map mkSymbol filterNot (_ eq null) distinct)
- }
- private def treeList(t: Tree) = {
- val buf = mutable.ListBuffer[Tree]()
- t foreach (buf += _)
- buf.toList
- }
-
- private def reachableFromSymbol(root: Symbol): Set[Symbol] =
- reachable[Symbol](List(root, root.info.typeSymbol), x => x)
-
- private def reachableFromTree(tree: Tree): Set[Symbol] =
- reachable[Tree](treeList(tree), _.symbol)
-
private def signature(id: Int) = runBeforeErasure(allSymbols(id).defString)
private def dashes(s: Any): String = ("" + s) map (_ => '-')
@@ -119,7 +83,7 @@ trait TraceSymbolActivity {
}
println("\n")
}
- private def showFreq[T, U](xs: Traversable[T])(groupFn: T => U, showFn: U => String = (x: U) => "" + x) = {
+ private def showFreq[T, U](xs: Traversable[T])(groupFn: T => U, showFn: U => String) = {
showMapFreq(xs.toList groupBy groupFn)(showFn)
}
private lazy val findErasurePhase: Phase = {
@@ -129,7 +93,7 @@ trait TraceSymbolActivity {
}
ph
}
- private def runBeforeErasure[T](body: => T): T = atPhase(findErasurePhase)(body)
+ private def runBeforeErasure[T](body: => T): T = enteringPhase(findErasurePhase)(body)
def showAllSymbols() {
if (!enabled) return
diff --git a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
index 9882aad5e5..41e74f80e9 100644
--- a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
+++ b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
@@ -1,9 +1,6 @@
package scala.reflect.internal.util
import scala.collection.mutable
-import scala.collection.mutable.ArrayBuffer
-import scala.collection.mutable.Builder
-import scala.collection.mutable.SetBuilder
import scala.collection.generic.Clearable
import scala.runtime.AbstractFunction1
diff --git a/src/reflect/scala/reflect/internal/util/package.scala b/src/reflect/scala/reflect/internal/util/package.scala
index 6d77235db6..1ca57b81ed 100644
--- a/src/reflect/scala/reflect/internal/util/package.scala
+++ b/src/reflect/scala/reflect/internal/util/package.scala
@@ -1,7 +1,36 @@
-package scala.reflect
+package scala
+package reflect
package internal
package object util {
+ import StringOps.longestCommonPrefix
+
+ // Shorten a name like Symbols$FooSymbol to FooSymbol.
+ private def shortenName(name: String): String = {
+ if (name == "") return ""
+ val segments = (name split '$').toList
+ val last = segments.last
+
+ if (last.length == 0)
+ segments takeRight 2 mkString "$"
+ else
+ last
+ }
+
+ def shortClassOfInstance(x: AnyRef): String = shortClass(x.getClass)
+ def shortClass(clazz: Class[_]): String = {
+ val name: String = (clazz.getName split '.').last
+ def isModule = name endsWith "$" // object
+ def isAnon = (name split '$').last forall (_.isDigit) // anonymous class
+
+ if (isModule)
+ (name split '$' filterNot (_ == "")).last + "$"
+ else if (isAnon) {
+ val parents = clazz.getSuperclass :: clazz.getInterfaces.toList
+ parents map (c => shortClass(c)) mkString " with "
+ }
+ else shortenName(name)
+ }
/**
* Adds the `sm` String interpolator to a [[scala.StringContext]].
*/
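
Editor's note: the new `shortClass` / `shortClassOfInstance` helpers shorten JVM class names for debug output (module classes keep a trailing `$`, anonymous classes are described by their parents). A free-standing sketch of the same logic with a couple of sample outputs:

// Free-standing sketch of the name-shortening logic added to the util package object.
object ShortClassDemo extends App {
  private def shortenName(name: String): String =
    if (name == "") ""
    else {
      val segments = (name split '$').toList
      if (segments.last.isEmpty) segments takeRight 2 mkString "$" else segments.last
    }

  def shortClass(clazz: Class[_]): String = {
    val name = (clazz.getName split '.').last
    def isModule = name endsWith "$"                        // object, e.g. None$
    def isAnon   = (name split '$').last forall (_.isDigit) // anonymous class, e.g. Foo$1

    if (isModule) (name split '$' filterNot (_ == "")).last + "$"
    else if (isAnon) (clazz.getSuperclass :: clazz.getInterfaces.toList) map shortClass mkString " with "
    else shortenName(name)
  }

  println(shortClass(classOf[scala.collection.immutable.ListMap[_, _]])) // ListMap
  println(shortClass(None.getClass))                                     // None$
}
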
diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala
index 15befb67f1..8b69efc749 100644
--- a/src/reflect/scala/reflect/io/AbstractFile.scala
+++ b/src/reflect/scala/reflect/io/AbstractFile.scala
@@ -7,16 +7,16 @@
package scala.reflect
package io
-import java.io.{ FileOutputStream, IOException, InputStream, OutputStream, BufferedOutputStream }
+import java.io.{ FileOutputStream, IOException, InputStream, OutputStream, BufferedOutputStream, ByteArrayOutputStream }
import java.io.{ File => JFile }
import java.net.URL
import scala.collection.mutable.ArrayBuffer
/**
* An abstraction over files for use in the reflection/compiler libraries.
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
- *
+ *
* @author Philippe Altherr
* @version 1.0, 23/03/2004
*/
@@ -27,7 +27,7 @@ object AbstractFile {
/**
* If the specified File exists and is a regular file, returns an
- * abstract regular file backed by it. Otherwise, returns <code>null</code>.
+ * abstract regular file backed by it. Otherwise, returns `null`.
*/
def getFile(file: File): AbstractFile =
if (file.isFile) new PlainFile(file) else null
@@ -38,10 +38,7 @@ object AbstractFile {
/**
* If the specified File exists and is either a directory or a
* readable zip or jar archive, returns an abstract directory
- * backed by it. Otherwise, returns <code>null</code>.
- *
- * @param file ...
- * @return ...
+ * backed by it. Otherwise, returns `null`.
*/
def getDirectory(file: File): AbstractFile =
if (file.isDirectory) new PlainFile(file)
@@ -51,15 +48,14 @@ object AbstractFile {
/**
* If the specified URL exists and is a readable zip or jar archive,
* returns an abstract directory backed by it. Otherwise, returns
- * <code>null</code>.
- *
- * @param file ...
- * @return ...
+ * `null`.
*/
def getURL(url: URL): AbstractFile = {
if (url == null || !Path.isExtensionJarOrZip(url.getPath)) null
else ZipArchive fromURL url
}
+
+ def getResources(url: URL): AbstractFile = ZipArchive fromManifestURL url
}
/**
@@ -80,12 +76,12 @@ object AbstractFile {
* </p>
* <p>
* The interface does <b>not</b> allow to access the content.
- * The class <code>symtab.classfile.AbstractFileReader</code> accesses
+ * The class `symtab.classfile.AbstractFileReader` accesses
* bytes, knowing that the character set of classfiles is UTF-8. For
- * all other cases, the class <code>SourceFile</code> is used, which honors
- * <code>global.settings.encoding.value</code>.
+ * all other cases, the class `SourceFile` is used, which honors
+ * `global.settings.encoding.value`.
* </p>
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
abstract class AbstractFile extends Iterable[AbstractFile] {
@@ -130,6 +126,9 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
/** Is this abstract file a directory? */
def isDirectory: Boolean
+ /** Does this abstract file correspond to something on-disk? */
+ def isVirtual: Boolean = false
+
/** Returns the time that this abstract file was last modified. */
def lastModified: Long
@@ -148,7 +147,7 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
def toURL: URL = if (file == null) null else file.toURI.toURL
/** Returns contents of file (if applicable) in a Char array.
- * warning: use <code>Global.getSourceFile()</code> to use the proper
+ * warning: use `Global.getSourceFile()` to use the proper
* encoding when converting to the char array.
*/
@throws(classOf[IOException])
@@ -159,24 +158,36 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
@throws(classOf[IOException])
def toByteArray: Array[Byte] = {
val in = input
- var rest = sizeOption.getOrElse(0)
- val arr = new Array[Byte](rest)
- while (rest > 0) {
- val res = in.read(arr, arr.length - rest, rest)
- if (res == -1)
- throw new IOException("read error")
- rest -= res
+ sizeOption match {
+ case Some(size) =>
+ var rest = size
+ val arr = new Array[Byte](rest)
+ while (rest > 0) {
+ val res = in.read(arr, arr.length - rest, rest)
+ if (res == -1)
+ throw new IOException("read error")
+ rest -= res
+ }
+ in.close()
+ arr
+ case None =>
+ val out = new ByteArrayOutputStream()
+ var c = in.read()
+ while(c != -1) {
+ out.write(c)
+ c = in.read()
+ }
+ in.close()
+ out.toByteArray()
}
- in.close()
- arr
}
/** Returns all abstract subfiles of this abstract directory. */
def iterator: Iterator[AbstractFile]
/** Returns the abstract file in this abstract directory with the specified
- * name. If there is no such file, returns <code>null</code>. The argument
- * <code>directory</code> tells whether to look for a directory or
+ * name. If there is no such file, returns `null`. The argument
+ * `directory` tells whether to look for a directory or
* a regular file.
*/
def lookupName(name: String, directory: Boolean): AbstractFile
@@ -186,19 +197,6 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
*/
def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile
- /** Returns the abstract file in this abstract directory with the specified
- * path relative to it, If there is no such file, returns null. The argument
- * <code>directory</code> tells whether to look for a directory or a regular
- * file.
- *
- * @param path ...
- * @param directory ...
- * @return ...
- */
- def lookupPath(path: String, directory: Boolean): AbstractFile = {
- lookup((f, p, dir) => f.lookupName(p, dir), path, directory)
- }
-
/** Return an abstract file that does not check that `path` denotes
* an existing file.
*/
@@ -243,7 +241,7 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
*/
def fileNamed(name: String): AbstractFile = {
assert(isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path))
- fileOrSubdirectoryNamed(name, false)
+ fileOrSubdirectoryNamed(name, isDir = false)
}
/**
@@ -252,7 +250,7 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
*/
def subdirectoryNamed(name: String): AbstractFile = {
assert (isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path))
- fileOrSubdirectoryNamed(name, true)
+ fileOrSubdirectoryNamed(name, isDir = true)
}
protected def unsupported(): Nothing = unsupported(null)
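
Editor's note: the interesting change in AbstractFile is `toByteArray`: when `sizeOption` is known it fills an exact-size array, otherwise it now falls back to copying into a ByteArrayOutputStream instead of producing an empty array. A standalone sketch of the same two strategies over a plain InputStream (the byte-at-a-time loop mirrors the patch; a buffered loop would be faster):

import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, IOException, InputStream }

// Two read strategies: exact-size read when the length is known,
// grow-as-you-go fallback when it is not.
object ReadFullyDemo extends App {
  def readFully(in: InputStream, sizeOption: Option[Int]): Array[Byte] = sizeOption match {
    case Some(size) =>
      var rest = size
      val arr  = new Array[Byte](size)
      while (rest > 0) {
        val res = in.read(arr, arr.length - rest, rest)
        if (res == -1) throw new IOException("read error")
        rest -= res
      }
      in.close()
      arr
    case None =>
      val out = new ByteArrayOutputStream()
      var c = in.read()
      while (c != -1) { out.write(c); c = in.read() }
      in.close()
      out.toByteArray
  }

  val bytes = "range positions".getBytes("UTF-8")
  println(readFully(new ByteArrayInputStream(bytes), None).length)              // 15
  println(readFully(new ByteArrayInputStream(bytes), Some(bytes.length)).length) // 15
}
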
diff --git a/src/reflect/scala/reflect/io/Directory.scala b/src/reflect/scala/reflect/io/Directory.scala
index c040d1eac5..4bf9ed8a36 100644
--- a/src/reflect/scala/reflect/io/Directory.scala
+++ b/src/reflect/scala/reflect/io/Directory.scala
@@ -14,12 +14,10 @@ import java.io.{ File => JFile }
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
object Directory {
- import scala.util.Properties.{ tmpDir, userHome, userDir }
+ import scala.util.Properties.{ userHome, userDir }
private def normalizePath(s: String) = Some(apply(Path(s).normalize))
def Current: Option[Directory] = if (userDir == "") None else normalizePath(userDir)
- def Home: Option[Directory] = if (userHome == "") None else normalizePath(userHome)
- def TmpDir: Option[Directory] = if (tmpDir == "") None else normalizePath(tmpDir)
def apply(path: Path): Directory = path.toDirectory
@@ -30,20 +28,18 @@ object Directory {
path.createDirectory()
}
}
-import Path._
/** An abstraction for directories.
*
* @author Paul Phillips
* @since 2.8
- *
+ *
* ''Note: This is library is considered experimental and should not be used unless you know what you are doing.''
*/
class Directory(jfile: JFile) extends Path(jfile) {
override def toAbsolute: Directory = if (isAbsolute) this else super.toAbsolute.toDirectory
override def toDirectory: Directory = this
override def toFile: File = new File(jfile)
- override def isValid = jfile.isDirectory() || !jfile.exists()
override def normalize: Directory = super.normalize.toDirectory
/** An iterator over the contents of this directory.
@@ -60,7 +56,6 @@ class Directory(jfile: JFile) extends Path(jfile) {
override def walkFilter(cond: Path => Boolean): Iterator[Path] =
list filter cond flatMap (_ walkFilter cond)
- def deepDirs: Iterator[Directory] = Path.onlyDirs(deepList())
def deepFiles: Iterator[File] = Path.onlyFiles(deepList())
/** If optional depth argument is not given, will recurse
@@ -70,10 +65,4 @@ class Directory(jfile: JFile) extends Path(jfile) {
if (depth < 0) list ++ (dirs flatMap (_ deepList (depth)))
else if (depth == 0) Iterator.empty
else list ++ (dirs flatMap (_ deepList (depth - 1)))
-
- /** An iterator over the directories underneath this directory,
- * to the (optionally) given depth.
- */
- def subdirs(depth: Int = 1): Iterator[Directory] =
- deepList(depth) collect { case x: Directory => x }
}
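
Editor's note: `deepList` keeps the convention that a negative depth means unlimited recursion, zero means nothing, and a positive depth descends that many levels. A rough equivalent over plain java.io.File for experimentation (reimplemented here, not the reflect.io API):

import java.io.File

// Rough java.io.File equivalent of Directory.deepList's depth convention.
object DeepListDemo extends App {
  def deepList(dir: File, depth: Int = -1): Iterator[File] =
    if (depth == 0) Iterator.empty
    else {
      val entries = Option(dir.listFiles).map(_.iterator).getOrElse(Iterator.empty)
      entries flatMap { f =>
        val below =
          if (f.isDirectory) deepList(f, if (depth < 0) depth else depth - 1)
          else Iterator.empty
        Iterator(f) ++ below
      }
    }

  deepList(new File("."), depth = 1) foreach println   // direct children only
}
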
diff --git a/src/reflect/scala/reflect/io/File.scala b/src/reflect/scala/reflect/io/File.scala
index 736ba5d51e..64651dcfbd 100644
--- a/src/reflect/scala/reflect/io/File.scala
+++ b/src/reflect/scala/reflect/io/File.scala
@@ -22,8 +22,7 @@ import scala.language.{reflectiveCalls, implicitConversions}
*/
object File {
def pathSeparator = java.io.File.pathSeparator
- def separator = java.io.File.separator
-
+ def separator = java.io.File.separator
def apply(path: Path)(implicit codec: Codec) = new File(path.jfile)(codec)
// Create a temporary file, which will be deleted upon jvm exit.
@@ -32,41 +31,7 @@ object File {
jfile.deleteOnExit()
apply(jfile)
}
-
- type HasClose = { def close(): Unit }
-
- def closeQuietly(target: HasClose) {
- try target.close() catch { case e: IOException => }
- }
- def closeQuietly(target: JCloseable) {
- try target.close() catch { case e: IOException => }
- }
-
- // this is a workaround for http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6503430
- // we are using a static initializer to statically initialize a java class so we don't
- // trigger java.lang.InternalErrors later when using it concurrently. We ignore all
- // the exceptions so as not to cause spurious failures when no write access is available,
- // e.g. google app engine.
- //
- // XXX need to put this behind a setting.
- //
- // try {
- // import Streamable.closing
- // val tmp = java.io.File.createTempFile("bug6503430", null, null)
- // try closing(new FileInputStream(tmp)) { in =>
- // val inc = in.getChannel()
- // closing(new FileOutputStream(tmp, true)) { out =>
- // out.getChannel().transferFrom(inc, 0, 0)
- // }
- // }
- // finally tmp.delete()
- // }
- // catch {
- // case _: IllegalArgumentException | _: IllegalStateException | _: IOException | _: SecurityException => ()
- // }
}
-import File._
-import Path._
/** An abstraction for files. For character data, a Codec
* can be supplied at either creation time or when a method
@@ -76,19 +41,17 @@ import Path._
*
* @author Paul Phillips
* @since 2.8
- *
+ *
* ''Note: This is library is considered experimental and should not be used unless you know what you are doing.''
*/
class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) with Streamable.Chars {
override val creationCodec = constructorCodec
- def withCodec(codec: Codec): File = new File(jfile)(codec)
override def addExtension(ext: String): File = super.addExtension(ext).toFile
override def toAbsolute: File = if (isAbsolute) this else super.toAbsolute.toFile
override def toDirectory: Directory = new Directory(jfile)
override def toFile: File = this
override def normalize: File = super.normalize.toFile
- override def isValid = jfile.isFile() || !jfile.exists()
override def length = super[Path].length
override def walkFilter(cond: Path => Boolean): Iterator[Path] =
if (cond(this)) Iterator.single(this) else Iterator.empty
@@ -99,26 +62,22 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w
/** Obtains a OutputStream. */
def outputStream(append: Boolean = false) = new FileOutputStream(jfile, append)
def bufferedOutput(append: Boolean = false) = new BufferedOutputStream(outputStream(append))
- def printStream(append: Boolean = false) = new PrintStream(outputStream(append), true)
/** Obtains an OutputStreamWriter wrapped around a FileOutputStream.
* This should behave like a less broken version of java.io.FileWriter,
* in that unlike the java version you can specify the encoding.
*/
- def writer(): OutputStreamWriter = writer(false)
- def writer(append: Boolean): OutputStreamWriter = writer(append, creationCodec)
def writer(append: Boolean, codec: Codec): OutputStreamWriter =
new OutputStreamWriter(outputStream(append), codec.charSet)
/** Wraps a BufferedWriter around the result of writer().
*/
- def bufferedWriter(): BufferedWriter = bufferedWriter(false)
+ def bufferedWriter(): BufferedWriter = bufferedWriter(append = false)
def bufferedWriter(append: Boolean): BufferedWriter = bufferedWriter(append, creationCodec)
def bufferedWriter(append: Boolean, codec: Codec): BufferedWriter =
new BufferedWriter(writer(append, codec))
def printWriter(): PrintWriter = new PrintWriter(bufferedWriter(), true)
- def printWriter(append: Boolean): PrintWriter = new PrintWriter(bufferedWriter(append), true)
/** Creates a new file and writes all the Strings to it. */
def writeAll(strings: String*): Unit = {
@@ -127,12 +86,6 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w
finally out.close()
}
- def writeBytes(bytes: Array[Byte]): Unit = {
- val out = bufferedOutput()
- try out write bytes
- finally out.close()
- }
-
def appendAll(strings: String*): Unit = {
val out = bufferedWriter(append = true)
try strings foreach (out write _)
@@ -150,39 +103,6 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w
try Some(slurp())
catch { case _: IOException => None }
- def copyTo(destPath: Path, preserveFileDate: Boolean = false): Boolean = {
- val CHUNK = 1024 * 1024 * 16 // 16 MB
- val dest = destPath.toFile
- if (!isValid) fail("Source %s is not a valid file." format name)
- if (this.normalize == dest.normalize) fail("Source and destination are the same.")
- if (!dest.parent.exists) fail("Destination cannot be created.")
- if (dest.exists && !dest.canWrite) fail("Destination exists but is not writable.")
- if (dest.isDirectory) fail("Destination exists but is a directory.")
-
- lazy val in_s = inputStream()
- lazy val out_s = dest.outputStream()
- lazy val in = in_s.getChannel()
- lazy val out = out_s.getChannel()
-
- try {
- val size = in.size()
- var pos, count = 0L
- while (pos < size) {
- count = (size - pos) min CHUNK
- pos += out.transferFrom(in, pos, count)
- }
- }
- finally List[HasClose](out, out_s, in, in_s) foreach closeQuietly
-
- if (this.length != dest.length)
- fail("Failed to completely copy %s to %s".format(name, dest.name))
-
- if (preserveFileDate)
- dest.lastModified = this.lastModified
-
- true
- }
-
/** Reflection since we're into the java 6+ API.
*/
def setExecutable(executable: Boolean, ownerOnly: Boolean = true): Boolean = {
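
Editor's note: after the cleanup, `writer(append, codec)` is the one writer factory left; it wraps an OutputStreamWriter so the encoding is explicit (java.io.FileWriter cannot take a charset). A small sketch of the same idea using only java.io and scala.io.Codec:

import java.io.{ File => JFile, FileOutputStream, OutputStreamWriter }
import scala.io.Codec

// Encoding-aware file writer in the style kept above: FileWriter has no charset
// parameter, so wrap an OutputStreamWriter around a FileOutputStream instead.
object WriterDemo extends App {
  def writer(file: JFile, append: Boolean, codec: Codec): OutputStreamWriter =
    new OutputStreamWriter(new FileOutputStream(file, append), codec.charSet)

  val tmp = JFile.createTempFile("writer-demo", ".txt")
  val out = writer(tmp, append = false, Codec.UTF8)
  try out.write("grüße\n") finally out.close()
  println(tmp.length + " bytes")   // 8: "ü" and "ß" are two UTF-8 bytes each
  tmp.delete()
}
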
diff --git a/src/reflect/scala/reflect/io/NoAbstractFile.scala b/src/reflect/scala/reflect/io/NoAbstractFile.scala
index 8c88d3abf6..2c59fd8aae 100644
--- a/src/reflect/scala/reflect/io/NoAbstractFile.scala
+++ b/src/reflect/scala/reflect/io/NoAbstractFile.scala
@@ -22,6 +22,7 @@ object NoAbstractFile extends AbstractFile {
def file: JFile = null
def input: InputStream = null
def isDirectory: Boolean = false
+ override def isVirtual: Boolean = true
def iterator: Iterator[AbstractFile] = Iterator.empty
def lastModified: Long = 0L
def lookupName(name: String, directory: Boolean): AbstractFile = null
diff --git a/src/reflect/scala/reflect/io/Path.scala b/src/reflect/scala/reflect/io/Path.scala
index 36fdc04db4..44fb41a1cd 100644
--- a/src/reflect/scala/reflect/io/Path.scala
+++ b/src/reflect/scala/reflect/io/Path.scala
@@ -27,7 +27,7 @@ import scala.language.implicitConversions
*
* @author Paul Phillips
* @since 2.8
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
object Path {
@@ -49,32 +49,18 @@ object Path {
implicit def string2path(s: String): Path = apply(s)
implicit def jfile2path(jfile: JFile): Path = apply(jfile)
- // java 7 style, we don't use it yet
- // object AccessMode extends Enumeration {
- // val EXECUTE, READ, WRITE = Value
- // }
- // def checkAccess(modes: AccessMode*): Boolean = {
- // modes foreach {
- // case EXECUTE => throw new Exception("Unsupported") // can't check in java 5
- // case READ => if (!jfile.canRead()) return false
- // case WRITE => if (!jfile.canWrite()) return false
- // }
- // true
- // }
-
def onlyDirs(xs: Iterator[Path]): Iterator[Directory] = xs filter (_.isDirectory) map (_.toDirectory)
def onlyDirs(xs: List[Path]): List[Directory] = xs filter (_.isDirectory) map (_.toDirectory)
def onlyFiles(xs: Iterator[Path]): Iterator[File] = xs filter (_.isFile) map (_.toFile)
- def onlyFiles(xs: List[Path]): List[File] = xs filter (_.isFile) map (_.toFile)
def roots: List[Path] = java.io.File.listRoots().toList map Path.apply
- def apply(segments: Seq[String]): Path = apply(segments mkString java.io.File.separator)
def apply(path: String): Path = apply(new JFile(path))
- def apply(jfile: JFile): Path =
+ def apply(jfile: JFile): Path = try {
if (jfile.isFile) new File(jfile)
else if (jfile.isDirectory) new Directory(jfile)
else new Path(jfile)
+ } catch { case ex: SecurityException => new Path(jfile) }
/** Avoiding any shell/path issues by only using alphanumerics. */
private[io] def randomPrefix = alphanumeric take 6 mkString ""
@@ -84,19 +70,13 @@ import Path._
/** The Path constructor is private so we can enforce some
* semantics regarding how a Path might relate to the world.
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
class Path private[io] (val jfile: JFile) {
val separator = java.io.File.separatorChar
val separatorStr = java.io.File.separator
- // Validation: this verifies that the type of this object and the
- // contents of the filesystem are in agreement. All objects are
- // valid except File objects whose path points to a directory and
- // Directory objects whose path points to a file.
- def isValid: Boolean = true
-
// conversions
def toFile: File = new File(jfile)
def toDirectory: Directory = new Directory(jfile)
@@ -104,6 +84,7 @@ class Path private[io] (val jfile: JFile) {
def toCanonical: Path = Path(jfile.getCanonicalPath())
def toURI: URI = jfile.toURI()
def toURL: URL = toURI.toURL()
+
/** If this path is absolute, returns it: otherwise, returns an absolute
* path made up of root / this.
*/
@@ -136,7 +117,6 @@ class Path private[io] (val jfile: JFile) {
def name: String = jfile.getName()
def path: String = jfile.getPath()
def normalize: Path = Path(jfile.getAbsolutePath())
- def isRootPath: Boolean = roots exists (_ isSame this)
def resolve(other: Path) = if (other.isAbsolute || isEmpty) other else /(other)
def relativize(other: Path) = {
@@ -152,9 +132,8 @@ class Path private[io] (val jfile: JFile) {
Path(createRelativePath(segments, other.segments))
}
- // derived from identity
- def root: Option[Path] = roots find (this startsWith _)
def segments: List[String] = (path split separator).toList filterNot (_.length == 0)
+
/**
* @return The path of the parent directory, or root if path is already root
*/
@@ -185,10 +164,6 @@ class Path private[io] (val jfile: JFile) {
if (i < 0) ""
else name.substring(i + 1)
}
- // def extension: String = (name lastIndexOf '.') match {
- // case -1 => ""
- // case idx => name drop (idx + 1)
- // }
// compares against extensions in a CASE INSENSITIVE way.
def hasExtension(ext: String, exts: String*) = {
val lower = extension.toLowerCase
@@ -212,23 +187,19 @@ class Path private[io] (val jfile: JFile) {
// Boolean tests
def canRead = jfile.canRead()
def canWrite = jfile.canWrite()
- def exists = jfile.exists()
- def notExists = try !jfile.exists() catch { case ex: SecurityException => false }
+ def exists = try jfile.exists() catch { case ex: SecurityException => false }
- def isFile = jfile.isFile()
- def isDirectory = jfile.isDirectory()
+ def isFile = try jfile.isFile() catch { case ex: SecurityException => false }
+ def isDirectory = try jfile.isDirectory() catch { case ex: SecurityException => false }
def isAbsolute = jfile.isAbsolute()
- def isHidden = jfile.isHidden()
def isEmpty = path.length == 0
// Information
def lastModified = jfile.lastModified()
- def lastModified_=(time: Long) = jfile setLastModified time // should use setXXX function?
def length = jfile.length()
// Boolean path comparisons
def endsWith(other: Path) = segments endsWith other.segments
- def startsWith(other: Path) = segments startsWith other.segments
def isSame(other: Path) = toCanonical == other.toCanonical
def isFresher(other: Path) = lastModified > other.lastModified
@@ -248,7 +219,6 @@ class Path private[io] (val jfile: JFile) {
// deletions
def delete() = jfile.delete()
- def deleteIfExists() = if (jfile.exists()) delete() else false
/** Deletes the path recursively. Returns false on failure.
* Use with caution!
@@ -270,16 +240,6 @@ class Path private[io] (val jfile: JFile) {
length == 0
}
- def touch(modTime: Long = System.currentTimeMillis) = {
- createFile()
- if (isFile)
- lastModified = modTime
- }
-
- // todo
- // def copyTo(target: Path, options ...): Boolean
- // def moveTo(target: Path, options ...): Boolean
-
override def toString() = path
override def equals(other: Any) = other match {
case x: Path => path == x.path
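
Editor's note: several Path changes are about surviving a restrictive SecurityManager: `apply`, `exists`, `isFile` and `isDirectory` now catch SecurityException instead of letting it escape (the removed workaround comment in File.scala above cites Google App Engine as one such environment). A sketch of the guard on plain java.io.File:

import java.io.File

// Under a restrictive SecurityManager these java.io.File calls can throw
// SecurityException rather than return false; wrap them like Path now does.
object SafeChecksDemo extends App {
  def safely(test: => Boolean): Boolean =
    try test catch { case _: SecurityException => false }

  val f = new File("build.xml")
  println(safely(f.exists))        // false instead of an exception when reads are denied
  println(safely(f.isDirectory))
}
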
diff --git a/src/reflect/scala/reflect/io/PlainFile.scala b/src/reflect/scala/reflect/io/PlainFile.scala
index 82b0568657..31df78f995 100644
--- a/src/reflect/scala/reflect/io/PlainFile.scala
+++ b/src/reflect/scala/reflect/io/PlainFile.scala
@@ -3,23 +3,11 @@
* @author Martin Odersky
*/
-
package scala.reflect
package io
import java.io.{ FileInputStream, FileOutputStream, IOException }
-import PartialFunction._
-/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
-object PlainFile {
- /**
- * If the specified File exists, returns an abstract file backed
- * by it. Otherwise, returns null.
- */
- def fromPath(file: Path): PlainFile =
- if (file.isDirectory) new PlainDirectory(file.toDirectory)
- else if (file.isFile) new PlainFile(file)
- else null
-}
+
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) {
override def isDirectory = true
@@ -28,7 +16,7 @@ class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) {
}
/** This class implements an abstract file backed by a File.
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
class PlainFile(val givenPath: Path) extends AbstractFile {
@@ -54,7 +42,7 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
override def sizeOption = Some(givenPath.length.toInt)
override def toString = path
- override def hashCode(): Int = fpath.hashCode
+ override def hashCode(): Int = fpath.hashCode()
override def equals(that: Any): Boolean = that match {
case x: PlainFile => fpath == x.fpath
case _ => false
@@ -77,10 +65,6 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
* specified name. If there is no such file, returns null. The
* argument "directory" tells whether to look for a directory or
* or a regular file.
- *
- * @param name ...
- * @param directory ...
- * @return ...
*/
def lookupName(name: String, directory: Boolean): AbstractFile = {
val child = givenPath / name
diff --git a/src/reflect/scala/reflect/io/Streamable.scala b/src/reflect/scala/reflect/io/Streamable.scala
index 61ec8a4c23..1d51ad7f54 100644
--- a/src/reflect/scala/reflect/io/Streamable.scala
+++ b/src/reflect/scala/reflect/io/Streamable.scala
@@ -17,14 +17,14 @@ import Path.fail
*
* @author Paul Phillips
* @since 2.8
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
object Streamable {
/** Traits which can be viewed as a sequence of bytes. Source types
* which know their length should override def length: Long for more
* efficient method implementations.
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
trait Bytes {
@@ -69,7 +69,7 @@ object Streamable {
}
/** For objects which can be viewed as Chars.
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
trait Chars extends Bytes {
@@ -81,7 +81,6 @@ object Streamable {
*/
def creationCodec: Codec = implicitly[Codec]
- def chars(): BufferedSource = chars(creationCodec)
def chars(codec: Codec): BufferedSource = Source.fromInputStream(inputStream())(codec)
def lines(): Iterator[String] = lines(creationCodec)
@@ -89,8 +88,7 @@ object Streamable {
/** Obtains an InputStreamReader wrapped around a FileInputStream.
*/
- def reader(): InputStreamReader = reader(creationCodec)
- def reader(codec: Codec): InputStreamReader = new InputStreamReader(inputStream, codec.charSet)
+ def reader(codec: Codec): InputStreamReader = new InputStreamReader(inputStream(), codec.charSet)
/** Wraps a BufferedReader around the result of reader().
*/
@@ -108,7 +106,10 @@ object Streamable {
/** Convenience function to import entire file into a String.
*/
def slurp(): String = slurp(creationCodec)
- def slurp(codec: Codec) = chars(codec).mkString
+ def slurp(codec: Codec) = {
+ val src = chars(codec)
+ try src.mkString finally src.close() // Always Be Closing
+ }
}
/** Call a function on something Closeable, finally closing it. */
@@ -117,7 +118,9 @@ object Streamable {
finally stream.close()
def bytes(is: => InputStream): Array[Byte] =
- (new Bytes { def inputStream() = is }).toByteArray
+ (new Bytes {
+ def inputStream() = is
+ }).toByteArray()
def slurp(is: => InputStream)(implicit codec: Codec): String =
new Chars { def inputStream() = is } slurp codec
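
Editor's note: the `slurp(codec)` fix is the classic resource-leak pattern: build the source, read everything, and close it in a finally block. The same shape with a plain scala.io.Source, for reference:

import scala.io.{ Codec, Source }

// Same "Always Be Closing" shape as the slurp fix above, on a plain Source.
object SlurpDemo extends App {
  def slurp(path: String)(implicit codec: Codec): String = {
    val src = Source.fromFile(path)(codec)
    try src.mkString finally src.close()
  }

  // println(slurp("/path/to/some.txt")(Codec.UTF8))   // any readable text file will do
}
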
diff --git a/src/reflect/scala/reflect/io/VirtualDirectory.scala b/src/reflect/scala/reflect/io/VirtualDirectory.scala
index 78713c2ae0..210167e5c6 100644
--- a/src/reflect/scala/reflect/io/VirtualDirectory.scala
+++ b/src/reflect/scala/reflect/io/VirtualDirectory.scala
@@ -11,7 +11,7 @@ import scala.collection.mutable
* An in-memory directory.
*
* @author Lex Spoon
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
class VirtualDirectory(val name: String, maybeContainer: Option[VirtualDirectory])
@@ -26,22 +26,23 @@ extends AbstractFile {
def container = maybeContainer.get
def isDirectory = true
- var lastModified: Long = System.currentTimeMillis
+ override def isVirtual = true
+ val lastModified: Long = System.currentTimeMillis
override def file = null
override def input = sys.error("directories cannot be read")
override def output = sys.error("directories cannot be written")
/** Does this abstract file denote an existing file? */
- def create() { unsupported }
+ def create() { unsupported() }
/** Delete the underlying file or directory (recursively). */
- def delete() { unsupported }
+ def delete() { unsupported() }
/** Returns an abstract file with the given name. It does not
* check that it exists.
*/
- def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported
+ def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported()
private val files = mutable.Map.empty[String, AbstractFile]
@@ -53,20 +54,20 @@ extends AbstractFile {
(files get name filter (_.isDirectory == directory)).orNull
override def fileNamed(name: String): AbstractFile =
- Option(lookupName(name, false)) getOrElse {
+ Option(lookupName(name, directory = false)) getOrElse {
val newFile = new VirtualFile(name, path+'/'+name)
files(name) = newFile
newFile
}
override def subdirectoryNamed(name: String): AbstractFile =
- Option(lookupName(name, true)) getOrElse {
+ Option(lookupName(name, directory = true)) getOrElse {
val dir = new VirtualDirectory(name, Some(this))
files(name) = dir
dir
}
def clear() {
- files.clear();
+ files.clear()
}
}
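
Editor's note: a hedged usage sketch for the in-memory directory, assuming a scala-reflect build that already contains this commit (the `isVirtual` override is introduced here, and the io package is explicitly marked experimental):

import scala.reflect.io.VirtualDirectory

// In-memory directory/file round trip; content is committed when the stream closes.
object VirtualDemo extends App {
  val dir = new VirtualDirectory("(memory)", None)
  val f   = dir.fileNamed("Hello.class")

  val out = f.output
  try out.write(Array[Byte](0xCA.toByte, 0xFE.toByte)) finally out.close()

  println(dir.isVirtual)          // true, per the override added above
  println(f.toByteArray.length)   // 2
}
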
diff --git a/src/reflect/scala/reflect/io/VirtualFile.scala b/src/reflect/scala/reflect/io/VirtualFile.scala
index 95f4429fad..8cc83b6a50 100644
--- a/src/reflect/scala/reflect/io/VirtualFile.scala
+++ b/src/reflect/scala/reflect/io/VirtualFile.scala
@@ -3,7 +3,6 @@
* @author Martin Odersky
*/
-
package scala.reflect
package io
@@ -14,7 +13,7 @@ import java.io.{ File => JFile }
*
* @author Philippe Altherr
* @version 1.0, 23/03/2004
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
class VirtualFile(val name: String, override val path: String) extends AbstractFile {
@@ -33,20 +32,16 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF
case _ => false
}
- //########################################################################
- // Private data
private var content = Array.emptyByteArray
- //########################################################################
- // Public Methods
def absolute = this
/** Returns null. */
- final def file: JFile = null
+ def file: JFile = null
- override def sizeOption: Option[Int] = Some(content.size)
+ override def sizeOption: Option[Int] = Some(content.length)
- def input : InputStream = new ByteArrayInputStream(content);
+ def input : InputStream = new ByteArrayInputStream(content)
override def output: OutputStream = {
new ByteArrayOutputStream() {
@@ -62,10 +57,12 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF
/** Is this abstract file a directory? */
def isDirectory: Boolean = false
+ /** @inheritdoc */
+ override def isVirtual: Boolean = true
+
/** Returns the time that this abstract file was last modified. */
private var _lastModified: Long = 0
def lastModified: Long = _lastModified
- def lastModified_=(x: Long) = _lastModified = x
/** Returns all abstract subfiles of this abstract directory. */
def iterator: Iterator[AbstractFile] = {
@@ -74,20 +71,16 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF
}
/** Does this abstract file denote an existing file? */
- def create() { unsupported }
+ def create() { unsupported() }
/** Delete the underlying file or directory (recursively). */
- def delete() { unsupported }
+ def delete() { unsupported() }
/**
* Returns the abstract file in this abstract directory with the
* specified name. If there is no such file, returns null. The
* argument "directory" tells whether to look for a directory or
* or a regular file.
- *
- * @param name ...
- * @param directory ...
- * @return ...
*/
def lookupName(name: String, directory: Boolean): AbstractFile = {
assert(isDirectory, "not a directory '" + this + "'")
@@ -97,7 +90,5 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF
/** Returns an abstract file with the given name. It does not
* check that it exists.
*/
- def lookupNameUnchecked(name: String, directory: Boolean) = unsupported
-
- //########################################################################
+ def lookupNameUnchecked(name: String, directory: Boolean) = unsupported()
}
diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala
index 3b57721e89..1342fde3c5 100644
--- a/src/reflect/scala/reflect/io/ZipArchive.scala
+++ b/src/reflect/scala/reflect/io/ZipArchive.scala
@@ -7,10 +7,12 @@ package scala.reflect
package io
import java.net.URL
-import java.io.{ IOException, InputStream, ByteArrayInputStream }
+import java.io.{ IOException, InputStream, ByteArrayInputStream, FilterInputStream }
import java.io.{ File => JFile }
import java.util.zip.{ ZipEntry, ZipFile, ZipInputStream }
+import java.util.jar.Manifest
import scala.collection.{ immutable, mutable }
+import scala.collection.convert.WrapAsScala.asScalaIterator
import scala.annotation.tailrec
/** An abstraction for zip files and streams. Everything is written the way
@@ -20,13 +22,10 @@ import scala.annotation.tailrec
* @author Philippe Altherr (original version)
* @author Paul Phillips (this one)
* @version 2.0,
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
object ZipArchive {
- def fromPath(path: String): FileZipArchive = fromFile(new JFile(path))
- def fromPath(path: Path): FileZipArchive = fromFile(path.toFile)
-
/**
* @param file a File
* @return A ZipArchive if `file` is a readable zip file, otherwise null.
@@ -41,10 +40,11 @@ object ZipArchive {
* @return A ZipArchive backed by the given url.
*/
def fromURL(url: URL): URLZipArchive = new URLZipArchive(url)
- def fromURL(url: String): URLZipArchive = fromURL(new URL(url))
- private def dirName(path: String) = splitPath(path, true)
- private def baseName(path: String) = splitPath(path, false)
+ def fromManifestURL(url: URL): AbstractFile = new ManifestResources(url)
+
+ private def dirName(path: String) = splitPath(path, front = true)
+ private def baseName(path: String) = splitPath(path, front = false)
private def splitPath(path0: String, front: Boolean): String = {
val isDir = path0.charAt(path0.length - 1) == '/'
val path = if (isDir) path0.substring(0, path0.length - 1) else path0
@@ -65,13 +65,13 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq
override def underlyingSource = Some(this)
def isDirectory = true
- def lookupName(name: String, directory: Boolean) = unsupported
- def lookupNameUnchecked(name: String, directory: Boolean) = unsupported
- def create() = unsupported
- def delete() = unsupported
- def output = unsupported
- def container = unsupported
- def absolute = unsupported
+ def lookupName(name: String, directory: Boolean) = unsupported()
+ def lookupNameUnchecked(name: String, directory: Boolean) = unsupported()
+ def create() = unsupported()
+ def delete() = unsupported()
+ def output = unsupported()
+ def container = unsupported()
+ def absolute = unsupported()
private def walkIterator(its: Iterator[AbstractFile]): Iterator[AbstractFile] = {
its flatMap { f =>
@@ -79,7 +79,6 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq
else Iterator(f)
}
}
- def deepIterator = walkIterator(iterator)
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
sealed abstract class Entry(path: String) extends VirtualFile(baseName(path), path) {
// have to keep this name for compat with sbt's compiler-interface
@@ -232,3 +231,59 @@ final class URLZipArchive(val url: URL) extends ZipArchive(null) {
case _ => false
}
}
+
+final class ManifestResources(val url: URL) extends ZipArchive(null) {
+ def iterator = {
+ val root = new DirEntry("/")
+ val dirs = mutable.HashMap[String, DirEntry]("/" -> root)
+ val manifest = new Manifest(input)
+ val iter = manifest.getEntries().keySet().iterator().filter(_.endsWith(".class")).map(new ZipEntry(_))
+
+ while (iter.hasNext) {
+ val zipEntry = iter.next()
+ val dir = getDir(dirs, zipEntry)
+ if (zipEntry.isDirectory) dir
+ else {
+ class FileEntry() extends Entry(zipEntry.getName) {
+ override def lastModified = zipEntry.getTime()
+ override def input = resourceInputStream(path)
+ override def sizeOption = None
+ }
+ val f = new FileEntry()
+ dir.entries(f.name) = f
+ }
+ }
+
+ try root.iterator
+ finally dirs.clear()
+ }
+
+ def name = path
+ def path: String = url.getPath() match { case s => s.substring(0, s.lastIndexOf('!')) }
+ def input = url.openStream()
+ def lastModified =
+ try url.openConnection().getLastModified()
+ catch { case _: IOException => 0 }
+
+ override def canEqual(other: Any) = other.isInstanceOf[ManifestResources]
+ override def hashCode() = url.hashCode
+ override def equals(that: Any) = that match {
+ case x: ManifestResources => url == x.url
+ case _ => false
+ }
+
+ private def resourceInputStream(path: String): InputStream = {
+ new FilterInputStream(null) {
+ override def read(): Int = {
+ if(in == null) in = Thread.currentThread().getContextClassLoader().getResourceAsStream(path);
+ if(in == null) throw new RuntimeException(path + " not found")
+ super.read();
+ }
+
+ override def close(): Unit = {
+ super.close();
+ in = null;
+ }
+ }
+ }
+}
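
Editor's note: ManifestResources treats the per-entry sections of a jar's MANIFEST.MF as a read-only file tree. The core of it is plain java.util.jar.Manifest; a hedged sketch that merely lists the `.class` entry names from a manifest URL (the URL in the comment is hypothetical):

import java.net.URL
import java.util.jar.Manifest
import scala.collection.JavaConverters._

// The Manifest plumbing used by ManifestResources above: per-entry sections of
// MANIFEST.MF are exposed via getEntries, keyed by entry name.
object ManifestDemo extends App {
  def classEntries(manifestUrl: URL): List[String] = {
    val in = manifestUrl.openStream()
    try new Manifest(in).getEntries.keySet.asScala.toList filter (_ endsWith ".class")
    finally in.close()
  }

  // Hypothetical location; any manifest with per-entry sections will do:
  // classEntries(new URL("jar:file:/tmp/app.jar!/META-INF/MANIFEST.MF")) foreach println
}
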
diff --git a/src/reflect/scala/reflect/macros/Attachments.scala b/src/reflect/scala/reflect/macros/Attachments.scala
index 007df3b6e2..eeb87fafcc 100644
--- a/src/reflect/scala/reflect/macros/Attachments.scala
+++ b/src/reflect/scala/reflect/macros/Attachments.scala
@@ -56,8 +56,6 @@ abstract class Attachments { self =>
// SI-7018: This used to be an inner class of `Attachments`, but that led to a memory leak in the
// IDE via $outer pointers.
-// Forward compatibility note: This class used to be Attachments$NonemptyAttachments.
-// However it's private, therefore it transcends the compatibility policy for 2.10.x.
private final class NonemptyAttachments[P >: Null](override val pos: P, override val all: Set[Any]) extends Attachments {
type Pos = P
def withPos(newPos: Pos) = new NonemptyAttachments(newPos, all)
diff --git a/src/reflect/scala/reflect/macros/Context.scala b/src/reflect/scala/reflect/macros/Context.scala
index aa1c1db227..f4a4631e53 100644
--- a/src/reflect/scala/reflect/macros/Context.scala
+++ b/src/reflect/scala/reflect/macros/Context.scala
@@ -36,7 +36,8 @@ trait Context extends Aliases
with Typers
with Parsers
with Evals
- with ExprUtils {
+ with ExprUtils
+ with Synthetics {
/** The compile-time universe. */
val universe: Universe
@@ -51,7 +52,7 @@ trait Context extends Aliases
/** The prefix tree from which the macro is selected.
*
- * For a example, for a macro `filter` defined as an instance method on a collection `Coll`,
+ * For example, for a macro `filter` defined as an instance method on a collection `Coll`,
* `prefix` represents an equivalent of `this` for normal instance methods:
*
* {{{
diff --git a/src/reflect/scala/reflect/macros/Enclosures.scala b/src/reflect/scala/reflect/macros/Enclosures.scala
index c48656b366..fd91333dae 100644
--- a/src/reflect/scala/reflect/macros/Enclosures.scala
+++ b/src/reflect/scala/reflect/macros/Enclosures.scala
@@ -15,19 +15,32 @@ trait Enclosures {
/** The tree that undergoes macro expansion.
* Can be useful to get an offset or a range position of the entire tree being processed.
*/
- val macroApplication: Tree
+ def macroApplication: Tree
+
+ /** The semantic role that `macroApplication` plays in the code.
+ */
+ type MacroRole
+
+ /** The role that represents an application of a term macro,
+ * e.g. `M(2)(3)` in `val x = M(2)(3)` or `M(a, b)` in `x match { case x @ M(a, b) => }`.
+ */
+ def APPLY_ROLE: MacroRole
+
+ /** The semantic role that `macroApplication` plays in the code.
+ */
+ def macroRole: MacroRole
/** Contexts that represent macros in-flight, including the current one. Very much like a stack trace, but for macros only.
* Can be useful for interoperating with other macros and for imposing compiler-friendly limits on macro expansion.
*
* Is also priceless for emitting sane error messages for macros that are called by other macros on synthetic (i.e. position-less) trees.
- * In that dire case navigate the ``enclosingMacros'' stack, and it will most likely contain at least one macro with a position-ful macro application.
- * See ``enclosingPosition'' for a default implementation of this logic.
+ * In that dire case navigate the `enclosingMacros` stack, and it will most likely contain at least one macro with a position-ful macro application.
+ * See `enclosingPosition` for a default implementation of this logic.
*
* Unlike `openMacros`, this is a val, which means that it gets initialized when the context is created
* and always stays the same regardless of whatever happens during macro expansion.
*/
- val enclosingMacros: List[Context]
+ def enclosingMacros: List[Context]
/** Types along with corresponding trees for which implicit arguments are currently searched.
* Can be useful to get information about an application with an implicit parameter that is materialized during current macro expansion.
@@ -35,28 +48,56 @@ trait Enclosures {
* Unlike `openImplicits`, this is a val, which means that it gets initialized when the context is created
* and always stays the same regardless of whatever happens during macro expansion.
*/
- val enclosingImplicits: List[(Type, Tree)]
+ def enclosingImplicits: List[(Type, Tree)]
/** Tries to guess a position for the enclosing application.
- * But that is simple, right? Just dereference ``pos'' of ``macroApplication''? Not really.
+ * But that is simple, right? Just dereference `pos` of `macroApplication`? Not really.
 * If we're in a synthetic macro expansion (no positions), we must do our best to infer the position of something that triggered this expansion.
- * Surprisingly, quite often we can do this by navigation the ``enclosingMacros'' stack.
+ * Surprisingly, quite often we can do this by navigating the `enclosingMacros` stack.
*/
- val enclosingPosition: Position
+ def enclosingPosition: Position
/** Tree that corresponds to the enclosing method, or EmptyTree if not applicable.
*/
- val enclosingMethod: Tree
+ @deprecated("Use enclosingDef instead, but be wary of changes in semantics", "2.10.1")
+ def enclosingMethod: Tree
/** Tree that corresponds to the enclosing class, or EmptyTree if not applicable.
*/
- val enclosingClass: Tree
+ @deprecated("Use enclosingImpl instead, but be wary of changes in semantics", "2.10.1")
+ def enclosingClass: Tree
+
+ /** Tree that corresponds to the enclosing DefDef tree.
+ * Throws `EnclosureException` if there's no such enclosing tree.
+ */
+ def enclosingDef: universe.DefDef
+
+ /** Tree that corresponds to the enclosing Template tree.
+ * Throws `EnclosureException` if there's no such enclosing tree.
+ */
+ def enclosingTemplate: universe.Template
+
+ /** Tree that corresponds to the enclosing ImplDef tree (i.e. either ClassDef or ModuleDef).
+ * Throws `EnclosureException` if there's no such enclosing tree.
+ */
+ def enclosingImpl: universe.ImplDef
+
+ /** Tree that corresponds to the enclosing PackageDef tree.
+ * Throws `EnclosureException` if there's no such enclosing tree.
+ */
+ def enclosingPackage: universe.PackageDef
/** Compilation unit that contains this macro application.
*/
- val enclosingUnit: CompilationUnit
+ def enclosingUnit: CompilationUnit
/** Compilation run that contains this macro application.
*/
- val enclosingRun: Run
+ def enclosingRun: Run
+
+ /** Indicates than one of the enclosure methods failed to find a tree
+ * of required type among enclosing trees.
+ */
+ case class EnclosureException(expected: Class[_], enclosingTrees: List[Tree])
+ extends Exception(s"Couldn't find a tree of type $expected among enclosing trees $enclosingTrees")
} \ No newline at end of file
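
Editor's note: for context, a hedged sketch of how a 2.10-style def macro consumes this trait. `enclosingPosition` is the documented fallback when `macroApplication` carries no useful position (for instance when expanded from another macro on synthetic trees); the macro below simply emits a warning at that position. Macro definitions must be compiled before (in a separate run from) their call sites.

import scala.language.experimental.macros
import scala.reflect.macros.Context

// Minimal def macro showing enclosingPosition as the reporting anchor.
object TraceMacro {
  def trace(msg: String): Unit = macro traceImpl

  def traceImpl(c: Context)(msg: c.Expr[String]): c.Expr[Unit] = {
    import c.universe._
    // Point diagnostics at the best-guess position rather than macroApplication.pos,
    // which may be synthetic (position-less) when invoked from another macro.
    c.warning(c.enclosingPosition, "trace expanded here")
    reify(println(msg.splice))
  }
}
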
diff --git a/src/reflect/scala/reflect/macros/Names.scala b/src/reflect/scala/reflect/macros/Names.scala
index 8bbaa5f848..7e2ac5e02d 100644
--- a/src/reflect/scala/reflect/macros/Names.scala
+++ b/src/reflect/scala/reflect/macros/Names.scala
@@ -11,13 +11,27 @@ trait Names {
self: Context =>
/** Creates a unique string. */
+ @deprecated("Use freshName instead", "2.11.0")
def fresh(): String
/** Creates a unique string having a given prefix. */
+ @deprecated("Use freshName instead", "2.11.0")
def fresh(name: String): String
/** Creates a unique name having a given name as a prefix and
* having the same flavor (term name or type name) as the given name.
*/
+ @deprecated("Use freshName instead", "2.11.0")
def fresh[NameType <: Name](name: NameType): NameType
+
+ /** Creates a unique string. */
+ def freshName(): String
+
+ /** Creates a unique string having a given prefix. */
+ def freshName(name: String): String
+
+ /** Creates a unique name having a given name as a prefix and
+ * having the same flavor (term name or type name) as the given name.
+ */
+ def freshName[NameType <: Name](name: NameType): NameType
}
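
A rough sketch of the non-deprecated `freshName` overloads in a hypothetical macro implementation (`Fresh`, `once` and `onceImpl` are made-up names):

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object Fresh {
      // Binds the argument to a uniquely named local val before returning it.
      def once(body: Int): Int = macro onceImpl

      def onceImpl(c: Context)(body: c.Expr[Int]): c.Expr[Int] = {
        import c.universe._
        // e.g. "tmp$1", unique per expansion; c.fresh would still work but is deprecated.
        val tmp = newTermName(c.freshName("tmp$"))
        c.Expr[Int](Block(List(ValDef(Modifiers(), tmp, TypeTree(), body.tree)), Ident(tmp)))
      }
    }
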
diff --git a/src/reflect/scala/reflect/macros/Parsers.scala b/src/reflect/scala/reflect/macros/Parsers.scala
index 93a763792c..b4b93da3fa 100644
--- a/src/reflect/scala/reflect/macros/Parsers.scala
+++ b/src/reflect/scala/reflect/macros/Parsers.scala
@@ -19,4 +19,4 @@ trait Parsers {
/** Indicates an error during [[scala.reflect.macros.Parsers#parse]].
*/
-case class ParseException(val pos: scala.reflect.api.Position, val msg: String) extends Exception(msg)
+case class ParseException(pos: scala.reflect.api.Position, msg: String) extends Exception(msg)
diff --git a/src/reflect/scala/reflect/macros/Reifiers.scala b/src/reflect/scala/reflect/macros/Reifiers.scala
index 3db7b9af02..1eae3e3fce 100644
--- a/src/reflect/scala/reflect/macros/Reifiers.scala
+++ b/src/reflect/scala/reflect/macros/Reifiers.scala
@@ -11,16 +11,16 @@ trait Reifiers {
self: Context =>
/** Given a tree, generate a tree that when compiled and executed produces the original tree.
- * For more information and examples see the documentation for ``Universe.reify''.
+ * For more information and examples see the documentation for `Universe.reify`.
*
- * The produced tree will be bound to the specified ``universe'' and ``mirror''.
- * Possible values for ``universe'' include ``universe.treeBuild.mkRuntimeUniverseRef''.
- * Possible values for ``mirror'' include ``EmptyTree'' (in that case the reifier will automatically pick an appropriate mirror).
+ * The produced tree will be bound to the specified `universe` and `mirror`.
+ * Possible values for `universe` include `universe.treeBuild.mkRuntimeUniverseRef`.
+ * Possible values for `mirror` include `EmptyTree` (in that case the reifier will automatically pick an appropriate mirror).
*
- * This function is deeply connected to ``Universe.reify'', a macro that reifies arbitrary expressions into runtime trees.
- * They do very similar things (``Universe.reify'' calls ``Context.reifyTree'' to implement itself), but they operate on different metalevels (see below).
+ * This function is deeply connected to `Universe.reify`, a macro that reifies arbitrary expressions into runtime trees.
+ * They do very similar things (`Universe.reify` calls `Context.reifyTree` to implement itself), but they operate on different metalevels (see below).
*
- * Let's study the differences between ``Context.reifyTree'' and ``Universe.reify'' on an example of using them inside a ``fooMacro'' macro:
+ * Let's study the differences between `Context.reifyTree` and `Universe.reify` with an example of using them inside a `fooMacro` macro:
*
* * Since reify itself is a macro, it will be executed when fooMacro is being compiled (metalevel -1)
* and will produce a tree that when evaluated during macro expansion of fooMacro (metalevel 0) will recreate the input tree.
@@ -39,7 +39,7 @@ trait Reifiers {
* * The result of compiling and running the result of reify will be bound to the Universe that called reify.
* This is possible because it's a macro, so it can generate whatever code it wishes.
*
- * * The result of compiling and running the result of reifyTree will be the ``prefix'' that needs to be passed explicitly.
+ * * The result of compiling and running the result of reifyTree will be the `prefix` that needs to be passed explicitly.
* This happens because the Universe of the evaluated result is from a different metalevel than the Context that called reify.
*
* Typical usage of this function is to retain some of the trees received/created by a macro
@@ -48,13 +48,13 @@ trait Reifiers {
def reifyTree(universe: Tree, mirror: Tree, tree: Tree): Tree
/** Given a type, generate a tree that when compiled and executed produces the original type.
- * The produced tree will be bound to the specified ``universe'' and ``mirror''.
- * For more information and examples see the documentation for ``Context.reifyTree'' and ``Universe.reify''.
+ * The produced tree will be bound to the specified `universe` and `mirror`.
+ * For more information and examples see the documentation for `Context.reifyTree` and `Universe.reify`.
*/
def reifyType(universe: Tree, mirror: Tree, tpe: Type, concrete: Boolean = false): Tree
/** Given a type, generate a tree that when compiled and executed produces the runtime class of the original type.
- * If ``concrete'' is true, then this function will bail on types, who refer to abstract types (like `ClassTag` does).
+ * If `concrete` is true, then this function will bail on types that refer to abstract types (like `ClassTag` does).
*/
def reifyRuntimeClass(tpe: Type, concrete: Boolean = true): Tree
@@ -86,10 +86,10 @@ trait Reifiers {
* Such errors represent one of the standard ways for reification to go wrong, e.g.
* an attempt to create a `TypeTag` from a weak type.
*/
-case class ReificationException(val pos: scala.reflect.api.Position, val msg: String) extends Exception(msg)
+case class ReificationException(pos: scala.reflect.api.Position, msg: String) extends Exception(msg)
/** Indicates an unexpected error during one of the `reifyXXX` methods in [[scala.reflect.macros.Reifiers]].
* Such errors wrap random crashes in reification logic and are distinguished from expected [[scala.reflect.macros.ReificationException]]s
* so that the latter can be reported as compilation errors, while the former manifest themselves as compiler crashes.
*/
-case class UnexpectedReificationException(val pos: scala.reflect.api.Position, val msg: String, val cause: Throwable = null) extends Exception(msg, cause)
+case class UnexpectedReificationException(pos: scala.reflect.api.Position, msg: String, cause: Throwable = null) extends Exception(msg, cause)
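
A condensed sketch of the metalevel contrast described in the scaladoc above, assuming a 2.10-style macro implementation (`ReifySketch` and `fooMacroImpl` are only illustrative names):

    import scala.reflect.macros.Context

    object ReifySketch {
      def fooMacroImpl(c: Context)(x: c.Expr[Int]): c.Expr[Any] = {
        import c.universe._
        // Universe.reify: the expanded program just computes x + 1 at its own runtime.
        val incremented: c.Expr[Int] = reify { x.splice + 1 }   // shown for contrast only
        // Context.reifyTree: the expanded program, when run, rebuilds a runtime-reflection
        // Tree for the argument, bound to scala.reflect.runtime.universe via mkRuntimeUniverseRef.
        val treeOfX: Tree = c.reifyTree(treeBuild.mkRuntimeUniverseRef, EmptyTree, x.tree)
        c.Expr[Any](treeOfX)
      }
    }
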
diff --git a/src/reflect/scala/reflect/macros/Synthetics.scala b/src/reflect/scala/reflect/macros/Synthetics.scala
new file mode 100644
index 0000000000..14c6c930b3
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/Synthetics.scala
@@ -0,0 +1,106 @@
+package scala.reflect
+package macros
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * A slice of [[scala.reflect.macros.Context the Scala macros context]] that
+ * exposes functions to introduce synthetic definitions.
+ *
+ * @define TOPLEVEL_TREE A top-level tree is a tree that represents a non-inner class or object in one of the currently compiled source files.
+ * Note that top-level isn't equivalent to [[scala.reflect.api.Symbols#SymbolApi.isStatic]],
+ * because static also embraces definitions nested in static objects.
+ *
+ * @define INTRODUCE_TOP_LEVEL Allowed definitions include classes (represented by `ClassDef` trees), traits (represented
+ * by `ClassDef` trees having the `TRAIT` flag set in `mods`) and objects (represented by `ModuleDef` trees).
+ *
+ * The definitions are put into the package with a prototype provided in `packagePrototype`.
+ * Supported prototypes are (see [[PackageSpec]] for more details):
+ * * Strings and names representing a fully-qualified name of the package
+ * * Trees that can work as package ids
+ * * Package or package class symbols
+ *
+ * Typical value for a package prototype is a fully-qualified name in a string.
+ * For example, to generate a class available at `foo.bar.Test`, call this method as follows:
+ *
+ * introduceTopLevel("foo.bar", ClassDef(<mods>, TypeName("Test"), <tparams>, <template>))
+ *
+ * It is possible to add definitions to the empty package by using `nme.EMPTY_PACKAGE_NAME.toString`, but
+ * that's not recommended, since such definitions cannot be seen from outside the empty package.
+ *
+ * Only the multi-parameter overload of this method can be used to introduce companions.
+ * If companions are introduced by two different calls, then they will be put into different virtual files, and `scalac`
+ * will show an error about companions being defined in different files. By the way, this also means that there's currently no way
+ * to define a companion for an existing class or module
+ */
+trait Synthetics {
+ self: Context =>
+
+ import universe._
+
+ /** Looks up a top-level definition tree with a given fully-qualified name
+ * (term name for modules, type name for classes). $TOPLEVEL_TREE.
+ * If such a tree does not exist, returns `EmptyTree`.
+ */
+ def topLevelDef(name: Name): Tree
+
+ /** Returns a reference to a top-level definition tree with a given fully-qualified name
+ * (term name for modules, type name for classes). $TOPLEVEL_TREE.
+ * If such a tree does not exist, returns `EmptyTree`.
+ */
+ def topLevelRef(name: Name): Tree
+
+ /** Adds a top-level definition to the compiler's symbol table. $INTRODUCE_TOP_LEVEL.
+ *
+ * Returns a fully-qualified reference to the introduced definition.
+ */
+ def introduceTopLevel[T: PackageSpec](packagePrototype: T, definition: ImplDef): RefTree
+
+ /** Adds a list of top-level definitions to the compiler's symbol table. $INTRODUCE_TOP_LEVEL.
+ *
+ * Returns a list of fully-qualified references to the introduced definitions.
+ */
+ def introduceTopLevel[T: PackageSpec](packagePrototype: T, definitions: ImplDef*): List[RefTree]
+
+ /** A factory which can create a package def from a prototype and a list of declarations.
+ */
+ trait PackageSpec[T] { def mkPackageDef(prototype: T, stats: List[Tree]): PackageDef }
+
+ /** Hosts supported package specs.
+ */
+ object PackageSpec {
+ /** Package def can be created from a fully-qualified name and a list of definitions.
+ * The name is converted into an Ident or a chain of Selects.
+ */
+ implicit val stringIsPackageSpec = new PackageSpec[String] {
+ def mkPackageDef(prototype: String, stats: List[Tree]): PackageDef = self.mkPackageDef(prototype, stats)
+ }
+
+ /** Package def can be created from a fully-qualified term name and a list of definitions.
+ * The name is converted into an Ident or a chain of Selects.
+ */
+ implicit val termNameIsPackageSpec = new PackageSpec[TermName] {
+ def mkPackageDef(prototype: TermName, stats: List[Tree]): PackageDef = self.mkPackageDef(prototype, stats)
+ }
+
+ /** Package def can be created from a package id tree and a list of definitions.
+ * If the tree is not a valid package id, i.e. is not a term-name ident or a chain of term-name selects,
+ * then the produced PackageDef will fail compilation at some point in the future.
+ */
+ implicit val refTreeIsPackageSpec = new PackageSpec[RefTree] {
+ def mkPackageDef(prototype: RefTree, stats: List[Tree]): PackageDef = self.mkPackageDef(prototype, stats)
+ }
+
+ /** Package def can be created from a package/package class symbol and a list of definitions.
+ * If the provided symbol is not a package symbol or a package class symbol, package construction will throw an exception.
+ */
+ implicit val SymbolIsPackageSpec = new PackageSpec[Symbol] {
+ def mkPackageDef(prototype: Symbol, stats: List[Tree]): PackageDef = self.mkPackageDef(prototype, stats)
+ }
+ }
+
+ protected def mkPackageDef(name: String, stats: List[Tree]): PackageDef
+ protected def mkPackageDef(name: TermName, stats: List[Tree]): PackageDef
+ protected def mkPackageDef(tree: RefTree, stats: List[Tree]): PackageDef
+ protected def mkPackageDef(sym: Symbol, stats: List[Tree]): PackageDef
+}
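
A minimal sketch of the call shape for `introduceTopLevel` (EXPERIMENTAL). It assumes the `Context` mixes in this new `Synthetics` slice, that the caller already has a well-formed `ClassDef` or `ModuleDef`, and `foo.bar` is just an illustrative package name:

    import scala.reflect.macros.Context

    object SyntheticsSketch {
      // Installs an already well-formed ClassDef/ModuleDef under foo.bar and returns a
      // fully-qualified reference to it. Strings, TermNames, package-id RefTrees and
      // package Symbols are all accepted as the first argument via the PackageSpec instances above.
      def install(c: Context)(definition: c.universe.ImplDef): c.universe.RefTree =
        c.introduceTopLevel("foo.bar", definition)
    }
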
diff --git a/src/reflect/scala/reflect/macros/TreeBuilder.scala b/src/reflect/scala/reflect/macros/TreeBuilder.scala
index 204dc40858..19230010e6 100644
--- a/src/reflect/scala/reflect/macros/TreeBuilder.scala
+++ b/src/reflect/scala/reflect/macros/TreeBuilder.scala
@@ -11,7 +11,6 @@ abstract class TreeBuilder {
val global: Universe
import global._
- import definitions._
/** Builds a reference to a value whose type is the given stable prefix.
* The type must be suitable for this. For example, it
@@ -28,19 +27,25 @@ abstract class TreeBuilder {
def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree
/** Builds a typed reference to given symbol with given stable prefix. */
- def mkAttributedRef(pre: Type, sym: Symbol): Tree
+ def mkAttributedRef(pre: Type, sym: Symbol): RefTree
/** Builds a typed reference to given symbol. */
- def mkAttributedRef(sym: Symbol): Tree
+ def mkAttributedRef(sym: Symbol): RefTree
+
+ /** Builds an untyped reference to given symbol. Requires the symbol to be static. */
+ def mkUnattributedRef(sym: Symbol): RefTree
+
+ /** Builds an untyped reference to symbol with given name. Requires the symbol to be static. */
+ def mkUnattributedRef(fullName: Name): RefTree
/** Builds a typed This reference to given symbol. */
- def mkAttributedThis(sym: Symbol): Tree
+ def mkAttributedThis(sym: Symbol): This
/** Builds a typed Ident with an underlying symbol. */
- def mkAttributedIdent(sym: Symbol): Tree
+ def mkAttributedIdent(sym: Symbol): RefTree
/** Builds a typed Select with an underlying symbol. */
- def mkAttributedSelect(qual: Tree, sym: Symbol): Tree
+ def mkAttributedSelect(qual: Tree, sym: Symbol): RefTree
/** A creator for method calls, e.g. fn[T1, T2, ...](v1, v2, ...)
* There are a number of variations.
@@ -67,6 +72,6 @@ abstract class TreeBuilder {
def mkNullaryCall(method: Symbol, targs: List[Type]): Tree
- /** A tree that refers to the runtime reflexive universe, ``scala.reflect.runtime.universe''. */
+ /** A tree that refers to the runtime reflection universe, `scala.reflect.runtime.universe`. */
def mkRuntimeUniverseRef: Tree
}
diff --git a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala
index 427e7854b2..09a2373205 100644
--- a/src/reflect/scala/reflect/macros/Typers.scala
+++ b/src/reflect/scala/reflect/macros/Typers.scala
@@ -88,4 +88,4 @@ trait Typers {
/** Indicates an error during one of the methods in [[scala.reflect.macros.Typers]].
*/
-case class TypecheckException(val pos: scala.reflect.api.Position, val msg: String) extends Exception(msg)
+case class TypecheckException(pos: scala.reflect.api.Position, msg: String) extends Exception(msg)
diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala
index 4e76f7c408..31f3192a85 100644
--- a/src/reflect/scala/reflect/macros/Universe.scala
+++ b/src/reflect/scala/reflect/macros/Universe.scala
@@ -114,7 +114,7 @@ abstract class Universe extends scala.reflect.api.Universe {
def setPos(newpos: Position): Tree
/** Sets the `tpe` of the tree. Returns `Unit`. */
- def tpe_=(t: Type): Unit
+ @deprecated("Use setType", "2.11.0") def tpe_=(t: Type): Unit
/** Sets the `tpe` of the tree. Returns the tree itself. */
def setType(tp: Type): Tree
@@ -238,4 +238,4 @@ abstract class Universe extends scala.reflect.api.Universe {
/** The AST that corresponds to this compilation unit. */
def body: Tree
}
-} \ No newline at end of file
+}
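
A tiny illustration of the deprecation above: prefer the fluent `setType` setter, which returns the tree, over the deprecated `tpe_=` assignment (the helper name `retype` is made up):

    object SetTypeSketch {
      // `tree.tpe = tp` still compiles but is deprecated since 2.11.0 and returns Unit.
      def retype(u: scala.reflect.macros.Universe)(tree: u.Tree, tp: u.Type): u.Tree =
        tree.setType(tp)
    }
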
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index 3442e3d22e..c5c28ad3e9 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -17,12 +17,9 @@ import internal.ClassfileConstants._
import internal.pickling.UnPickler
import scala.collection.mutable.{ HashMap, ListBuffer }
import internal.Flags._
-//import scala.tools.nsc.util.ScalaClassLoader
-//import scala.tools.nsc.util.ScalaClassLoader._
import ReflectionUtils.{staticSingletonInstance, innerSingletonInstance}
import scala.language.existentials
import scala.runtime.{ScalaRunTime, BoxesRunTime}
-import scala.reflect.internal.util.Collections._
private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { thisUniverse: SymbolTable =>
@@ -285,6 +282,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
if (!symbol.isMutable) ErrorSetImmutableField(symbol)
jfield.set(receiver, value)
}
+ def bind(newReceiver: Any) = new JavaFieldMirror(newReceiver, symbol)
override def toString = s"field mirror for ${symbol.fullName} (bound to $receiver)"
}
@@ -321,7 +319,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
lazy val bytecodefulObjectMethods = Set[Symbol](Object_clone, Object_equals, Object_finalize, Object_hashCode, Object_toString,
Object_notify, Object_notifyAll) ++ ObjectClass.info.member(nme.wait_).asTerm.alternatives.map(_.asMethod)
private def isBytecodelessMethod(meth: MethodSymbol): Boolean = {
- if (isGetClass(meth) || isStringConcat(meth) || meth.owner.isPrimitiveValueClass || meth == Predef_classOf || meth.isTermMacro) return true
+ if (isGetClass(meth) || isStringConcat(meth) || meth.owner.isPrimitiveValueClass || meth == Predef_classOf || meth.isMacro) return true
bytecodelessMethodOwners(meth.owner) && !bytecodefulObjectMethods(meth)
}
@@ -332,7 +330,16 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private def mkJavaMethodMirror[T: ClassTag](receiver: T, symbol: MethodSymbol): JavaMethodMirror = {
if (isBytecodelessMethod(symbol)) new JavaBytecodelessMethodMirror(receiver, symbol)
else if (symbol.paramss.flatten exists (p => isByNameParamType(p.info))) new JavaByNameMethodMirror(receiver, symbol)
- else new JavaVanillaMethodMirror(receiver, symbol)
+ else {
+ symbol.paramss.flatten.length match {
+ case 0 => new JavaVanillaMethodMirror0(receiver, symbol)
+ case 1 => new JavaVanillaMethodMirror1(receiver, symbol)
+ case 2 => new JavaVanillaMethodMirror2(receiver, symbol)
+ case 3 => new JavaVanillaMethodMirror3(receiver, symbol)
+ case 4 => new JavaVanillaMethodMirror4(receiver, symbol)
+ case _ => new JavaVanillaMethodMirror(receiver, symbol)
+ }
+ }
}
private abstract class JavaMethodMirror(val symbol: MethodSymbol)
@@ -343,8 +350,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
jmeth
}
+ def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args.asInstanceOf[Seq[AnyRef]]: _*)
+
def jinvoke(jmeth: jMethod, receiver: Any, args: Seq[Any]): Any = {
- val result = jmeth.invoke(receiver, args.asInstanceOf[Seq[AnyRef]]: _*)
+ val result = jinvokeraw(jmeth, receiver, args)
if (jmeth.getReturnType == java.lang.Void.TYPE) ()
else result
}
@@ -354,11 +363,43 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private class JavaVanillaMethodMirror(val receiver: Any, symbol: MethodSymbol)
extends JavaMethodMirror(symbol) {
+ def bind(newReceiver: Any) = new JavaVanillaMethodMirror(newReceiver, symbol)
def apply(args: Any*): Any = jinvoke(jmeth, receiver, args)
}
+ private class JavaVanillaMethodMirror0(receiver: Any, symbol: MethodSymbol)
+ extends JavaVanillaMethodMirror(receiver, symbol) {
+ override def bind(newReceiver: Any) = new JavaVanillaMethodMirror0(newReceiver, symbol)
+ override def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver)
+ }
+
+ private class JavaVanillaMethodMirror1(receiver: Any, symbol: MethodSymbol)
+ extends JavaVanillaMethodMirror(receiver, symbol) {
+ override def bind(newReceiver: Any) = new JavaVanillaMethodMirror1(newReceiver, symbol)
+ override def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef])
+ }
+
+ private class JavaVanillaMethodMirror2(receiver: Any, symbol: MethodSymbol)
+ extends JavaVanillaMethodMirror(receiver, symbol) {
+ override def bind(newReceiver: Any) = new JavaVanillaMethodMirror2(newReceiver, symbol)
+ override def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef])
+ }
+
+ private class JavaVanillaMethodMirror3(receiver: Any, symbol: MethodSymbol)
+ extends JavaVanillaMethodMirror(receiver, symbol) {
+ override def bind(newReceiver: Any) = new JavaVanillaMethodMirror3(newReceiver, symbol)
+ override def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef], args(2).asInstanceOf[AnyRef])
+ }
+
+ private class JavaVanillaMethodMirror4(receiver: Any, symbol: MethodSymbol)
+ extends JavaVanillaMethodMirror(receiver, symbol) {
+ override def bind(newReceiver: Any) = new JavaVanillaMethodMirror4(newReceiver, symbol)
+ override def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef], args(2).asInstanceOf[AnyRef], args(3).asInstanceOf[AnyRef])
+ }
+
private class JavaByNameMethodMirror(val receiver: Any, symbol: MethodSymbol)
extends JavaMethodMirror(symbol) {
+ def bind(newReceiver: Any) = new JavaByNameMethodMirror(newReceiver, symbol)
def apply(args: Any*): Any = {
val transformed = map2(args.toList, symbol.paramss.flatten)((arg, param) => if (isByNameParamType(param.info)) () => arg else arg)
jinvoke(jmeth, receiver, transformed)
@@ -367,6 +408,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private class JavaBytecodelessMethodMirror[T: ClassTag](val receiver: T, symbol: MethodSymbol)
extends JavaMethodMirror(symbol) {
+ def bind(newReceiver: Any) = new JavaBytecodelessMethodMirror(newReceiver.asInstanceOf[T], symbol)
def apply(args: Any*): Any = {
// checking type conformance is too much of a hassle, so we don't do it here
// actually it's not even necessary, because we manually dispatch arguments below
@@ -378,7 +420,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
val varargMatch = args.length >= params.length - 1 && isVarArgsList(params)
if (!perfectMatch && !varargMatch) {
val n_arguments = if (isVarArgsList(params)) s"${params.length - 1} or more" else s"${params.length}"
- var s_arguments = if (params.length == 1 && !isVarArgsList(params)) "argument" else "arguments"
+ val s_arguments = if (params.length == 1 && !isVarArgsList(params)) "argument" else "arguments"
throw new ScalaReflectionException(s"${showMethodSig(symbol)} takes $n_arguments $s_arguments")
}
@@ -415,7 +457,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
case sym if isStringConcat(sym) => receiver.toString + objArg0
case sym if sym.owner.isPrimitiveValueClass => invokePrimitiveMethod
case sym if sym == Predef_classOf => fail("Predef.classOf is a compile-time function")
- case sym if sym.isTermMacro => fail(s"${symbol.fullName} is a macro, i.e. a compile-time function")
+ case sym if sym.isMacro => fail(s"${symbol.fullName} is a macro, i.e. a compile-time function")
case _ => abort(s"unsupported symbol $symbol when invoking $this")
}
}
@@ -423,6 +465,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private class JavaConstructorMirror(val outer: AnyRef, val symbol: MethodSymbol)
extends MethodMirror {
+ def bind(newReceiver: Any) = new JavaConstructorMirror(newReceiver.asInstanceOf[AnyRef], symbol)
override val receiver = outer
lazy val jconstr = {
val jconstr = constructorToJava(symbol)
@@ -464,11 +507,11 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
def erasure = symbol.moduleClass.asClass
def isStatic = true
def instance = {
- if (symbol.owner.isPackageClass)
+ if (symbol.isTopLevel)
staticSingletonInstance(classLoader, symbol.fullName)
else
if (outer == null) staticSingletonInstance(classToJava(symbol.moduleClass.asClass))
- else innerSingletonInstance(outer, symbol.name)
+ else innerSingletonInstance(outer, symbol.name.toString)
}
override def toString = s"module mirror for ${symbol.fullName} (bound to $outer)"
}
@@ -495,13 +538,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
Class.forName(path, true, classLoader)
/** Does `path` correspond to a Java class with that fully qualified name in the current class loader? */
- def tryJavaClass(path: String): Option[jClass[_]] =
- try {
- Some(javaClass(path))
- } catch {
- case (_: ClassNotFoundException) | (_: NoClassDefFoundError) | (_: IncompatibleClassChangeError) =>
- None
- }
+ def tryJavaClass(path: String): Option[jClass[_]] = (
+ try Some(javaClass(path))
+ catch { case ex @ (_: LinkageError | _: ClassNotFoundException) => None } // TODO - log
+ )
/** The mirror that corresponds to the classloader that originally defined the given Java class */
def mirrorDefining(jclazz: jClass[_]): JavaMirror = {
@@ -574,7 +614,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
case None =>
// class does not have a Scala signature; it's a Java class
info("translating reflection info for Java " + jclazz) //debug
- initClassModule(clazz, module, new FromJavaClassCompleter(clazz, module, jclazz))
+ initClassAndModule(clazz, module, new FromJavaClassCompleter(clazz, module, jclazz))
}
}
} catch {
@@ -686,9 +726,9 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
def enter(sym: Symbol, mods: Int) =
(if (jModifier.isStatic(mods)) module.moduleClass else clazz).info.decls enter sym
- for (jinner <- jclazz.getDeclaredClasses) {
- enter(jclassAsScala(jinner, clazz), jinner.getModifiers)
- }
+ for (jinner <- jclazz.getDeclaredClasses)
+ jclassAsScala(jinner) // inner class is entered as a side-effect
+ // no need to call enter explicitly
pendingLoadActions = { () =>
@@ -848,20 +888,6 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
}
/**
- * The Scala field corresponding to given Java field.
- * @param jfield The Java field
- * @return A Scala field object that corresponds to `jfield`.
- * // ??? should we return the getter instead?
- */
- def fieldToScala(jfield: jField): TermSymbol =
- toScala(fieldCache, jfield)(_ fieldToScala1 _)
-
- private def fieldToScala1(jfield: jField): TermSymbol = {
- val owner = followStatic(classToScala(jfield.getDeclaringClass), jfield.getModifiers)
- (lookup(owner, jfield.getName) suchThat (!_.isMethod) orElse jfieldAsScala(jfield)).asTerm
- }
-
- /**
* The Scala package corresponding to given Java package
*/
def packageToScala(jpkg: jPackage): ModuleSymbol = packageCache.toScala(jpkg) {
@@ -1046,14 +1072,15 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
* @param jclazz The Java class
* @return A Scala class symbol that wraps all reflection info of `jclazz`
*/
- private def jclassAsScala(jclazz: jClass[_]): Symbol = jclassAsScala(jclazz, sOwner(jclazz))
+ private def jclassAsScala(jclazz: jClass[_]): ClassSymbol =
+ toScala(classCache, jclazz)(_ jclassAsScala1 _)
- private def jclassAsScala(jclazz: jClass[_], owner: Symbol): ClassSymbol = {
+ private def jclassAsScala1(jclazz: jClass[_]): ClassSymbol = {
+ val owner = sOwner(jclazz)
val name = scalaSimpleName(jclazz)
val completer = (clazz: Symbol, module: Symbol) => new FromJavaClassCompleter(clazz, module, jclazz)
- val (clazz, module) = createClassModule(owner, name, completer)
- classCache enter (jclazz, clazz)
- clazz
+
+ initAndEnterClassAndModule(owner, name, completer)._1
}
/**
@@ -1118,7 +1145,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
constructorCache enter (jconstr, constr)
val tparams = jconstr.getTypeParameters.toList map createTypeParameter
val paramtpes = jconstr.getGenericParameterTypes.toList map typeToScala
- setMethType(constr, tparams, paramtpes, clazz.tpe)
+ setMethType(constr, tparams, paramtpes, clazz.tpe_*)
constr setInfo GenPolyType(tparams, MethodType(clazz.newSyntheticValueParams(paramtpes), clazz.tpe))
importPrivateWithinFromJavaFlags(constr, jconstr.getModifiers)
copyAnnotations(constr, jconstr)
@@ -1127,13 +1154,6 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
// -------------------- Scala to Java -----------------------------------
- /** Optionally, the Java package corresponding to a given Scala package, or None if no such Java package exists.
- * @param pkg The Scala package
- */
- def packageToJavaOption(pkg: ModuleSymbol): Option[jPackage] = packageCache.toJavaOption(pkg) {
- Option(jPackage.getPackage(pkg.fullName.toString))
- }
-
/** The Java class corresponding to given Scala class.
* Note: This only works for
* - top-level classes
@@ -1149,11 +1169,11 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
valueClassToJavaType(clazz)
else if (clazz == ArrayClass)
noClass
- else if (clazz.owner.isPackageClass)
+ else if (clazz.isTopLevel)
javaClass(clazz.javaClassName)
else if (clazz.owner.isClass) {
val childOfClass = !clazz.owner.isModuleClass
- val childOfTopLevel = clazz.owner.owner.isPackageClass
+ val childOfTopLevel = clazz.owner.isTopLevel
val childOfTopLevelObject = clazz.owner.isModuleClass && childOfTopLevel
// suggested in https://issues.scala-lang.org/browse/SI-4023?focusedCommentId=54759#comment-54759
@@ -1178,7 +1198,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
else sym.name.toString
/** The Java field corresponding to a given Scala field.
- * @param meth The Scala field.
+ * @param fld The Scala field.
*/
def fieldToJava(fld: TermSymbol): jField = fieldCache.toJava(fld) {
val jclazz = classToJava(fld.owner.asClass)
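
A small usage sketch of the `bind` operation that the mirror classes above now implement; it assumes the matching `bind` declarations were added to `scala.reflect.api.Mirrors` in the same change set, and `Greeter`/`BindDemo` are made-up names:

    import scala.reflect.runtime.{universe => ru}

    class Greeter(greeting: String) { def greet(name: String) = greeting + ", " + name }

    object BindDemo extends App {
      val mirror  = ru.runtimeMirror(getClass.getClassLoader)
      val greetM  = ru.typeOf[Greeter].declaration(ru.newTermName("greet")).asMethod
      val french  = new Greeter("Bonjour")
      val english = new Greeter("Hello")
      val mm      = mirror.reflect(french).reflectMethod(greetM)
      println(mm("Scala"))                 // Bonjour, Scala
      println(mm.bind(english)("Scala"))   // Hello, Scala -- same method symbol, new receiver
    }
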
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index e18435d5b0..a130013398 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -1,8 +1,6 @@
package scala.reflect
package runtime
-import internal.{SomePhase, NoPhase, Phase, TreeGen}
-
/** An implementation of [[scala.reflect.api.Universe]] for runtime reflection using JVM classloaders.
*
* Should not be instantiated directly, use [[scala.reflect.runtime.universe]] instead.
@@ -11,13 +9,12 @@ import internal.{SomePhase, NoPhase, Phase, TreeGen}
*/
class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.SymbolTable { self =>
- def picklerPhase = SomePhase
-
+ def inform(msg: String): Unit = log(msg)
+ def picklerPhase = internal.SomePhase
lazy val settings = new Settings
- def forInteractive = false
- def forScaladoc = false
+ private val isLogging = sys.props contains "scala.debug.reflect"
- def log(msg: => AnyRef): Unit = println(" [] "+msg)
+ def log(msg: => AnyRef): Unit = if (isLogging) Console.err.println("[reflect] " + msg)
type TreeCopier = InternalTreeCopierOps
def newStrictTreeCopier: TreeCopier = new StrictTreeCopier
@@ -25,4 +22,3 @@ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.S
init()
}
-
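
A quick sketch of the opt-in logging introduced above: reflection messages go to stderr with a `[reflect]` prefix only when the `scala.debug.reflect` system property is present (its value is not inspected) before the runtime universe is first initialized:

    object ReflectLogDemo extends App {
      // Must be set before scala.reflect.runtime.universe is touched for the first time,
      // e.g. here or via -Dscala.debug.reflect on the command line.
      sys.props("scala.debug.reflect") = "true"

      val ru = scala.reflect.runtime.universe
      val m  = ru.runtimeMirror(getClass.getClassLoader)
      println(m.staticClass("java.lang.String"))   // lookups may emit "[reflect] ..." lines on stderr
    }
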
diff --git a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
index 7b093e0e80..aebaea40af 100644
--- a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
+++ b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
@@ -43,7 +43,7 @@ private[scala] object ReflectionUtils {
def isAbstractFileClassLoader(clazz: Class[_]): Boolean = {
if (clazz == null) return false
if (clazz.getName == "scala.tools.nsc.interpreter.AbstractFileClassLoader") return true
- return isAbstractFileClassLoader(clazz.getSuperclass)
+ isAbstractFileClassLoader(clazz.getSuperclass)
}
def inferClasspath(cl: ClassLoader): String = cl match {
case cl: java.net.URLClassLoader =>
diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala
index 0e0cf3fc40..5d58fa96d6 100644
--- a/src/reflect/scala/reflect/runtime/Settings.scala
+++ b/src/reflect/scala/reflect/runtime/Settings.scala
@@ -32,10 +32,11 @@ private[reflect] class Settings extends MutableSettings {
val Xexperimental = new BooleanSetting(false)
val XfullLubs = new BooleanSetting(false)
val XnoPatmatAnalysis = new BooleanSetting(false)
- val XoldPatmat = new BooleanSetting(false)
val Xprintpos = new BooleanSetting(false)
val Ynotnull = new BooleanSetting(false)
val Yshowsymkinds = new BooleanSetting(false)
+ val Yposdebug = new BooleanSetting(false)
+ val Yrangepos = new BooleanSetting(false)
val debug = new BooleanSetting(false)
val deepCloning = new BooleanSetting(false)
val explaintypes = new BooleanSetting(false)
@@ -43,6 +44,7 @@ private[reflect] class Settings extends MutableSettings {
val printtypes = new BooleanSetting(false)
val uniqid = new BooleanSetting(false)
val verbose = new BooleanSetting(false)
+ val breakCycles = new BooleanSetting(false)
val Yrecursion = new IntSetting(0)
val maxClassfileName = new IntSetting(255)
diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
index 61663f6181..ea14e8ad43 100644
--- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
@@ -28,7 +28,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
debugInfo("completing "+sym+"/"+clazz.fullName)
assert(sym == clazz || sym == module || sym == module.moduleClass)
// try {
- atPhaseNotLaterThan(picklerPhase) {
+ enteringPhaseNotLaterThan(picklerPhase) {
val loadingMirror = mirrorThatLoaded(sym)
val javaClass = loadingMirror.javaClass(clazz.javaClassName)
loadingMirror.unpickleClass(clazz, module, javaClass)
@@ -57,7 +57,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
* @param name The simple name of the newly created class
* @param completer The completer to be used to set the info of the class and the module
*/
- protected def createClassModule(owner: Symbol, name: TypeName, completer: (Symbol, Symbol) => LazyType) = {
+ protected def initAndEnterClassAndModule(owner: Symbol, name: TypeName, completer: (Symbol, Symbol) => LazyType) = {
assert(!(name.toString endsWith "[]"), name)
val clazz = owner.newClass(name)
val module = owner.newModule(name.toTermName)
@@ -67,7 +67,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
owner.info.decls enter clazz
owner.info.decls enter module
}
- initClassModule(clazz, module, completer(clazz, module))
+ initClassAndModule(clazz, module, completer(clazz, module))
(clazz, module)
}
@@ -75,7 +75,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
List(clazz, module, module.moduleClass) foreach (_ setInfo info)
}
- protected def initClassModule(clazz: Symbol, module: Symbol, completer: LazyType) =
+ protected def initClassAndModule(clazz: Symbol, module: Symbol, completer: LazyType) =
setAllInfos(clazz, module, completer)
/** The type completer for packages.
@@ -116,9 +116,9 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
currentMirror.tryJavaClass(path) match {
case Some(cls) =>
val loadingMirror = currentMirror.mirrorDefining(cls)
- val (clazz, module) =
+ val (_, module) =
if (loadingMirror eq currentMirror) {
- createClassModule(pkgClass, name.toTypeName, new TopClassCompleter(_, _))
+ initAndEnterClassAndModule(pkgClass, name.toTypeName, new TopClassCompleter(_, _))
} else {
val origOwner = loadingMirror.packageNameToScala(pkgClass.fullName)
val clazz = origOwner.info decl name.toTypeName
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
index 00f6952dc1..1154927279 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
@@ -83,9 +83,6 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
override protected def createPackageObjectClassSymbol(pos: Position, newFlags: Long): PackageObjectClassSymbol =
new PackageObjectClassSymbol(this, pos) with SynchronizedClassSymbol initFlags newFlags
- override protected def createTermSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol =
- new TermSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags
-
override protected def createMethodSymbol(name: TermName, pos: Position, newFlags: Long): MethodSymbol =
new MethodSymbol(this, pos, name) with SynchronizedMethodSymbol initFlags newFlags
@@ -118,7 +115,8 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
override def name_=(x: Name) = synchronized { super.name_=(x) }
override def rawname = synchronized { super.rawname }
override def typeConstructor: Type = synchronized { super.typeConstructor }
- override def tpe: Type = synchronized { super.tpe }
+ override def tpe_* : Type = synchronized { super.tpe_* }
+ override def tpeHK : Type = synchronized { super.tpeHK }
}
trait SynchronizedClassSymbol extends ClassSymbol with SynchronizedTypeSymbol {
diff --git a/src/reflect/scala/reflect/runtime/package.scala b/src/reflect/scala/reflect/runtime/package.scala
index b97913daf0..eadbc0c52e 100644
--- a/src/reflect/scala/reflect/runtime/package.scala
+++ b/src/reflect/scala/reflect/runtime/package.scala
@@ -6,7 +6,7 @@ package scala.reflect
package object runtime {
/** The entry point into Scala runtime reflection.
- *
+ *
* To use Scala runtime reflection, simply use or import `scala.reflect.runtime.universe._`
*
* See [[scala.reflect.api.Universe]] or the
diff --git a/src/compiler/scala/tools/nsc/Interpreter.scala b/src/repl/scala/tools/nsc/Interpreter.scala
index 434f19f21b..434f19f21b 100644
--- a/src/compiler/scala/tools/nsc/Interpreter.scala
+++ b/src/repl/scala/tools/nsc/Interpreter.scala
diff --git a/src/compiler/scala/tools/nsc/InterpreterLoop.scala b/src/repl/scala/tools/nsc/InterpreterLoop.scala
index a0be3f4fdb..a0be3f4fdb 100644
--- a/src/compiler/scala/tools/nsc/InterpreterLoop.scala
+++ b/src/repl/scala/tools/nsc/InterpreterLoop.scala
diff --git a/src/compiler/scala/tools/nsc/MainGenericRunner.scala b/src/repl/scala/tools/nsc/MainGenericRunner.scala
index e4a20b4a8c..7195424cf9 100644
--- a/src/compiler/scala/tools/nsc/MainGenericRunner.scala
+++ b/src/repl/scala/tools/nsc/MainGenericRunner.scala
@@ -5,12 +5,9 @@
package scala.tools.nsc
-import java.net.URL
-import scala.tools.util.PathResolver
import io.{ File }
import util.{ ClassPath, ScalaClassLoader }
import Properties.{ versionString, copyrightString }
-import interpreter.{ ILoop }
import GenericRunnerCommand._
object JarRunner extends CommonRunner {
@@ -80,7 +77,7 @@ class MainGenericRunner {
Right(false)
case _ =>
// We start the repl when no arguments are given.
- Right(new ILoop process settings)
+ Right(new interpreter.ILoop process settings)
}
/** If -e and -i were both given, we want to execute the -e code after the
diff --git a/src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
new file mode 100644
index 0000000000..712219533d
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
@@ -0,0 +1,7 @@
+package scala.tools.nsc
+package interpreter
+
+import scala.reflect.io.AbstractFile
+
+@deprecated("Use `scala.tools.nsc.util.AbstractFileClassLoader`", "2.11.0")
+class AbstractFileClassLoader(root: AbstractFile, parent: ClassLoader) extends util.AbstractFileClassLoader(root, parent)
diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala b/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
index e66e4eff29..e66e4eff29 100644
--- a/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
+++ b/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala b/src/repl/scala/tools/nsc/interpreter/ByteCode.scala
index 40e9d3d600..e1e3678837 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ByteCode.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
package interpreter
import java.lang.reflect
-import java.util.concurrent.ConcurrentHashMap
import util.ScalaClassLoader
import ScalaClassLoader.appLoader
import scala.reflect.NameTransformer._
@@ -29,35 +28,5 @@ object ByteCode {
method.invoke(module, _: String).asInstanceOf[Option[Map[String, String]]]
}
- /** Scala sig bytes.
- */
- def scalaSigBytesForPath(path: String) =
- for {
- module <- DECODER
- method <- decoderMethod("scalaSigAnnotationBytes", classOf[String])
- names <- method.invoke(module, path).asInstanceOf[Option[Array[Byte]]]
- }
- yield names
-
- /** Attempts to retrieve case parameter names for given class name.
- */
- def caseParamNamesForPath(path: String) =
- for {
- module <- DECODER
- method <- decoderMethod("caseParamNames", classOf[String])
- names <- method.invoke(module, path).asInstanceOf[Option[List[String]]]
- }
- yield names
-
def aliasesForPackage(pkg: String) = aliasMap flatMap (_(pkg))
-
- /** Attempts to find type aliases in package objects.
- */
- def aliasForType(path: String): Option[String] = {
- val (pkg, name) = (path lastIndexOf '.') match {
- case -1 => return None
- case idx => (path take idx, path drop (idx + 1))
- }
- aliasesForPackage(pkg) flatMap (_ get name)
- }
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala b/src/repl/scala/tools/nsc/interpreter/CommandLine.scala
index 8042f0aee2..0ab92ab769 100644
--- a/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala
+++ b/src/repl/scala/tools/nsc/interpreter/CommandLine.scala
@@ -10,5 +10,4 @@ package interpreter
*/
class CommandLine(arguments: List[String], error: String => Unit) extends CompilerCommand(arguments, error) {
override def cmdName = "scala"
- override lazy val fileEndings = List(".scalaint")
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Completion.scala b/src/repl/scala/tools/nsc/interpreter/Completion.scala
index 1dfccbfbf7..84a5cb49ae 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Completion.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Completion.scala
@@ -23,8 +23,6 @@ object NoCompletion extends Completion {
}
object Completion {
- def empty: Completion = NoCompletion
-
case class Candidates(cursor: Int, candidates: List[String]) { }
val NoCandidates = Candidates(-1, Nil)
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala b/src/repl/scala/tools/nsc/interpreter/CompletionAware.scala
index ab96f415db..3dd5d93390 100644
--- a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
+++ b/src/repl/scala/tools/nsc/interpreter/CompletionAware.scala
@@ -6,8 +6,6 @@
package scala.tools.nsc
package interpreter
-import scala.reflect.NameTransformer
-
/** An interface for objects which are aware of tab completion and
* will supply their own candidates and resolve their own paths.
*/
@@ -53,31 +51,3 @@ trait CompletionAware {
results.sorted
}
}
-
-object CompletionAware {
- val Empty = new CompletionAware { def completions(verbosity: Int) = Nil }
-
- def unapply(that: Any): Option[CompletionAware] = that match {
- case x: CompletionAware => Some((x))
- case _ => None
- }
-
- /** Create a CompletionAware object from the given functions.
- * The first should generate the list of completions whenever queried,
- * and the second should return Some(CompletionAware) object if
- * subcompletions are possible.
- */
- def apply(terms: () => List[String], followFunction: String => Option[CompletionAware]): CompletionAware =
- new CompletionAware {
- def completions = terms()
- def completions(verbosity: Int) = completions
- override def follow(id: String) = followFunction(id)
- }
-
- /** Convenience factories.
- */
- def apply(terms: () => List[String]): CompletionAware = apply(terms, _ => None)
- def apply(map: scala.collection.Map[String, CompletionAware]): CompletionAware =
- apply(() => map.keys.toList, map.get _)
-}
-
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala b/src/repl/scala/tools/nsc/interpreter/CompletionOutput.scala
index d14b5c79e0..d24ad60974 100644
--- a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
+++ b/src/repl/scala/tools/nsc/interpreter/CompletionOutput.scala
@@ -38,7 +38,6 @@ trait CompletionOutput {
def relativize(str: String): String = quietString(str stripPrefix (pkg + "."))
def relativize(tp: Type): String = relativize(tp.dealiasWiden.toString)
- def relativize(sym: Symbol): String = relativize(sym.info)
def braceList(tparams: List[String]) = if (tparams.isEmpty) "" else (tparams map relativize).mkString("[", ", ", "]")
def parenList(params: List[Any]) = params.mkString("(", ", ", ")")
@@ -76,7 +75,7 @@ trait CompletionOutput {
}
def methodString() =
- method.keyString + " " + method.nameString + (method.info.normalize match {
+ method.keyString + " " + method.nameString + (method.info.dealiasWiden match {
case NullaryMethodType(resType) => ": " + typeToString(resType)
case PolyType(tparams, resType) => tparamsString(tparams) + typeToString(resType)
case mt @ MethodType(_, _) => methodTypeToString(mt)
diff --git a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala b/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
index 07e36f4f27..48af261937 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
@@ -7,19 +7,12 @@ package scala.tools.nsc
package interpreter
import scala.tools.jline.console.{ ConsoleReader, CursorBuffer }
-import scala.tools.jline.console.completer.CompletionHandler
-import Completion._
trait ConsoleReaderHelper extends ConsoleReader {
- def currentLine = "" + getCursorBuffer.buffer
- def currentPos = getCursorBuffer.cursor
def terminal = getTerminal()
def width = terminal.getWidth()
def height = terminal.getHeight()
- def paginate = isPaginationEnabled()
- def paginate_=(value: Boolean) = setPaginationEnabled(value)
- def goBack(num: Int): Unit
def readOneKey(prompt: String): Int
def eraseLine(): Unit
diff --git a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala b/src/repl/scala/tools/nsc/interpreter/Delimited.scala
index 80debfacb9..e88a044931 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Delimited.scala
@@ -26,7 +26,6 @@ trait Delimited {
def delimited: Char => Boolean
def escapeChars: List[Char] = List('\\')
- def quoteChars: List[(Char, Char)] = List(('\'', '\''), ('"', '"'))
/** Break String into args based on delimiting function.
*/
@@ -39,6 +38,4 @@ trait Delimited {
def isDelimiterChar(ch: Char) = delimited(ch)
def isEscapeChar(ch: Char): Boolean = escapeChars contains ch
- def isQuoteStart(ch: Char): Boolean = quoteChars map (_._1) contains ch
- def isQuoteEnd(ch: Char): Boolean = quoteChars map (_._2) contains ch
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
index c4a672ac37..9edd54b939 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc
package interpreter
-import scala.reflect.internal.util.BatchSourceFile
import scala.tools.nsc.ast.parser.Tokens.EOF
trait ExprTyper {
@@ -15,10 +14,11 @@ trait ExprTyper {
import repl._
import global.{ reporter => _, Import => _, _ }
import definitions._
- import syntaxAnalyzer.{ UnitParser, UnitScanner, token2name }
+ import syntaxAnalyzer.UnitParser
import naming.freshInternalVarName
- object codeParser extends { val global: repl.global.type = repl.global } with CodeHandlers[Tree] {
+ object codeParser {
+ val global: repl.global.type = repl.global
def applyRule[T](code: String, rule: UnitParser => T): T = {
reporter.reset()
val scanner = newUnitParser(code)
@@ -29,11 +29,7 @@ trait ExprTyper {
result
}
-
- def defns(code: String) = stmts(code) collect { case x: DefTree => x }
- def expr(code: String) = applyRule(code, _.expr())
def stmts(code: String) = applyRule(code, _.templateStats())
- def stmt(code: String) = stmts(code).last // guaranteed nonempty
}
/** Parse a line into a sequence of trees. Returns None if the input is incomplete. */
@@ -46,10 +42,6 @@ trait ExprTyper {
else Some(trees)
}
}
- // def parsesAsExpr(line: String) = {
- // import codeParser._
- // (opt expr line).isDefined
- // }
def symbolOfLine(code: String): Symbol = {
def asExpr(): Symbol = {
@@ -57,15 +49,13 @@ trait ExprTyper {
// Typing it with a lazy val would give us the right type, but runs
// into compiler bugs with things like existentials, so we compile it
// behind a def and strip the NullaryMethodType which wraps the expr.
- val line = "def " + name + " = {\n" + code + "\n}"
+ val line = "def " + name + " = " + code
interpretSynthetic(line) match {
case IR.Success =>
val sym0 = symbolOfTerm(name)
// drop NullaryMethodType
- val sym = sym0.cloneSymbol setInfo afterTyper(sym0.info.finalResultType)
- if (sym.info.typeSymbol eq UnitClass) NoSymbol
- else sym
+ sym0.cloneSymbol setInfo exitingTyper(sym0.info.finalResultType)
case _ => NoSymbol
}
}
@@ -82,7 +72,11 @@ trait ExprTyper {
case _ => NoSymbol
}
}
- beQuietDuring(asExpr()) orElse beQuietDuring(asDefn())
+ def asError(): Symbol = {
+ interpretSynthetic(code)
+ NoSymbol
+ }
+ beSilentDuring(asExpr()) orElse beSilentDuring(asDefn()) orElse asError()
}
private var typeOfExpressionDepth = 0
diff --git a/src/compiler/scala/tools/nsc/interpreter/Formatting.scala b/src/repl/scala/tools/nsc/interpreter/Formatting.scala
index 43e653edfd..43e653edfd 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Formatting.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Formatting.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
index b7e07ecdd6..599a061984 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
@@ -8,24 +8,19 @@ package interpreter
import Predef.{ println => _, _ }
import java.io.{ BufferedReader, FileReader }
-import java.util.concurrent.locks.ReentrantLock
-import scala.sys.process.Process
import session._
-import scala.util.Properties.{ jdkHome, javaVersion }
-import scala.tools.util.{ Javap }
import scala.annotation.tailrec
-import scala.collection.mutable.ListBuffer
-import scala.concurrent.ops
+import scala.util.Properties.{ jdkHome, javaVersion, versionString, javaVmName }
import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
-import interpreter._
import io.{ File, Directory }
-import scala.reflect.NameTransformer._
import util.ScalaClassLoader
import ScalaClassLoader._
import scala.tools.util._
import scala.language.{implicitConversions, existentials}
-import scala.reflect.{ClassTag, classTag}
-import scala.tools.reflect.StdRuntimeTags._
+import scala.reflect.classTag
+import StdReplTags._
+import scala.concurrent.{ ExecutionContext, Await, Future, future }
+import ExecutionContext.Implicits._
/** The Scala interactive shell. It provides a read-eval-print loop
* around the Interpreter class.
@@ -42,77 +37,41 @@ import scala.tools.reflect.StdRuntimeTags._
class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
extends AnyRef
with LoopCommands
- with ILoopInit
{
def this(in0: BufferedReader, out: JPrintWriter) = this(Some(in0), out)
def this() = this(None, new JPrintWriter(Console.out, true))
- var in: InteractiveReader = _ // the input stream from which commands come
- var settings: Settings = _
- var intp: IMain = _
-
@deprecated("Use `intp` instead.", "2.9.0") def interpreter = intp
@deprecated("Use `intp` instead.", "2.9.0") def interpreter_= (i: Interpreter): Unit = intp = i
- /** Having inherited the difficult "var-ness" of the repl instance,
- * I'm trying to work around it by moving operations into a class from
- * which it will appear a stable prefix.
- */
- private def onIntp[T](f: IMain => T): T = f(intp)
-
- class IMainOps[T <: IMain](val intp: T) {
- import intp._
- import global._
-
- def printAfterTyper(msg: => String) =
- intp.reporter printUntruncatedMessage afterTyper(msg)
-
- /** Strip NullaryMethodType artifacts. */
- private def replInfo(sym: Symbol) = {
- sym.info match {
- case NullaryMethodType(restpe) if sym.isAccessor => restpe
- case info => info
- }
- }
- def echoTypeStructure(sym: Symbol) =
- printAfterTyper("" + deconstruct.show(replInfo(sym)))
+ var in: InteractiveReader = _ // the input stream from which commands come
+ var settings: Settings = _
+ var intp: IMain = _
- def echoTypeSignature(sym: Symbol, verbose: Boolean) = {
- if (verbose) ILoop.this.echo("// Type signature")
- printAfterTyper("" + replInfo(sym))
+ private var globalFuture: Future[Boolean] = _
- if (verbose) {
- ILoop.this.echo("\n// Internal Type structure")
- echoTypeStructure(sym)
- }
- }
+ /** Print a welcome message */
+ def printWelcome() {
+ echo(s"""
+ |Welcome to Scala $versionString ($javaVmName, Java $javaVersion).
+ |Type in expressions to have them evaluated.
+ |Type :help for more information.""".trim.stripMargin
+ )
+ replinfo("[info] started at " + new java.util.Date)
}
- implicit def stabilizeIMain(intp: IMain) = new IMainOps[intp.type](intp)
- /** TODO -
- * -n normalize
- * -l label with case class parameter names
- * -c complete - leave nothing out
- */
- private def typeCommandInternal(expr: String, verbose: Boolean): Result = {
- onIntp { intp =>
- val sym = intp.symbolOfLine(expr)
- if (sym.exists) intp.echoTypeSignature(sym, verbose)
- else ""
- }
+ protected def asyncMessage(msg: String) {
+ if (isReplInfo || isReplPower)
+ echoAndRefresh(msg)
}
override def echoCommandMessage(msg: String) {
intp.reporter printUntruncatedMessage msg
}
- def isAsync = !settings.Yreplsync.value
lazy val power = new Power(intp, new StdReplVals(this))(tagOfStdReplVals, classTag[StdReplVals])
def history = in.history
- /** The context class loader at the time this object was created */
- protected val originalClassLoader = Thread.currentThread.getContextClassLoader
-
// classpath entries added via :cp
var addedClasspath: String = ""
@@ -166,20 +125,18 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
def helpCommand(line: String): Result = {
if (line == "") helpSummary()
else uniqueCommand(line) match {
- case Some(lc) => echo("\n" + lc.longHelp)
+ case Some(lc) => echo("\n" + lc.help)
case _ => ambiguousError(line)
}
}
private def helpSummary() = {
val usageWidth = commands map (_.usageMsg.length) max
- val formatStr = "%-" + usageWidth + "s %s %s"
+ val formatStr = "%-" + usageWidth + "s %s"
echo("All commands can be abbreviated, e.g. :he instead of :help.")
- echo("Those marked with a * have more detailed help, e.g. :help imports.\n")
commands foreach { cmd =>
- val star = if (cmd.hasLongHelp) "*" else " "
- echo(formatStr.format(cmd.usageMsg, star, cmd.help))
+ echo(formatStr.format(cmd.usageMsg, cmd.help))
}
}
private def ambiguousError(cmd: String): Result = {
@@ -187,7 +144,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
case Nil => echo(cmd + ": no such command. Type :help for help.")
case xs => echo(cmd + " is ambiguous: did you mean " + xs.map(":" + _.name).mkString(" or ") + "?")
}
- Result(true, None)
+ Result(keepRunning = true, None)
}
private def matchingCommands(cmd: String) = commands filter (_.name startsWith cmd)
private def uniqueCommand(cmd: String): Option[LoopCommand] = {
@@ -229,10 +186,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
out println msg
out.flush()
}
- protected def echoNoNL(msg: String) = {
- out print msg
- out.flush()
- }
/** Search the history */
def searchHistory(_cmdline: String) {
@@ -243,8 +196,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
echo("%d %s".format(index + offset, line))
}
- private var currentPrompt = Properties.shellPromptString
- def setPrompt(prompt: String) = currentPrompt = prompt
+ private val currentPrompt = Properties.shellPromptString
+
/** Prompt to print when awaiting input */
def prompt = currentPrompt
@@ -257,12 +210,12 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
historyCommand,
cmd("h?", "<string>", "search the history", searchHistory),
cmd("imports", "[name name ...]", "show import history, identifying sources of names", importsCommand),
- cmd("implicits", "[-v]", "show the implicits in scope", implicitsCommand),
+ cmd("implicits", "[-v]", "show the implicits in scope", intp.implicitsCommand),
cmd("javap", "<path|class>", "disassemble a file or class name", javapCommand),
cmd("load", "<path>", "load and interpret a Scala file", loadCommand),
nullary("paste", "enter paste mode: all input up to ctrl-D compiled together", pasteCommand),
nullary("power", "enable power user mode", powerCmd),
- nullary("quit", "exit the interpreter", () => Result(false, None)),
+ nullary("quit", "exit the interpreter", () => Result(keepRunning = false, None)),
nullary("replay", "reset execution and replay all previous commands", replay),
nullary("reset", "reset the repl to its initial state, forgetting all session entries", resetCommand),
shCommand,
@@ -276,25 +229,9 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
cmd("phase", "<phase>", "set the implicit phase for power commands", phaseCommand)
)
- private def dumpCommand(): Result = {
- echo("" + power)
- history.asStrings takeRight 30 foreach echo
- in.redrawLine()
- }
- private def valsCommand(): Result = power.valsDescription
-
- private val typeTransforms = List(
- "scala.collection.immutable." -> "immutable.",
- "scala.collection.mutable." -> "mutable.",
- "scala.collection.generic." -> "generic.",
- "java.lang." -> "jl.",
- "scala.runtime." -> "runtime."
- )
-
private def importsCommand(line: String): Result = {
val tokens = words(line)
val handlers = intp.languageWildcardHandlers ++ intp.importHandlers
- val isVerbose = tokens contains "-v"
handlers.filterNot(_.importedSymbols.isEmpty).zipWithIndex foreach {
case (handler, idx) =>
@@ -316,66 +253,9 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
}
- private def implicitsCommand(line: String): Result = onIntp { intp =>
- import intp._
- import global._
-
- def p(x: Any) = intp.reporter.printMessage("" + x)
-
- // If an argument is given, only show a source with that
- // in its name somewhere.
- val args = line split "\\s+"
- val filtered = intp.implicitSymbolsBySource filter {
- case (source, syms) =>
- (args contains "-v") || {
- if (line == "") (source.fullName.toString != "scala.Predef")
- else (args exists (source.name.toString contains _))
- }
- }
-
- if (filtered.isEmpty)
- return "No implicits have been imported other than those in Predef."
-
- filtered foreach {
- case (source, syms) =>
- p("/* " + syms.size + " implicit members imported from " + source.fullName + " */")
-
- // This groups the members by where the symbol is defined
- val byOwner = syms groupBy (_.owner)
- val sortedOwners = byOwner.toList sortBy { case (owner, _) => afterTyper(source.info.baseClasses indexOf owner) }
-
- sortedOwners foreach {
- case (owner, members) =>
- // Within each owner, we cluster results based on the final result type
- // if there are more than a couple, and sort each cluster based on name.
- // This is really just trying to make the 100 or so implicits imported
- // by default into something readable.
- val memberGroups: List[List[Symbol]] = {
- val groups = members groupBy (_.tpe.finalResultType) toList
- val (big, small) = groups partition (_._2.size > 3)
- val xss = (
- (big sortBy (_._1.toString) map (_._2)) :+
- (small flatMap (_._2))
- )
-
- xss map (xs => xs sortBy (_.name.toString))
- }
-
- val ownerMessage = if (owner == source) " defined in " else " inherited from "
- p(" /* " + members.size + ownerMessage + owner.fullName + " */")
-
- memberGroups foreach { group =>
- group foreach (s => p(" " + intp.symbolDefString(s)))
- p("")
- }
- }
- p("")
- }
- }
-
private def findToolsJar() = {
val jdkPath = Directory(jdkHome)
- val jar = jdkPath / "lib" / "tools.jar" toFile;
+ val jar = jdkPath / "lib" / "tools.jar" toFile
if (jar isFile)
Some(jar)
@@ -384,7 +264,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
else None
}
private def addToolsJarToLoader() = {
- val cl = findToolsJar match {
+ val cl = findToolsJar() match {
case Some(tools) => ScalaClassLoader.fromURLs(Seq(tools.toURL), intp.classLoader)
case _ => intp.classLoader
}
@@ -398,42 +278,17 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
}
- protected def newJavap() = new JavapClass(addToolsJarToLoader(), new IMain.ReplStrippingWriter(intp)) {
- override def tryClass(path: String): Array[Byte] = {
- val hd :: rest = path split '.' toList;
- // If there are dots in the name, the first segment is the
- // key to finding it.
- if (rest.nonEmpty) {
- intp optFlatName hd match {
- case Some(flat) =>
- val clazz = flat :: rest mkString NAME_JOIN_STRING
- val bytes = super.tryClass(clazz)
- if (bytes.nonEmpty) bytes
- else super.tryClass(clazz + MODULE_SUFFIX_STRING)
- case _ => super.tryClass(path)
- }
- }
- else {
- // Look for Foo first, then Foo$, but if Foo$ is given explicitly,
- // we have to drop the $ to find object Foo, then tack it back onto
- // the end of the flattened name.
- def className = intp flatName path
- def moduleName = (intp flatName path.stripSuffix(MODULE_SUFFIX_STRING)) + MODULE_SUFFIX_STRING
-
- val bytes = super.tryClass(className)
- if (bytes.nonEmpty) bytes
- else super.tryClass(moduleName)
- }
- }
- }
+ protected def newJavap() =
+ JavapClass(addToolsJarToLoader(), new IMain.ReplStrippingWriter(intp), Some(intp))
+
private lazy val javap = substituteAndLog[Javap]("javap", NoJavap)(newJavap())
// Still todo: modules.
private def typeCommand(line0: String): Result = {
line0.trim match {
case "" => ":type [-v] <expression>"
- case s if s startsWith "-v " => typeCommandInternal(s stripPrefix "-v " trim, true)
- case s => typeCommandInternal(s, false)
+ case s if s startsWith "-v " => intp.typeCommandInternal(s stripPrefix "-v " trim, verbose = true)
+ case s => intp.typeCommandInternal(s, verbose = false)
}
}
@@ -447,8 +302,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
private def javapCommand(line: String): Result = {
if (javap == null)
":javap unavailable, no tools.jar at %s. Set JDK_HOME.".format(jdkHome)
- else if (javaVersion startsWith "1.7")
- ":javap not yet working with java 1.7"
else if (line == "")
":javap [-lcsvp] [path1 path2 ...]"
else
@@ -458,37 +311,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
}
- private def wrapCommand(line: String): Result = {
- def failMsg = "Argument to :wrap must be the name of a method with signature [T](=> T): T"
- onIntp { intp =>
- import intp._
- import global._
+ private def pathToPhaseWrapper = intp.originalPath("$r") + ".phased.atCurrent"
- words(line) match {
- case Nil =>
- intp.executionWrapper match {
- case "" => "No execution wrapper is set."
- case s => "Current execution wrapper: " + s
- }
- case "clear" :: Nil =>
- intp.executionWrapper match {
- case "" => "No execution wrapper is set."
- case s => intp.clearExecutionWrapper() ; "Cleared execution wrapper."
- }
- case wrapper :: Nil =>
- intp.typeOfExpression(wrapper) match {
- case PolyType(List(targ), MethodType(List(arg), restpe)) =>
- intp setExecutionWrapper intp.pathToTerm(wrapper)
- "Set wrapper to '" + wrapper + "'"
- case tp =>
- failMsg + "\nFound: <unknown>"
- }
- case _ => failMsg
- }
- }
- }
-
- private def pathToPhaseWrapper = intp.pathToTerm("$r") + ".phased.atCurrent"
private def phaseCommand(name: String): Result = {
val phased: Phased = power.phased
import phased.NoPhaseName
@@ -547,33 +371,30 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
true
}
+ // return false if repl should exit
+ def processLine(line: String): Boolean = {
+ import scala.concurrent.duration._
+ Await.ready(globalFuture, 60.seconds)
+
+ (line ne null) && (command(line) match {
+ case Result(false, _) => false
+ case Result(_, Some(line)) => addReplay(line) ; true
+ case _ => true
+ })
+ }
+
+ private def readOneLine() = {
+ out.flush()
+ in readLine prompt
+ }
+
/** The main read-eval-print loop for the repl. It calls
* command() for each line of input, and stops when
* command() returns false.
*/
- def loop() {
- def readOneLine() = {
- out.flush()
- in readLine prompt
- }
- // return false if repl should exit
- def processLine(line: String): Boolean = {
- if (isAsync) {
- if (!awaitInitialized()) return false
- runThunks()
- }
- if (line eq null) false // assume null means EOF
- else command(line) match {
- case Result(false, _) => false
- case Result(_, Some(finalLine)) => addReplay(finalLine) ; true
- case _ => true
- }
- }
- def innerLoop() {
- if ( try processLine(readOneLine()) catch crashRecovery )
- innerLoop()
- }
- innerLoop()
+ @tailrec final def loop() {
+ if ( try processLine(readOneLine()) catch crashRecovery )
+ loop()
}
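The rewritten loop() above is a tail-recursive driver: @tailrec makes the compiler guarantee constant stack, and processLine waits (bounded, 60 seconds) on globalFuture before handling the first input. A self-contained sketch of the same shape, with toy stand-ins for readOneLine/processLine:

    import scala.annotation.tailrec
    import scala.io.StdIn

    object TailrecLoopSketch {
      // Toy stand-ins for the repl's readOneLine()/processLine(line).
      private def readOneLine(): String = StdIn.readLine("sketch> ")
      private def processLine(line: String): Boolean =
        (line ne null) && line.trim != ":quit"   // null (EOF) or :quit ends the loop

      // Same shape as the diff's loop(): recurse while processLine says to keep going.
      @tailrec def loop(): Unit =
        if (try processLine(readOneLine()) catch { case _: Exception => true }) loop()

      def main(args: Array[String]): Unit = loop()
    }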
/** interpret all lines from a specified file */
@@ -581,7 +402,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
savingReader {
savingReplayStack {
file applyReader { reader =>
- in = SimpleReader(reader, out, false)
+ in = SimpleReader(reader, out, interactive = false)
echo("Loading " + file + "...")
loop()
}
@@ -645,7 +466,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
interpretAllFrom(f)
shouldReplay = Some(":load " + arg)
})
- Result(true, shouldReplay)
+ Result(keepRunning = true, shouldReplay)
}
def addClasspath(arg: String): Unit = {
@@ -661,7 +482,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
def powerCmd(): Result = {
if (isReplPower) "Already in power mode."
- else enablePowerMode(false)
+ else enablePowerMode(isDuringInit = false)
}
def enablePowerMode(isDuringInit: Boolean) = {
replProps.power setValue true
@@ -698,8 +519,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
case _ => ambiguousError(cmd)
}
}
- else if (intp.global == null) Result(false, None) // Notice failure to create compiler
- else Result(true, interpretStartingWith(line))
+ else if (intp.global == null) Result(keepRunning = false, None) // Notice failure to create compiler
+ else Result(keepRunning = true, interpretStartingWith(line))
}
private def readWhile(cond: String => Boolean) = {
@@ -819,48 +640,39 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
SimpleReader()
}
}
- def process(settings: Settings): Boolean = savingContextLoader {
- this.settings = settings
- createInterpreter()
- // sets in to some kind of reader depending on environmental cues
- in = in0 match {
- case Some(reader) => SimpleReader(reader, out, true)
- case None =>
- // some post-initialization
- chooseReader(settings) match {
- case x: JLineReader => addThunk(x.consoleReader.postInit) ; x
- case x => x
- }
+ private def loopPostInit() {
+ in match {
+ case x: JLineReader => x.consoleReader.postInit
+ case _ =>
}
// Bind intp somewhere out of the regular namespace where
// we can get at it in generated code.
- addThunk(intp.quietBind(NamedParam[IMain]("$intp", intp)(tagOfIMain, classTag[IMain])))
- addThunk({
- import scala.tools.nsc.io._
- import Properties.userHome
- import scala.compat.Platform.EOL
- val autorun = replProps.replAutorunCode.option flatMap (f => io.File(f).safeSlurp())
- if (autorun.isDefined) intp.quietRun(autorun.get)
- })
-
- loadFiles(settings)
- // it is broken on startup; go ahead and exit
- if (intp.reporter.hasErrors)
- return false
-
- // This is about the illusion of snappiness. We call initialize()
- // which spins off a separate thread, then print the prompt and try
- // our best to look ready. The interlocking lazy vals tend to
- // inter-deadlock, so we break the cycle with a single asynchronous
- // message to an actor.
- if (isAsync) {
- intp initialize initializedCallback()
- createAsyncListener() // listens for signal to run postInitialization
+ intp.quietBind(NamedParam[IMain]("$intp", intp)(tagOfIMain, classTag[IMain]))
+ // Auto-run code via some setting.
+ ( replProps.replAutorunCode.option
+ flatMap (f => io.File(f).safeSlurp())
+ foreach (intp quietRun _)
+ )
+ // classloader and power mode setup
+ intp.setContextClassLoader()
+ if (isReplPower) {
+ replProps.power setValue true
+ unleashAndSetPhase()
+ asyncMessage(power.banner)
}
- else {
+ }
+ def process(settings: Settings): Boolean = savingContextLoader {
+ this.settings = settings
+ createInterpreter()
+
+ // sets in to some kind of reader depending on environmental cues
+ in = in0.fold(chooseReader(settings))(r => SimpleReader(r, out, interactive = true))
+ globalFuture = future {
intp.initializeSynchronous()
- postInitialization()
+ loopPostInit()
+ loadFiles(settings)
+ !intp.reporter.hasErrors
}
printWelcome()
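process() now prints the welcome banner immediately and pushes interpreter start-up onto a future that the first processLine awaits. Roughly this pattern, written with the newer Future(...) spelling instead of the future { ... } used above; heavyInit is a stand-in for initializeSynchronous/loopPostInit/loadFiles:

    import scala.concurrent.{ Await, Future }
    import scala.concurrent.ExecutionContext.Implicits.global
    import scala.concurrent.duration._

    object EagerPromptSketch {
      // Stand-in for the real start-up work done on globalFuture.
      private def heavyInit(): Boolean = { Thread.sleep(300); true }

      def main(args: Array[String]): Unit = {
        val globalFuture: Future[Boolean] = Future(heavyInit()) // start init in the background
        println("Welcome to the sketch repl")                   // banner appears immediately
        // The first command blocks (bounded) until init is done, as processLine does.
        val ok = Await.result(globalFuture, 60.seconds)
        println(s"initialized ok: $ok")
      }
    }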
@@ -871,27 +683,12 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
true
}
- /** process command-line arguments and do as they request */
- def process(args: Array[String]): Boolean = {
- val command = new CommandLine(args.toList, echo)
- def neededHelp(): String =
- (if (command.settings.help.value) command.usageMsg + "\n" else "") +
- (if (command.settings.Xhelp.value) command.xusageMsg + "\n" else "")
-
- // if they asked for no help and command is valid, we call the real main
- neededHelp() match {
- case "" => command.ok && process(command.settings)
- case help => echoNoNL(help) ; true
- }
- }
-
@deprecated("Use `process` instead", "2.9.0")
- def main(settings: Settings): Unit = process(settings)
+ def main(settings: Settings): Unit = process(settings) //used by sbt
}
object ILoop {
implicit def loopToInterpreter(repl: ILoop): IMain = repl.intp
- private def echo(msg: String) = Console println msg
// Designed primarily for use by test code: take a String with a
// bunch of code, and prints out a transcript of what it would look
diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
index bed8570bd0..c92777c13e 100644
--- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -11,36 +11,24 @@ import util.stringFromWriter
import scala.reflect.internal.util._
import java.net.URL
import scala.sys.BooleanProp
-import io.VirtualDirectory
import scala.tools.nsc.io.AbstractFile
import reporters._
-import symtab.Flags
-import scala.reflect.internal.Names
import scala.tools.util.PathResolver
import scala.tools.nsc.util.ScalaClassLoader
+import scala.tools.nsc.typechecker.{ TypeStrings, StructuredTypeStrings }
import ScalaClassLoader.URLClassLoader
import scala.tools.nsc.util.Exceptional.unwrap
import scala.collection.{ mutable, immutable }
-import scala.util.control.Exception.{ ultimately }
+import scala.reflect.BeanProperty
+import scala.util.Properties.versionString
+import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine, ScriptEngineFactory, ScriptException, SimpleBindings, CompiledScript, Compilable}
+import java.io.{ StringWriter, Reader }
+import java.util.Arrays
import IMain._
import java.util.concurrent.Future
-import typechecker.Analyzer
-import scala.language.implicitConversions
import scala.reflect.runtime.{ universe => ru }
import scala.reflect.{ ClassTag, classTag }
-import scala.tools.reflect.StdRuntimeTags._
-
-/** directory to save .class files to */
-private class ReplVirtualDirectory(out: JPrintWriter) extends VirtualDirectory("(memory)", None) {
- private def pp(root: AbstractFile, indentLevel: Int) {
- val spaces = " " * indentLevel
- out.println(spaces + root.name)
- if (root.isDirectory)
- root.toList sortBy (_.name) foreach (x => pp(x, indentLevel + 1))
- }
- // print the contents hierarchically
- def show() = pp(this, 0)
-}
+import StdReplTags._
/** An interpreter for Scala code.
*
@@ -74,19 +62,22 @@ private class ReplVirtualDirectory(out: JPrintWriter) extends VirtualDirectory("
* @author Moez A. Abdel-Gawad
* @author Lex Spoon
*/
-class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends Imports {
+class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Settings, protected val out: JPrintWriter) extends AbstractScriptEngine(new SimpleBindings) with Compilable with Imports {
imain =>
- /** Leading with the eagerly evaluated.
- */
- val virtualDirectory: VirtualDirectory = new ReplVirtualDirectory(out) // "directory" for classfiles
- private var currentSettings: Settings = initialSettings
- private[nsc] var printResults = true // whether to print result lines
- private[nsc] var totalSilence = false // whether to print anything
- private var _initializeComplete = false // compiler is initialized
- private var _isInitialized: Future[Boolean] = null // set up initialization future
- private var bindExceptions = true // whether to bind the lastException variable
- private var _executionWrapper = "" // code to be wrapped around all lines
+ object replOutput extends ReplOutput(settings.Yreploutdir) { }
+
+ @deprecated("Use replOutput.dir instead", "2.11.0")
+ def virtualDirectory = replOutput.dir
+ // Used in a test case.
+ def showDirectory() = replOutput.show(out)
+
+ private[nsc] var printResults = true // whether to print result lines
+ private[nsc] var totalSilence = false // whether to print anything
+ private var _initializeComplete = false // compiler is initialized
+ private var _isInitialized: Future[Boolean] = null // set up initialization future
+ private var bindExceptions = true // whether to bind the lastException variable
+ private var _executionWrapper = "" // code to be wrapped around all lines
/** We're going to go to some trouble to initialize the compiler asynchronously.
* It's critical that nothing call into it until it's been initialized or we will
@@ -95,23 +86,14 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
* use a lazy val to ensure that any attempt to use the compiler object waits
* on the future.
*/
- private var _classLoader: AbstractFileClassLoader = null // active classloader
- private val _compiler: Global = newCompiler(settings, reporter) // our private compiler
-
- private val nextReqId = {
- var counter = 0
- () => { counter += 1 ; counter }
- }
+ private var _classLoader: util.AbstractFileClassLoader = null // active classloader
+ private val _compiler: ReplGlobal = newCompiler(settings, reporter) // our private compiler
def compilerClasspath: Seq[URL] = (
if (isInitializeComplete) global.classPath.asURLs
else new PathResolver(settings).result.asURLs // the compiler's classpath
)
- def settings = currentSettings
- def mostRecentLine = prevRequestList match {
- case Nil => ""
- case req :: _ => req.originalLine
- }
+ def settings = initialSettings
// Run the code body with the given boolean settings flipped to true.
def withoutWarnings[T](body: => T): T = beQuietDuring {
val saved = settings.nowarn.value
@@ -123,15 +105,12 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
}
/** construct an interpreter that reports to Console */
+ def this(settings: Settings, out: JPrintWriter) = this(null, settings, out)
+ def this(factory: ScriptEngineFactory, settings: Settings) = this(factory, settings, new NewLinePrintWriter(new ConsoleWriter, true))
def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true))
+ def this(factory: ScriptEngineFactory) = this(factory, new Settings())
def this() = this(new Settings())
- lazy val repllog: Logger = new Logger {
- val out: JPrintWriter = imain.out
- val isInfo: Boolean = BooleanProp keyExists "scala.repl.info"
- val isDebug: Boolean = BooleanProp keyExists "scala.repl.debug"
- val isTrace: Boolean = BooleanProp keyExists "scala.repl.trace"
- }
lazy val formatting: Formatting = new Formatting {
val prompt = Properties.shellPromptString
}
@@ -153,6 +132,8 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
catch AbstractOrMissingHandler()
}
private def tquoted(s: String) = "\"\"\"" + s + "\"\"\""
+ private val logScope = scala.sys.props contains "scala.repl.scope"
+ private def scopelog(msg: String) = if (logScope) Console.err.println(msg)
// argument is a thunk to execute after init is done
def initialize(postInitSignal: => Unit) {
@@ -173,29 +154,28 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
}
def isInitializeComplete = _initializeComplete
- /** the public, go through the future compiler */
lazy val global: Global = {
- if (isInitializeComplete) _compiler
- else {
- // If init hasn't been called yet you're on your own.
- if (_isInitialized == null) {
- repldbg("Warning: compiler accessed before init set up. Assuming no postInit code.")
- initialize(())
- }
- // blocks until it is ; false means catastrophic failure
- if (_isInitialized.get()) _compiler
- else null
- }
+ if (!isInitializeComplete) _initialize()
+ _compiler
}
- @deprecated("Use `global` for access to the compiler instance.", "2.9.0")
- lazy val compiler: global.type = global
import global._
- import definitions.{ScalaPackage, JavaLangPackage, termMember, typeMember}
- import rootMirror.{RootClass, getClassIfDefined, getModuleIfDefined, getRequiredModule, getRequiredClass}
+ import definitions.{ ObjectClass, termMember, dropNullaryMethod}
+
+ lazy val runtimeMirror = ru.runtimeMirror(classLoader)
+
+ private def noFatal(body: => Symbol): Symbol = try body catch { case _: FatalError => NoSymbol }
+
+ def getClassIfDefined(path: String) = (
+ noFatal(runtimeMirror staticClass path)
+ orElse noFatal(rootMirror staticClass path)
+ )
+ def getModuleIfDefined(path: String) = (
+ noFatal(runtimeMirror staticModule path)
+ orElse noFatal(rootMirror staticModule path)
+ )
implicit class ReplTypeOps(tp: Type) {
- def orElse(other: => Type): Type = if (tp ne NoType) tp else other
def andAlso(fn: Type => Type): Type = if (tp eq NoType) tp else fn(tp)
}
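getClassIfDefined/getModuleIfDefined above resolve names through a runtime mirror first and fall back to the compiler's rootMirror. A small standalone illustration of the runtime-mirror half, using only the public scala-reflect API (nothing repl-specific):

    import scala.reflect.runtime.{ universe => ru }

    object MirrorLookupSketch {
      def main(args: Array[String]): Unit = {
        val mirror = ru.runtimeMirror(getClass.getClassLoader)
        // Resolve a class and a module by fully qualified name, as the defs above do.
        val listClass  = mirror.staticClass("scala.collection.immutable.List")
        val listModule = mirror.staticModule("scala.collection.immutable.List")
        println(listClass.fullName)                                 // scala.collection.immutable.List
        println(mirror.reflectModule(listModule).instance == List)  // true: the companion object
        println(mirror.runtimeClass(listClass))                     // the underlying java.lang.Class
      }
    }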
@@ -208,10 +188,9 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
// make sure we don't overwrite their unwisely named res3 etc.
def freshUserTermName(): TermName = {
val name = newTermName(freshUserVarName())
- if (definedNameMap contains name) freshUserTermName()
+ if (replScope containsName name) freshUserTermName()
else name
}
- def isUserTermName(name: Name) = isUserVarName("" + name)
def isInternalTermName(name: Name) = isInternalVarName("" + name)
}
import naming._
@@ -260,11 +239,9 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
/** Instantiate a compiler. Overridable. */
protected def newCompiler(settings: Settings, reporter: Reporter): ReplGlobal = {
- settings.outputDirs setSingleOutput virtualDirectory
+ settings.outputDirs setSingleOutput replOutput.dir
settings.exposeEmptyPackage.value = true
- new Global(settings, reporter) with ReplGlobal {
- override def toString: String = "<global>"
- }
+ new Global(settings, reporter) with ReplGlobal { override def toString: String = "<global>" }
}
/** Parent classloader. Overridable. */
@@ -293,57 +270,75 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
if (_classLoader == null)
_classLoader = makeClassLoader()
}
- def classLoader: AbstractFileClassLoader = {
+ def classLoader: util.AbstractFileClassLoader = {
ensureClassLoader()
_classLoader
}
- private class TranslatingClassLoader(parent: ClassLoader) extends AbstractFileClassLoader(virtualDirectory, parent) {
+
+ def backticked(s: String): String = (
+ (s split '.').toList map {
+ case "_" => "_"
+ case s if nme.keywords(newTermName(s)) => s"`$s`"
+ case s => s
+ } mkString "."
+ )
+
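backticked quotes any path segment that happens to be a keyword, so generated accessor paths still parse. The same idea in isolation, with a tiny hard-coded keyword set standing in for the compiler's nme.keywords:

    object BacktickSketch {
      // Tiny stand-in for nme.keywords; the real set lives in the compiler.
      private val keywords = Set("type", "object", "class", "def", "val", "var", "with", "package")

      // Same idea as backticked above: quote any dotted segment that is a keyword.
      def backticked(path: String): String =
        path.split('.').toList.map {
          case "_"              => "_"
          case s if keywords(s) => s"`$s`"
          case s                => s
        }.mkString(".")

      def main(args: Array[String]): Unit =
        println(backticked("scala.reflect.runtime.package.type")) // scala.reflect.runtime.`package`.`type`
    }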
+ abstract class PhaseDependentOps {
+ def shift[T](op: => T): T
+
+ def path(name: => Name): String = shift(path(symbolOfName(name)))
+ def path(sym: Symbol): String = backticked(shift(sym.fullName))
+ def sig(sym: Symbol): String = shift(sym.defString)
+ }
+ object typerOp extends PhaseDependentOps {
+ def shift[T](op: => T): T = exitingTyper(op)
+ }
+ object flatOp extends PhaseDependentOps {
+ def shift[T](op: => T): T = exitingFlatten(op)
+ }
+
+ def originalPath(name: String): String = originalPath(name: TermName)
+ def originalPath(name: Name): String = typerOp path name
+ def originalPath(sym: Symbol): String = typerOp path sym
+ def flatPath(sym: Symbol): String = flatOp shift sym.javaClassName
+ def translatePath(path: String) = {
+ val sym = if (path endsWith "$") symbolOfTerm(path.init) else symbolOfIdent(path)
+ sym match {
+ case NoSymbol => None
+ case _ => Some(flatPath(sym))
+ }
+ }
+ def translateEnclosingClass(n: String) = {
+ def enclosingClass(s: Symbol): Symbol =
+ if (s == NoSymbol || s.isClass) s else enclosingClass(s.owner)
+ enclosingClass(symbolOfTerm(n)) match {
+ case NoSymbol => None
+ case c => Some(flatPath(c))
+ }
+ }
+
+ private class TranslatingClassLoader(parent: ClassLoader) extends util.AbstractFileClassLoader(replOutput.dir, parent) {
/** Overridden here to try translating a simple name to the generated
* class name if the original attempt fails. This method is used by
* getResourceAsStream as well as findClass.
*/
- override protected def findAbstractFile(name: String): AbstractFile = {
+ override protected def findAbstractFile(name: String): AbstractFile =
super.findAbstractFile(name) match {
- // deadlocks on startup if we try to translate names too early
- case null if isInitializeComplete =>
- generatedName(name) map (x => super.findAbstractFile(x)) orNull
- case file =>
- file
+ case null => translatePath(name) map (super.findAbstractFile(_)) orNull
+ case file => file
}
- }
}
- private def makeClassLoader(): AbstractFileClassLoader =
+ private def makeClassLoader(): util.AbstractFileClassLoader =
new TranslatingClassLoader(parentClassLoader match {
case null => ScalaClassLoader fromURLs compilerClasspath
case p => new URLClassLoader(compilerClasspath, p)
})
- def getInterpreterClassLoader() = classLoader
-
// Set the current Java "context" class loader to this interpreter's class loader
def setContextClassLoader() = classLoader.setAsContext()
- /** Given a simple repl-defined name, returns the real name of
- * the class representing it, e.g. for "Bippy" it may return
- * {{{
- * $line19.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$Bippy
- * }}}
- */
- def generatedName(simpleName: String): Option[String] = {
- if (simpleName endsWith nme.MODULE_SUFFIX_STRING) optFlatName(simpleName.init) map (_ + nme.MODULE_SUFFIX_STRING)
- else optFlatName(simpleName)
- }
- def flatName(id: String) = optFlatName(id) getOrElse id
- def optFlatName(id: String) = requestForIdent(id) map (_ fullFlatName id)
-
- def allDefinedNames = definedNameMap.keys.toList.sorted
- def pathToType(id: String): String = pathToName(newTypeName(id))
- def pathToTerm(id: String): String = pathToName(newTermName(id))
- def pathToName(name: Name): String = {
- if (definedNameMap contains name)
- definedNameMap(name) fullPath name
- else name.toString
- }
+ def allDefinedNames: List[Name] = exitingTyper(replScope.toList.map(_.name).sorted)
+ def unqualifiedIds: List[String] = allDefinedNames map (_.decode) sorted
/** Most recent tree handled which wasn't wholly synthetic. */
private def mostRecentlyHandledTree: Option[Tree] = {
@@ -356,51 +351,47 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
None
}
- /** Stubs for work in progress. */
- def handleTypeRedefinition(name: TypeName, old: Request, req: Request) = {
- for (t1 <- old.simpleNameOfType(name) ; t2 <- req.simpleNameOfType(name)) {
- repldbg("Redefining type '%s'\n %s -> %s".format(name, t1, t2))
- }
- }
+ private def updateReplScope(sym: Symbol, isDefined: Boolean) {
+ def log(what: String) {
+ val mark = if (sym.isType) "t " else "v "
+ val name = exitingTyper(sym.nameString)
+ val info = cleanTypeAfterTyper(sym)
+ val defn = sym defStringSeenAs info
- def handleTermRedefinition(name: TermName, old: Request, req: Request) = {
- for (t1 <- old.compilerTypeOf get name ; t2 <- req.compilerTypeOf get name) {
- // Printing the types here has a tendency to cause assertion errors, like
- // assertion failed: fatal: <refinement> has owner value x, but a class owner is required
- // so DBG is by-name now to keep it in the family. (It also traps the assertion error,
- // but we don't want to unnecessarily risk hosing the compiler's internal state.)
- repldbg("Redefining term '%s'\n %s -> %s".format(name, t1, t2))
+ scopelog(f"[$mark$what%6s] $name%-25s $defn%s")
}
+ if (ObjectClass isSubClass sym.owner) return
+ // unlink previous
+ replScope lookupAll sym.name foreach { sym =>
+ log("unlink")
+ replScope unlink sym
+ }
+ val what = if (isDefined) "define" else "import"
+ log(what)
+ replScope enter sym
}
def recordRequest(req: Request) {
- if (req == null || referencedNameMap == null)
+ if (req == null)
return
prevRequests += req
- req.referencedNames foreach (x => referencedNameMap(x) = req)
// warning about serially defining companions. It'd be easy
// enough to just redefine them together but that may not always
// be what people want so I'm waiting until I can do it better.
- for {
- name <- req.definedNames filterNot (x => req.definedNames contains x.companionName)
- oldReq <- definedNameMap get name.companionName
- newSym <- req.definedSymbols get name
- oldSym <- oldReq.definedSymbols get name.companionName
- if Seq(oldSym, newSym).permutations exists { case Seq(s1, s2) => s1.isClass && s2.isModule }
- } {
- afterTyper(replwarn(s"warning: previously defined $oldSym is not a companion to $newSym."))
- replwarn("Companions must be defined together; you may wish to use :paste mode for this.")
- }
-
- // Updating the defined name map
- req.definedNames foreach { name =>
- if (definedNameMap contains name) {
- if (name.isTypeName) handleTypeRedefinition(name.toTypeName, definedNameMap(name), req)
- else handleTermRedefinition(name.toTermName, definedNameMap(name), req)
+ exitingTyper {
+ req.defines filterNot (s => req.defines contains s.companionSymbol) foreach { newSym =>
+ val oldSym = replScope lookup newSym.name.companionName
+ if (Seq(oldSym, newSym).permutations exists { case Seq(s1, s2) => s1.isClass && s2.isModule }) {
+ replwarn(s"warning: previously defined $oldSym is not a companion to $newSym.")
+ replwarn("Companions must be defined together; you may wish to use :paste mode for this.")
+ }
}
- definedNameMap(name) = req
+ }
+ exitingTyper {
+ req.imports foreach (sym => updateReplScope(sym, isDefined = false))
+ req.defines foreach (sym => updateReplScope(sym, isDefined = true))
}
}
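recordRequest now keeps exactly one live definition per name: any previous symbol with that name is unlinked from replScope before the new one is entered. The same replace-on-redefinition bookkeeping, sketched with a plain mutable map rather than the compiler's Scope:

    import scala.collection.mutable

    object ReplScopeSketch {
      // Name -> most recent definition; the real code stores Symbols in a compiler Scope.
      private val replScope = mutable.LinkedHashMap.empty[String, String]

      def updateReplScope(name: String, defn: String, isDefined: Boolean): Unit = {
        replScope.remove(name).foreach(old => println(s"[unlink] $name was: $old"))
        println(s"[${if (isDefined) "define" else "import"}] $name")
        replScope(name) = defn
      }

      def main(args: Array[String]): Unit = {
        updateReplScope("x", "val x: Int", isDefined = true)
        updateReplScope("x", "val x: String", isDefined = true) // redefinition unlinks the first
        println(replScope)
      }
    }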
@@ -409,19 +400,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
printMessage(msg)
}
- def isParseable(line: String): Boolean = {
- beSilentDuring {
- try parse(line) match {
- case Some(xs) => xs.nonEmpty // parses as-is
- case None => true // incomplete
- }
- catch { case x: Exception => // crashed the compiler
- replwarn("Exception in isParseable(\"" + line + "\"): " + x)
- false
- }
- }
- }
-
def compileSourcesKeepingRun(sources: SourceFile*) = {
val run = new Run()
reporter.reset()
@@ -448,18 +426,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
executingRequest
}
- // rewriting "5 // foo" to "val x = { 5 // foo }" creates broken code because
- // the close brace is commented out. Strip single-line comments.
- // ... but for error message output reasons this is not used, and rather than
- // enclosing in braces it is constructed like "val x =\n5 // foo".
- private def removeComments(line: String): String = {
- showCodeIfDebugging(line) // as we're about to lose our // show
- line.lines map (s => s indexOf "//" match {
- case -1 => s
- case idx => s take idx
- }) mkString "\n"
- }
-
private def safePos(t: Tree, alt: Int): Int =
try t.pos.startOrPoint
catch { case _: UnsupportedOperationException => alt }
@@ -551,8 +517,8 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
Right(buildRequest(line, trees))
}
- // normalize non-public types so we don't see protected aliases like Self
- def normalizeNonPublic(tp: Type) = tp match {
+ // dealias non-public types so we don't see protected aliases like Self
+ def dealiasNonPublic(tp: Type) = tp match {
case TypeRef(_, sym, _) if sym.isAliasType && !sym.isPublic => tp.dealias
case _ => tp
}
@@ -565,11 +531,84 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
* The return value is whether the line was interpreter successfully,
* e.g. that there were no parse errors.
*/
- def interpret(line: String): IR.Result = interpret(line, false)
- def interpretSynthetic(line: String): IR.Result = interpret(line, true)
- def interpret(line: String, synthetic: Boolean): IR.Result = {
- def loadAndRunReq(req: Request) = {
- classLoader.setAsContext()
+ def interpret(line: String): IR.Result = interpret(line, synthetic = false)
+ def interpretSynthetic(line: String): IR.Result = interpret(line, synthetic = true)
+ def interpret(line: String, synthetic: Boolean): IR.Result = compile(line, synthetic) match {
+ case Left(result) => result
+ case Right(req) => new WrappedRequest(req).loadAndRunReq
+ }
+
+ private def compile(line: String, synthetic: Boolean): Either[IR.Result, Request] = {
+ if (global == null) Left(IR.Error)
+ else requestFromLine(line, synthetic) match {
+ case Left(result) => Left(result)
+ case Right(req) =>
+ // null indicates a disallowed statement type; otherwise compile and
+ // fail if false (implying e.g. a type error)
+ if (req == null || !req.compile) Left(IR.Error) else Right(req)
+ }
+ }
+
+ var code = ""
+ var bound = false
+ @throws(classOf[ScriptException])
+ def compile(script: String): CompiledScript = {
+ if (!bound) {
+ quietBind("bindings", getBindings(ScriptContext.ENGINE_SCOPE))
+ bound = true
+ }
+ val cat = code + script
+ compile(cat, false) match {
+ case Left(result) => result match {
+ case IR.Incomplete => {
+ code = cat + "\n"
+ new CompiledScript {
+ def eval(context: ScriptContext): Object = null
+ def getEngine: ScriptEngine = IMain.this
+ }
+ }
+ case _ => {
+ code = ""
+ throw new ScriptException("compile-time error")
+ }
+ }
+ case Right(req) => {
+ code = ""
+ new WrappedRequest(req)
+ }
+ }
+ }
+
+ @throws(classOf[ScriptException])
+ def compile(reader: Reader): CompiledScript = {
+ val writer = new StringWriter()
+ var c = reader.read()
+ while(c != -1) {
+ writer.write(c)
+ c = reader.read()
+ }
+ reader.close()
+ compile(writer.toString())
+ }
+
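compile(script) above buffers input across calls: on IR.Incomplete the fragment is appended to code and a no-op CompiledScript is returned, so one statement may arrive over several eval calls. The buffering idea on its own, with a toy completeness check in place of the real parser:

    object IncompleteInputSketch {
      private var pending = ""

      // Toy completeness check: balanced braces/parens stand in for the real parser.
      private def isComplete(src: String): Boolean =
        src.count(_ == '{') == src.count(_ == '}') &&
        src.count(_ == '(') == src.count(_ == ')')

      /** Returns Some(fullStatement) once enough input has accumulated, else None. */
      def feed(fragment: String): Option[String] = {
        val candidate = pending + fragment
        if (isComplete(candidate)) { pending = ""; Some(candidate) }
        else                       { pending = candidate + "\n"; None }
      }

      def main(args: Array[String]): Unit = {
        println(feed("def f(x: Int) = {")) // None: incomplete, buffered
        println(feed("  x + 1"))           // None: still open
        println(feed("}"))                 // Some(...): the whole definition
      }
    }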
+ private class WrappedRequest(val req: Request) extends CompiledScript {
+ var recorded = false
+
+ @throws(classOf[ScriptException])
+ def eval(context: ScriptContext): Object = {
+ val result = req.lineRep.evalEither match {
+ case Left(e: Exception) => throw new ScriptException(e)
+ case Left(_) => throw new ScriptException("run-time error")
+ case Right(result) => result.asInstanceOf[Object]
+ }
+ if (!recorded) {
+ recordRequest(req)
+ recorded = true
+ }
+ result
+ }
+
+ def loadAndRunReq = classLoader.asContext {
val (result, succeeded) = req.loadAndRun
/** To our displeasure, ConsoleReporter offers only printMessage,
@@ -594,15 +633,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
}
}
- if (global == null) IR.Error
- else requestFromLine(line, synthetic) match {
- case Left(result) => result
- case Right(req) =>
- // null indicates a disallowed statement type; otherwise compile and
- // fail if false (implying e.g. a type error)
- if (req == null || !req.compile) IR.Error
- else loadAndRunReq(req)
- }
+ def getEngine: ScriptEngine = IMain.this
}
/** Bind a specified name to a specified value. The name may
@@ -615,7 +646,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
*/
def bind(name: String, boundType: String, value: Any, modifiers: List[String] = Nil): IR.Result = {
val bindRep = new ReadEvalPrint()
- val run = bindRep.compile("""
+ bindRep.compile("""
|object %s {
| var value: %s = _
| def set(x: Any) = value = x.asInstanceOf[%s]
@@ -645,24 +676,15 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def rebind(p: NamedParam): IR.Result = {
val name = p.name
- val oldType = typeOfTerm(name) orElse { return IR.Error }
val newType = p.tpe
val tempName = freshInternalVarName()
quietRun("val %s = %s".format(tempName, name))
quietRun("val %s = %s.asInstanceOf[%s]".format(name, tempName, newType))
}
- def quietImport(ids: String*): IR.Result = beQuietDuring(addImports(ids: _*))
- def addImports(ids: String*): IR.Result =
- if (ids.isEmpty) IR.Success
- else interpret("import " + ids.mkString(", "))
-
def quietBind(p: NamedParam): IR.Result = beQuietDuring(bind(p))
def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value)
def bind[T: ru.TypeTag : ClassTag](name: String, value: T): IR.Result = bind((name, value))
- def bindSyntheticValue(x: Any): IR.Result = bindValue(freshInternalVarName(), x)
- def bindValue(x: Any): IR.Result = bindValue(freshUserVarName(), x)
- def bindValue(name: String, x: Any): IR.Result = bind(name, TypeStrings.fromValue(x), x)
/** Reset this interpreter, forgetting all user-specified requests. */
def reset() {
@@ -670,9 +692,8 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
resetClassLoader()
resetAllCreators()
prevRequests.clear()
- referencedNameMap.clear()
- definedNameMap.clear()
- virtualDirectory.clear()
+ resetReplScope()
+ replOutput.dir.clear()
}
/** This instance is no longer needed, so release any resources
@@ -693,10 +714,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
class ReadEvalPrint(lineId: Int) {
def this() = this(freshLineId())
- private var lastRun: Run = _
- private var evalCaught: Option[Throwable] = None
- private var conditionalWarnings: List[ConditionalWarning] = Nil
-
val packageName = sessionNames.line + lineId
val readName = sessionNames.read
val evalName = sessionNames.eval
@@ -723,7 +740,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def readPath = pathTo(readName)
def evalPath = pathTo(evalName)
- def printPath = pathTo(printName)
def call(name: String, args: Any*): AnyRef = {
val m = evalMethod(name)
@@ -738,10 +754,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
try Right(call(name, args: _*))
catch { case ex: Throwable => Left(ex) }
- def callOpt(name: String, args: Any*): Option[AnyRef] =
- try Some(call(name, args: _*))
- catch { case ex: Throwable => bindError(ex) ; None }
-
class EvalException(msg: String, cause: Throwable) extends RuntimeException(msg, cause) { }
private def evalError(path: String, ex: Throwable) =
@@ -753,9 +765,13 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
}
lazy val evalClass = load(evalPath)
- lazy val evalValue = callEither(resultName) match {
- case Left(ex) => evalCaught = Some(ex) ; None
- case Right(result) => Some(result)
+
+ def evalEither = callEither(resultName) match {
+ case Left(ex) => ex match {
+ case ex: NullPointerException => Right(null)
+ case ex => Left(unwrap(ex))
+ }
+ case Right(result) => Right(result)
}
def compile(source: String): Boolean = compileAndSaveRun("<console>", source)
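evalEither funnels evaluation through an Either so WrappedRequest.eval can rethrow uniformly, treating a NullPointerException from the lazy result slot as a legitimate null rather than an error. The same discipline in a standalone sketch; callResult here is a hypothetical stand-in for invoking the compiled line's result accessor:

    object EvalEitherSketch {
      // Hypothetical stand-in for calling the compiled line's result accessor.
      private def callResult(): AnyRef = sys.error("boom")

      def evalEither: Either[Throwable, AnyRef] =
        try Right(callResult())
        catch {
          case _: NullPointerException => Right(null) // a null result is not an error
          case e: Throwable            => Left(e)     // anything else goes back to the caller
        }

      def main(args: Array[String]): Unit =
        evalEither match {
          case Right(v) => println(s"value: $v")
          case Left(e)  => println(s"run-time error: ${e.getMessage}")
        }
    }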
@@ -764,10 +780,10 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
* following accessPath into the outer one.
*/
def resolvePathToSymbol(accessPath: String): Symbol = {
- val readRoot = getRequiredModule(readPath) // the outermost wrapper
+ val readRoot = getModuleIfDefined(readPath) // the outermost wrapper
(accessPath split '.').foldLeft(readRoot: Symbol) {
case (sym, "") => sym
- case (sym, name) => afterTyper(termMember(sym, name))
+ case (sym, name) => exitingTyper(termMember(sym, name))
}
}
/** We get a bunch of repeated warnings for reasons I haven't
@@ -793,6 +809,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
mostRecentWarnings = warnings
}
private def evalMethod(name: String) = evalClass.getMethods filter (_.getName == name) match {
+ case Array() => null
case Array(method) => method
case xs => sys.error("Internal error: eval object " + evalClass + ", " + xs.mkString("\n", "\n", ""))
}
@@ -800,15 +817,16 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
showCodeIfDebugging(code)
val (success, run) = compileSourcesKeepingRun(new BatchSourceFile(label, packaged(code)))
updateRecentWarnings(run)
- lastRun = run
success
}
}
/** One line of code submitted by the user for interpretation */
- // private
class Request(val line: String, val trees: List[Tree]) {
- val reqId = nextReqId()
+ def defines = defHandlers flatMap (_.definedSymbols)
+ def imports = importedSymbols
+ def value = Some(handlers.last) filter (h => h.definesValue) map (h => definedSymbols(h.definesTerm.get)) getOrElse NoSymbol
+
val lineRep = new ReadEvalPrint()
private var _originalLine: String = null
@@ -819,50 +837,31 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
val handlers: List[MemberHandler] = trees map (memberHandlers chooseHandler _)
def defHandlers = handlers collect { case x: MemberDefHandler => x }
- /** all (public) names defined by these statements */
- val definedNames = handlers flatMap (_.definedNames)
-
/** list of names used by this expression */
val referencedNames: List[Name] = handlers flatMap (_.referencedNames)
/** def and val names */
def termNames = handlers flatMap (_.definesTerm)
def typeNames = handlers flatMap (_.definesType)
- def definedOrImported = handlers flatMap (_.definedOrImported)
- def definedSymbolList = defHandlers flatMap (_.definedSymbols)
-
- def definedTypeSymbol(name: String) = definedSymbols(newTypeName(name))
- def definedTermSymbol(name: String) = definedSymbols(newTermName(name))
+ def importedSymbols = handlers flatMap {
+ case x: ImportHandler => x.importedSymbols
+ case _ => Nil
+ }
/** Code to import bound names from previous lines - accessPath is code to
* append to objectName to access anything bound by request.
*/
val ComputedImports(importsPreamble, importsTrailer, accessPath) =
- importsCode(referencedNames.toSet)
-
- /** Code to access a variable with the specified name */
- def fullPath(vname: String) = (
- lineRep.readPath + accessPath + ".`%s`".format(vname)
- )
- /** Same as fullpath, but after it has been flattened, so:
- * $line5.$iw.$iw.$iw.Bippy // fullPath
- * $line5.$iw$$iw$$iw$Bippy // fullFlatName
- */
- def fullFlatName(name: String) =
- lineRep.readPath + accessPath.replace('.', '$') + nme.NAME_JOIN_STRING + name
-
- /** The unmangled symbol name, but supplemented with line info. */
- def disambiguated(name: Name): String = name + " (in " + lineRep + ")"
-
- /** Code to access a variable with the specified name */
- def fullPath(vname: Name): String = fullPath(vname.toString)
+ exitingTyper(importsCode(referencedNames.toSet))
/** the line of code to compute */
def toCompute = line
+ def fullPath(vname: String) = s"${lineRep.readPath}$accessPath.`$vname`"
+
/** generate the source code for the object that computes this request */
private object ObjectSourceCode extends CodeAssembler[MemberHandler] {
- def path = pathToTerm("$intp")
+ def path = originalPath("$intp")
def envLines = {
if (!isReplPower) Nil // power mode only for now
// $intp is not bound; punt, but include the line.
@@ -872,8 +871,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
)
else List(
"def $line = " + tquoted(originalLine),
- "def $req = %s.requestForReqId(%s).orNull".format(path, reqId),
- "def $trees = if ($req eq null) Nil else $req.trees".format(lineRep.readName, path, reqId)
+ "def $trees = Nil"
)
}
@@ -889,13 +887,10 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
/** We only want to generate this code when the result
* is a value which can be referred to as-is.
*/
- val evalResult =
- if (!handlers.last.definesValue) ""
- else handlers.last.definesTerm match {
- case Some(vname) if typeOf contains vname =>
- "lazy val %s = %s".format(lineRep.resultName, fullPath(vname))
- case _ => ""
- }
+ val evalResult = Request.this.value match {
+ case NoSymbol => ""
+ case sym => "lazy val %s = %s".format(lineRep.resultName, originalPath(sym))
+ }
// first line evaluates object to make sure constructor is run
// initial "" so later code can uniformly be: + etc
val preamble = """
@@ -917,15 +912,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
val generate = (m: MemberHandler) => m resultExtractionCode Request.this
}
- // get it
- def getEvalTyped[T] : Option[T] = getEval map (_.asInstanceOf[T])
- def getEval: Option[AnyRef] = {
- // ensure it has been compiled
- compile
- // try to load it and call the value method
- lineRep.evalValue filterNot (_ == null)
- }
-
/** Compile the object file. Returns whether the compilation succeeded.
* If all goes well, the "types" map is computed. */
lazy val compile: Boolean = {
@@ -944,7 +930,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
val name = dh.member.name
definedSymbols get name foreach { sym =>
dh.member setSymbol sym
- repldbg("Set symbol of " + name + " to " + sym.defString)
+ repldbg("Set symbol of " + name + " to " + symbolDefString(sym))
}
}
@@ -954,11 +940,10 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
}
lazy val resultSymbol = lineRep.resolvePathToSymbol(accessPath)
- def applyToResultMember[T](name: Name, f: Symbol => T) = afterTyper(f(resultSymbol.info.nonPrivateDecl(name)))
+ def applyToResultMember[T](name: Name, f: Symbol => T) = exitingTyper(f(resultSymbol.info.nonPrivateDecl(name)))
/* typeOf lookup with encoding */
def lookupTypeOf(name: Name) = typeOf.getOrElse(name, typeOf(global.encode(name.toString)))
- def simpleNameOfType(name: TypeName) = (compilerTypeOf get name) map (_.typeSymbol.simpleName)
private def typeMap[T](f: Type => T) =
mapFrom[Name, Name, T](termNames ++ typeNames)(x => f(cleanMemberDecl(resultSymbol, x)))
@@ -966,11 +951,8 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
/** Types of variables defined by this request. */
lazy val compilerTypeOf = typeMap[Type](x => x) withDefaultValue NoType
/** String representations of same. */
- lazy val typeOf = typeMap[String](tp => afterTyper(tp.toString))
+ lazy val typeOf = typeMap[String](tp => exitingTyper(tp.toString))
- // lazy val definedTypes: Map[Name, Type] = {
- // typeNames map (x => x -> afterTyper(resultSymbol.info.nonPrivateDecl(x).tpe)) toMap
- // }
lazy val definedSymbols = (
termNames.map(x => x -> applyToResultMember(x, x => x)) ++
typeNames.map(x => x -> compilerTypeOf(x).typeSymbolDirect)
@@ -987,6 +969,16 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
override def toString = "Request(line=%s, %s trees)".format(line, trees.size)
}
+ def createBindings: Bindings = new SimpleBindings
+
+ @throws(classOf[ScriptException])
+ def eval(script: String, context: ScriptContext): Object = compile(script).eval(context)
+
+ @throws(classOf[ScriptException])
+ def eval(reader: Reader, context: ScriptContext): Object = compile(reader).eval(context)
+
+ override def finalize = close
+
/** Returns the name of the most recent interpreter result.
* Mostly this exists so you can conveniently invoke methods on
* the previous result.
@@ -1003,45 +995,48 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
private var mostRecentWarnings: List[(global.Position, String)] = Nil
def lastWarnings = mostRecentWarnings
- def treesForRequestId(id: Int): List[Tree] =
- requestForReqId(id).toList flatMap (_.trees)
-
- def requestForReqId(id: Int): Option[Request] =
- if (executingRequest != null && executingRequest.reqId == id) Some(executingRequest)
- else prevRequests find (_.reqId == id)
+ private lazy val importToGlobal = global mkImporter ru
+ private lazy val importToRuntime = ru mkImporter global
+ private lazy val javaMirror = ru.rootMirror match {
+ case x: ru.JavaMirror => x
+ case _ => null
+ }
+ private implicit def importFromRu(sym: ru.Symbol): Symbol = importToGlobal importSymbol sym
+ private implicit def importToRu(sym: Symbol): ru.Symbol = importToRuntime importSymbol sym
- def requestForName(name: Name): Option[Request] = {
- assert(definedNameMap != null, "definedNameMap is null")
- definedNameMap get name
+ def classOfTerm(id: String): Option[JClass] = symbolOfTerm(id) match {
+ case NoSymbol => None
+ case sym => Some(javaMirror runtimeClass importToRu(sym).asClass)
}
- def requestForIdent(line: String): Option[Request] =
- requestForName(newTermName(line)) orElse requestForName(newTypeName(line))
+ def typeOfTerm(id: String): Type = symbolOfTerm(id).tpe
- def requestHistoryForName(name: Name): List[Request] =
- prevRequests.toList.reverse filter (_.definedNames contains name)
+ def valueOfTerm(id: String): Option[Any] = exitingTyper {
+ def value() = {
+ val sym0 = symbolOfTerm(id)
+ val sym = (importToRuntime importSymbol sym0).asTerm
+ val module = runtimeMirror.reflectModule(sym.owner.companionSymbol.asModule).instance
+ val module1 = runtimeMirror.reflect(module)
+ val invoker = module1.reflectField(sym)
- def definitionForName(name: Name): Option[MemberHandler] =
- requestForName(name) flatMap { req =>
- req.handlers find (_.definedNames contains name)
+ invoker.get
}
- def valueOfTerm(id: String): Option[AnyRef] =
- requestForName(newTermName(id)) flatMap (_.getEval)
-
- def classOfTerm(id: String): Option[JClass] =
- valueOfTerm(id) map (_.getClass)
-
- def typeOfTerm(id: String): Type = newTermName(id) match {
- case nme.ROOTPKG => RootClass.tpe
- case name => requestForName(name).fold(NoType: Type)(_ compilerTypeOf name)
+ try Some(value()) catch { case _: Exception => None }
}
- def symbolOfType(id: String): Symbol =
- requestForName(newTypeName(id)).fold(NoSymbol: Symbol)(_ definedTypeSymbol id)
+ /** It's a bit of a shotgun approach, but for now we will gain in
+ * robustness. Try a symbol-producing operation at phase typer, and
+ * if that is NoSymbol, try again at phase flatten. I'll be able to
+ * lose this and run only from exitingTyper as soon as I figure out
+ * exactly where a flat name is sneaking in when calculating imports.
+ */
+ def tryTwice(op: => Symbol): Symbol = exitingTyper(op) orElse exitingFlatten(op)
- def symbolOfTerm(id: String): Symbol =
- requestForIdent(newTermName(id)).fold(NoSymbol: Symbol)(_ definedTermSymbol id)
+ def symbolOfIdent(id: String): Symbol = symbolOfType(id) orElse symbolOfTerm(id)
+ def symbolOfType(id: String): Symbol = tryTwice(replScope lookup (id: TypeName))
+ def symbolOfTerm(id: String): Symbol = tryTwice(replScope lookup (id: TermName))
+ def symbolOfName(id: Name): Symbol = replScope lookup id
def runtimeClassAndTypeOfTerm(id: String): Option[(JClass, Type)] = {
classOfTerm(id) flatMap { clazz =>
@@ -1062,14 +1057,18 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
else NoType
}
}
- def cleanMemberDecl(owner: Symbol, member: Name): Type = afterTyper {
- normalizeNonPublic {
- owner.info.nonPrivateDecl(member).tpe match {
- case NullaryMethodType(tp) => tp
- case tp => tp
- }
- }
+
+ def cleanTypeAfterTyper(sym: => Symbol): Type = {
+ exitingTyper(
+ dealiasNonPublic(
+ dropNullaryMethod(
+ sym.tpe_*
+ )
+ )
+ )
}
+ def cleanMemberDecl(owner: Symbol, member: Name): Type =
+ cleanTypeAfterTyper(owner.info nonPrivateDecl member)
object exprTyper extends {
val repl: IMain.this.type = imain
@@ -1083,64 +1082,35 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def typeOfExpression(expr: String, silent: Boolean = true): Type =
exprTyper.typeOfExpression(expr, silent)
- protected def onlyTerms(xs: List[Name]) = xs collect { case x: TermName => x }
- protected def onlyTypes(xs: List[Name]) = xs collect { case x: TypeName => x }
+ protected def onlyTerms(xs: List[Name]): List[TermName] = xs collect { case x: TermName => x }
+ protected def onlyTypes(xs: List[Name]): List[TypeName] = xs collect { case x: TypeName => x }
def definedTerms = onlyTerms(allDefinedNames) filterNot isInternalTermName
def definedTypes = onlyTypes(allDefinedNames)
- def definedSymbols = prevRequestList.flatMap(_.definedSymbols.values).toSet[Symbol]
- def definedSymbolList = prevRequestList flatMap (_.definedSymbolList) filterNot (s => isInternalTermName(s.name))
+ def definedSymbolList = prevRequestList flatMap (_.defines) filterNot (s => isInternalTermName(s.name))
// Terms with user-given names (i.e. not res0 and not synthetic)
def namedDefinedTerms = definedTerms filterNot (x => isUserVarName("" + x) || directlyBoundNames(x))
- private def findName(name: Name) = definedSymbols find (_.name == name) getOrElse NoSymbol
-
- /** Translate a repl-defined identifier into a Symbol.
- */
- def apply(name: String): Symbol =
- types(name) orElse terms(name)
-
- def types(name: String): Symbol = {
- val tpname = newTypeName(name)
- findName(tpname) orElse getClassIfDefined(tpname)
+ private var _replScope: Scope = _
+ private def resetReplScope() {
+ _replScope = newScope
}
- def terms(name: String): Symbol = {
- val termname = newTypeName(name)
- findName(termname) orElse getModuleIfDefined(termname)
- }
- // [Eugene to Paul] possibly you could make use of TypeTags here
- def types[T: ClassTag] : Symbol = types(classTag[T].runtimeClass.getName)
- def terms[T: ClassTag] : Symbol = terms(classTag[T].runtimeClass.getName)
- def apply[T: ClassTag] : Symbol = apply(classTag[T].runtimeClass.getName)
+ def replScope = {
+ if (_replScope eq null)
+ _replScope = newScope
- def classSymbols = allDefSymbols collect { case x: ClassSymbol => x }
- def methodSymbols = allDefSymbols collect { case x: MethodSymbol => x }
+ _replScope
+ }
- /** the previous requests this interpreter has processed */
private var executingRequest: Request = _
private val prevRequests = mutable.ListBuffer[Request]()
- private val referencedNameMap = mutable.Map[Name, Request]()
- private val definedNameMap = mutable.Map[Name, Request]()
private val directlyBoundNames = mutable.Set[Name]()
- def allHandlers = prevRequestList flatMap (_.handlers)
- def allDefHandlers = allHandlers collect { case x: MemberDefHandler => x }
- def allDefSymbols = allDefHandlers map (_.symbol) filter (_ ne NoSymbol)
-
- def lastRequest = if (prevRequests.isEmpty) null else prevRequests.last
- def prevRequestList = prevRequests.toList
- def allSeenTypes = prevRequestList flatMap (_.typeOf.values.toList) distinct
- def allImplicits = allHandlers filter (_.definesImplicit) flatMap (_.definedNames)
- def importHandlers = allHandlers collect { case x: ImportHandler => x }
-
- def visibleTermNames: List[Name] = definedTerms ++ importedTerms distinct
-
- /** Another entry point for tab-completion, ids in scope */
- def unqualifiedIds = visibleTermNames map (_.toString) filterNot (_ contains "$") sorted
-
- /** Parse the ScalaSig to find type aliases */
- def aliasForType(path: String) = ByteCode.aliasForType(path)
+ def allHandlers = prevRequestList flatMap (_.handlers)
+ def lastRequest = if (prevRequests.isEmpty) null else prevRequests.last
+ def prevRequestList = prevRequests.toList
+ def importHandlers = allHandlers collect { case x: ImportHandler => x }
def withoutUnwrapping(op: => Unit): Unit = {
val saved = isettings.unwrapStrings
@@ -1151,7 +1121,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def symbolDefString(sym: Symbol) = {
TypeStrings.quieter(
- afterTyper(sym.defString),
+ exitingTyper(sym.defString),
sym.owner.name + ".this.",
sym.owner.fullName + "."
)
@@ -1161,13 +1131,12 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
/** Secret bookcase entrance for repl debuggers: end the line
* with "// show" and see what's going on.
*/
- def isShow = code.lines exists (_.trim endsWith "// show")
- def isShowRaw = code.lines exists (_.trim endsWith "// raw")
-
- // old style
- beSilentDuring(parse(code)) foreach { ts =>
- ts foreach { t =>
- withoutUnwrapping(repldbg(asCompactString(t)))
+ def isShow = code.lines exists (_.trim endsWith "// show")
+ if (isReplDebug || isShow) {
+ beSilentDuring(parse(code)) foreach { ts =>
+ ts foreach { t =>
+ withoutUnwrapping(echo(asCompactString(t)))
+ }
}
}
}
@@ -1181,6 +1150,48 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
/** Utility methods for the Interpreter. */
object IMain {
+ class Factory extends ScriptEngineFactory {
+ @BeanProperty
+ val engineName = "Scala Interpreter"
+
+ @BeanProperty
+ val engineVersion = "1.0"
+
+ @BeanProperty
+ val extensions: JList[String] = Arrays.asList("scala")
+
+ @BeanProperty
+ val languageName = "Scala"
+
+ @BeanProperty
+ val languageVersion = versionString
+
+ def getMethodCallSyntax(obj: String, m: String, args: String*): String = null
+
+ @BeanProperty
+ val mimeTypes: JList[String] = Arrays.asList("application/x-scala")
+
+ @BeanProperty
+ val names: JList[String] = Arrays.asList("scala")
+
+ def getOutputStatement(toDisplay: String): String = null
+
+ def getParameter(key: String): Object = key match {
+ case ScriptEngine.ENGINE => engineName
+ case ScriptEngine.ENGINE_VERSION => engineVersion
+ case ScriptEngine.LANGUAGE => languageName
+ case ScriptEngine.LANGUAGE_VERSION => languageVersion
+ case ScriptEngine.NAME => names.get(0)
+ case _ => null
+ }
+
+ def getProgram(statements: String*): String = null
+
+ def getScriptEngine: ScriptEngine = new IMain(this, new Settings() {
+ usemanifestcp.value = true
+ })
+ }
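The Factory above is what makes the interpreter reachable through the standard javax.script machinery. A sketch of client code, assuming the factory has been registered on the classpath (e.g. via META-INF/services) under one of the names it advertises:

    import javax.script.{ Compilable, ScriptEngineManager }

    object EngineClientSketch {
      def main(args: Array[String]): Unit = {
        // Look the engine up by one of the names the Factory advertises.
        val engine = new ScriptEngineManager().getEngineByName("scala")
        if (engine == null) println("no Scala engine registered")
        else {
          engine.eval("""println("hello from the script engine")""")
          engine match {
            case c: Compilable =>
              val compiled = c.compile("1 + 1") // compile once...
              println(compiled.eval())          // ...evaluate later
            case _ =>
          }
        }
      }
    }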
+
// The two name forms this is catching are the two sides of this assignment:
//
// $line3.$read.$iw.$iw.Bippy =
diff --git a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala b/src/repl/scala/tools/nsc/interpreter/ISettings.scala
index a8f77afcdf..9541d08db1 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ISettings.scala
@@ -12,13 +12,6 @@ package interpreter
* @author Lex Spoon, 2007/3/24
**/
class ISettings(intp: IMain) {
- /** A list of paths where :load should look */
- var loadPath = List(".")
-
- /** Set this to true to see repl machinery under -Yrich-exceptions.
- */
- var showInternalStackTraces = false
-
/** The maximum length of toString to use when printing the result
* of an evaluation. 0 means no maximum. If a printout requires
* more than this number of characters, then the printout is
@@ -32,7 +25,7 @@ class ISettings(intp: IMain) {
var maxAutoprintCompletion = 250
/** String unwrapping can be disabled if it is causing issues.
- * Settings this to false means you will see Strings like "$iw.$iw.".
+ * Setting this to false means you will see Strings like "$iw.$iw.".
*/
var unwrapStrings = true
@@ -44,7 +37,7 @@ class ISettings(intp: IMain) {
}
def deprecation: Boolean = intp.settings.deprecation.value
- def allSettings = Map(
+ def allSettings = Map[String, Any](
"maxPrintString" -> maxPrintString,
"maxAutoprintCompletion" -> maxAutoprintCompletion,
"unwrapStrings" -> unwrapStrings,
diff --git a/src/compiler/scala/tools/nsc/interpreter/Imports.scala b/src/repl/scala/tools/nsc/interpreter/Imports.scala
index 73d962b5b0..ff7bfd432c 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Imports.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Imports.scala
@@ -12,12 +12,9 @@ trait Imports {
self: IMain =>
import global._
- import definitions.{ ScalaPackage, JavaLangPackage, PredefModule }
+ import definitions.{ ObjectClass, ScalaPackage, JavaLangPackage, PredefModule }
import memberHandlers._
- def isNoImports = settings.noimports.value
- def isNoPredef = settings.nopredef.value
-
/** Synthetic import handlers for the language defined imports. */
private def makeWildcardImportHandler(sym: Symbol): ImportHandler = {
val hd :: tl = sym.fullName.split('.').toList map newTermName
@@ -31,12 +28,9 @@ trait Imports {
/** Symbols whose contents are language-defined to be imported. */
def languageWildcardSyms: List[Symbol] = List(JavaLangPackage, ScalaPackage, PredefModule)
- def languageWildcards: List[Type] = languageWildcardSyms map (_.tpe)
def languageWildcardHandlers = languageWildcardSyms map makeWildcardImportHandler
def allImportedNames = importHandlers flatMap (_.importedNames)
- def importedTerms = onlyTerms(allImportedNames)
- def importedTypes = onlyTypes(allImportedNames)
/** Types which have been wildcard imported, such as:
* val x = "abc" ; import x._ // type java.lang.String
@@ -52,17 +46,11 @@ trait Imports {
def sessionWildcards: List[Type] = {
importHandlers filter (_.importsWildcard) map (_.targetType) distinct
}
- def wildcardTypes = languageWildcards ++ sessionWildcards
def languageSymbols = languageWildcardSyms flatMap membersAtPickler
def sessionImportedSymbols = importHandlers flatMap (_.importedSymbols)
def importedSymbols = languageSymbols ++ sessionImportedSymbols
def importedTermSymbols = importedSymbols collect { case x: TermSymbol => x }
- def importedTypeSymbols = importedSymbols collect { case x: TypeSymbol => x }
- def implicitSymbols = importedSymbols filter (_.isImplicit)
-
- def importedTermNamed(name: String): Symbol =
- importedTermSymbols find (_.name.toString == name) getOrElse NoSymbol
/** Tuples of (source, imported symbols) in the order they were imported.
*/
@@ -146,44 +134,42 @@ trait Imports {
code append "object %s {\n".format(impname)
trailingBraces append "}\n"
accessPath append ("." + impname)
-
- currentImps.clear
+ currentImps.clear()
+ }
+ def maybeWrap(names: Name*) = if (names exists currentImps) addWrapper()
+ def wrapBeforeAndAfter[T](op: => T): T = {
+ addWrapper()
+ try op finally addWrapper()
}
-
- addWrapper()
// loop through previous requests, adding imports for each one
- for (ReqAndHandler(req, handler) <- reqsToUse) {
- handler match {
- // If the user entered an import, then just use it; add an import wrapping
- // level if the import might conflict with some other import
- case x: ImportHandler =>
- if (x.importsWildcard || currentImps.exists(x.importedNames contains _))
- addWrapper()
-
- code append (x.member + "\n")
-
- // give wildcard imports a import wrapper all to their own
- if (x.importsWildcard) addWrapper()
- else currentImps ++= x.importedNames
-
- // For other requests, import each defined name.
- // import them explicitly instead of with _, so that
- // ambiguity errors will not be generated. Also, quote
- // the name of the variable, so that we don't need to
- // handle quoting keywords separately.
- case x =>
- for (imv <- x.definedNames) {
- if (currentImps contains imv) addWrapper()
-
- code append ("import " + (req fullPath imv) + "\n")
- currentImps += imv
- }
+ wrapBeforeAndAfter {
+ for (ReqAndHandler(req, handler) <- reqsToUse) {
+ handler match {
+ // If the user entered an import, then just use it; add an import wrapping
+ // level if the import might conflict with some other import
+ case x: ImportHandler if x.importsWildcard =>
+ wrapBeforeAndAfter(code append (x.member + "\n"))
+ case x: ImportHandler =>
+ maybeWrap(x.importedNames: _*)
+ code append (x.member + "\n")
+ currentImps ++= x.importedNames
+
+ // For other requests, import each defined name.
+ // import them explicitly instead of with _, so that
+ // ambiguity errors will not be generated. Also, quote
+ // the name of the variable, so that we don't need to
+ // handle quoting keywords separately.
+ case x =>
+ for (sym <- x.definedSymbols) {
+ maybeWrap(sym.name)
+ code append s"import ${x.path}\n"
+ currentImps += sym.name
+ }
+ }
}
}
- // add one extra wrapper, to prevent warnings in the common case of
- // redefining the value bound in the last interpreter request.
- addWrapper()
+
ComputedImports(code.toString, trailingBraces.toString, accessPath.toString)
}
@@ -191,5 +177,5 @@ trait Imports {
prevRequestList flatMap (req => req.handlers map (req -> _))
private def membersAtPickler(sym: Symbol): List[Symbol] =
- beforePickler(sym.info.nonPrivateMembers.toList)
+ enteringPickler(sym.info.nonPrivateMembers.toList)
}
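The rewritten loop above keeps the old strategy, recast around maybeWrap and wrapBeforeAndAfter: re-import what earlier requests defined, and open a fresh nested wrapper object whenever a name would clash, so the innermost import shadows the older one instead of producing an ambiguity error. A minimal stand-alone sketch of that wrapping idea; the object and variable names here are invented and this is not the repl's ComputedImports:

object ImportWrapperSketch {
  def preamble(importsPerRequest: List[List[String]]): String = {
    val code    = new StringBuilder
    val closers = new StringBuilder
    val seen    = scala.collection.mutable.Set[String]()
    var depth   = 0
    def addWrapper(): Unit = {
      code append s"object $$iw$depth {\n"   // one nesting level per potential clash
      closers append "}\n"
      depth += 1
      seen.clear()
    }
    addWrapper()
    for (names <- importsPerRequest; name <- names) {
      if (seen(name)) addWrapper()            // same name seen again: shadow it one level deeper
      code append s"import $name\n"
      seen += name
    }
    addWrapper()                              // trailing wrapper, as the rewritten code also adds
    code.toString + closers.toString
  }
}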
diff --git a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala
index 8331fddca6..28ddf2939c 100644
--- a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala
+++ b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala
@@ -7,8 +7,6 @@ package scala.tools.nsc
package interpreter
import java.io.IOException
-import java.nio.channels.ClosedByInterruptException
-import scala.util.control.Exception._
import session.History
import InteractiveReader._
import Properties.isMac
@@ -17,22 +15,16 @@ import Properties.isMac
trait InteractiveReader {
val interactive: Boolean
- def init(): Unit
def reset(): Unit
-
def history: History
def completion: Completion
- def eraseLine(): Unit
def redrawLine(): Unit
- def currentLine: String
def readYesOrNo(prompt: String, alt: => Boolean): Boolean = readOneKey(prompt) match {
case 'y' => true
case 'n' => false
case _ => alt
}
- def readAssumingNo(prompt: String) = readYesOrNo(prompt, false)
- def readAssumingYes(prompt: String) = readYesOrNo(prompt, true)
protected def readOneLine(prompt: String): String
protected def readOneKey(prompt: String): Int
@@ -52,6 +44,6 @@ object InteractiveReader {
def apply(): InteractiveReader = SimpleReader()
@deprecated("Use `apply` instead.", "2.9.0")
- def createDefault(): InteractiveReader = apply()
+ def createDefault(): InteractiveReader = apply() // used by sbt
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
index 219cb35242..19fa562234 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
+++ b/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
@@ -6,10 +6,9 @@
package scala.tools.nsc
package interpreter
-import scala.tools.jline._
-import scala.tools.jline.console.completer._
import Completion._
import scala.collection.mutable.ListBuffer
+import scala.reflect.internal.util.StringOps.longestCommonPrefix
// REPL completor - queries supplied interpreter for valid
// completions based on current contents of buffer.
@@ -29,9 +28,6 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
if (isModule) getModuleIfDefined(name)
else getModuleIfDefined(name)
)
- def getType(name: String, isModule: Boolean) = getSymbol(name, isModule).tpe
- def typeOf(name: String) = getType(name, false)
- def moduleOf(name: String) = getType(name, true)
trait CompilerCompletion {
def tp: Type
@@ -47,12 +43,11 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
def anyRefMethodsToShow = Set("isInstanceOf", "asInstanceOf", "toString")
def tos(sym: Symbol): String = sym.decodedName
- def memberNamed(s: String) = afterTyper(effectiveTp member newTermName(s))
- def hasMethod(s: String) = memberNamed(s).isMethod
+ def memberNamed(s: String) = exitingTyper(effectiveTp member newTermName(s))
// XXX we'd like to say "filterNot (_.isDeprecated)" but this causes the
// compiler to crash for reasons not yet known.
- def members = afterTyper((effectiveTp.nonPrivateMembers.toList ++ anyMembers) filter (_.isPublic))
+ def members = exitingTyper((effectiveTp.nonPrivateMembers.toList ++ anyMembers) filter (_.isPublic))
def methods = members.toList filter (_.isMethod)
def packages = members.toList filter (_.isPackage)
def aliases = members.toList filter (_.isAliasType)
@@ -111,7 +106,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
def excludeNames: List[String] = (anyref.methodNames filterNot anyRefMethodsToShow) :+ "_root_"
def methodSignatureString(sym: Symbol) = {
- IMain stripString afterTyper(new MethodSymbolOutput(sym).methodString())
+ IMain stripString exitingTyper(new MethodSymbolOutput(sym).methodString())
}
def exclude(name: String): Boolean = (
@@ -280,10 +275,6 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
if (parsed.isEmpty) xs map ("." + _) else xs
}
- // generic interface for querying (e.g. interpreter loop, testing)
- def completions(buf: String): List[String] =
- topLevelFor(Parsed.dotted(buf + ".", buf.length + 1))
-
def completer(): ScalaCompleter = new JLineTabCompletion
/** This gets a little bit hairy. It's no small feat delegating everything
@@ -301,16 +292,6 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
def isConsecutiveTabs(buf: String, cursor: Int) =
cursor == lastCursor && buf == lastBuf
- // Longest common prefix
- def commonPrefix(xs: List[String]): String = {
- if (xs.isEmpty || xs.contains("")) ""
- else xs.head.head match {
- case ch =>
- if (xs.tail forall (_.head == ch)) "" + ch + commonPrefix(xs map (_.tail))
- else ""
- }
- }
-
// This is jline's entry point for completion.
override def complete(buf: String, cursor: Int): Candidates = {
verbosity = if (isConsecutiveTabs(buf, cursor)) verbosity + 1 else 0
@@ -324,7 +305,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
val newCursor =
if (winners contains "") p.cursor
else {
- val advance = commonPrefix(winners)
+ val advance = longestCommonPrefix(winners)
lastCursor = p.position + advance.length
lastBuf = (buf take p.position) + advance
repldbg("tryCompletion(%s, _) lastBuf = %s, lastCursor = %s, p.position = %s".format(
@@ -335,8 +316,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
Some(Candidates(newCursor, winners))
}
- def mkDotted = Parsed.dotted(buf, cursor) withVerbosity verbosity
- def mkUndelimited = Parsed.undelimited(buf, cursor) withVerbosity verbosity
+ def mkDotted = Parsed.dotted(buf, cursor) withVerbosity verbosity
// a single dot is special cased to completion on the previous result
def lastResultCompletion =
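The hand-rolled commonPrefix helper is dropped above in favour of StringOps.longestCommonPrefix; the completer only needs the prefix shared by all candidates in order to advance the cursor. A stand-alone sketch of that computation (names invented, not the StringOps implementation):

object PrefixSketch {
  def longestCommonPrefix(xs: List[String]): String =
    if (xs.isEmpty) ""
    else xs reduceLeft { (a, b) =>
      (a zip b).takeWhile { case (x, y) => x == y }.map(_._1).mkString
    }

  // longestCommonPrefix(List("getName", "getNano", "getNumber")) == "getN"
}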
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala b/src/repl/scala/tools/nsc/interpreter/JLineReader.scala
index 5fd5b41625..5d41f1bbb4 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
+++ b/src/repl/scala/tools/nsc/interpreter/JLineReader.scala
@@ -9,9 +9,7 @@ package interpreter
import scala.tools.jline.console.ConsoleReader
import scala.tools.jline.console.completer._
import session._
-import scala.collection.JavaConverters._
import Completion._
-import io.Streamable.slurp
/**
* Reads from the console using JLine.
@@ -25,7 +23,6 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
private def term = consoleReader.getTerminal()
def reset() = term.reset()
- def init() = term.init()
def scalaToJline(tc: ScalaCompleter): Completer = new Completer {
def complete(_buf: String, cursor: Int, candidates: JList[CharSequence]): Int = {
@@ -37,11 +34,11 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
}
class JLineConsoleReader extends ConsoleReader with ConsoleReaderHelper {
+ // working around protected/trait/java insufficiencies.
+ def goBack(num: Int): Unit = back(num)
if ((history: History) ne NoHistory)
this setHistory history
- // working around protected/trait/java insufficiencies.
- def goBack(num: Int): Unit = back(num)
def readOneKey(prompt: String) = {
this.print(prompt)
this.flush()
@@ -49,7 +46,6 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
}
def eraseLine() = consoleReader.resetPromptLine("", "", 0)
def redrawLineAndFlush(): Unit = { flush() ; drawLine() ; flush() }
- // override def readLine(prompt: String): String
// A hook for running code after the repl is done initializing.
lazy val postInit: Unit = {
@@ -66,11 +62,7 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
}
}
- def currentLine = consoleReader.getCursorBuffer.buffer.toString
def redrawLine() = consoleReader.redrawLineAndFlush()
- def eraseLine() = consoleReader.eraseLine()
- // Alternate implementation, not sure if/when I need this.
- // def eraseLine() = while (consoleReader.delete()) { }
def readOneLine(prompt: String) = consoleReader readLine prompt
def readOneKey(prompt: String) = consoleReader readOneKey prompt
}
diff --git a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
new file mode 100644
index 0000000000..a895944c15
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
@@ -0,0 +1,693 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import java.lang.{ ClassLoader => JavaClassLoader, Iterable => JIterable }
+import scala.tools.nsc.util.ScalaClassLoader
+import java.io.{ ByteArrayInputStream, CharArrayWriter, FileNotFoundException, PrintWriter, Writer }
+import java.util.{ Locale }
+import java.util.regex.Pattern
+import javax.tools.{ Diagnostic, DiagnosticCollector, DiagnosticListener,
+ ForwardingJavaFileManager, JavaFileManager, JavaFileObject,
+ SimpleJavaFileObject, StandardLocation }
+import scala.reflect.io.{ AbstractFile, Directory, File, Path }
+import scala.io.Source
+import scala.util.{ Try, Success, Failure }
+import scala.util.Properties.lineSeparator
+import scala.collection.JavaConverters
+import scala.collection.generic.Clearable
+import java.net.URL
+import scala.language.reflectiveCalls
+import Javap._
+
+class JavapClass(
+ val loader: ScalaClassLoader,
+ val printWriter: PrintWriter,
+ intp: Option[IMain] = None
+) extends scala.tools.util.Javap {
+ import JavapTool.ToolArgs
+ import JavapClass._
+
+ lazy val tool = JavapTool()
+
+ /** Run the tool. Option args start with "-".
+ * The default options are "-protected -verbose".
+ * Byte data for filename args is retrieved with findBytes.
+ */
+ def apply(args: Seq[String]): List[JpResult] = {
+ val (options, claases) = args partition (s => (s startsWith "-") && s.length > 1)
+ val (flags, upgraded) = upgrade(options)
+ import flags.{ app, fun, help, raw }
+ val targets = if (fun && !help) FunFinder(loader, intp).funs(claases) else claases
+ if (help || claases.isEmpty) List(JpResult(JavapTool.helper(printWriter)))
+ else if (targets.isEmpty) List(JpResult("No anonfuns found."))
+ else tool(raw, upgraded)(targets map (claas => claas -> bytesFor(claas, app)))
+ }
+
+ /** Cull our tool options. */
+ private def upgrade(options: Seq[String]): (ToolArgs, Seq[String]) = ToolArgs fromArgs options match {
+ case (t,s) if s.nonEmpty => (t,s)
+ case (t,s) => (t, JavapTool.DefaultOptions)
+ }
+
+ /** Find bytes. Handle "-", "-app", "Foo#bar" (by ignoring member), "#bar" (by taking "bar"). */
+ private def bytesFor(path: String, app: Boolean) = Try {
+ def last = intp.get.mostRecentVar // fail if no intp
+ def req = if (path == "-") last else {
+ val s = path.splitHashMember
+ if (s._1.nonEmpty) s._1
+ else s._2 getOrElse "#"
+ }
+ def asAppBody(s: String) = {
+ val (cls, fix) = s.splitSuffix
+ s"${cls}$$delayedInit$$body${fix}"
+ }
+ def todo = if (app) asAppBody(req) else req
+ val bytes = findBytes(todo)
+ if (bytes.isEmpty) throw new FileNotFoundException(s"Could not find class bytes for '${path}'")
+ else bytes
+ }
+
+ def findBytes(path: String): Array[Byte] = tryFile(path) getOrElse tryClass(path)
+
+ /** Assume the string is a path and try to find the classfile
+ * it represents.
+ */
+ def tryFile(path: String): Option[Array[Byte]] =
+ (Try (File(path.asClassResource)) filter (_.exists) map (_.toByteArray())).toOption
+
+ /** Assume the string is a fully qualified class name and try to
+ * find the class object it represents.
+ * There are other symbols of interest, too:
+ * - a definition that is wrapped in an enclosing class
+ * - a synthetic that is not in scope but its associated class is
+ */
+ def tryClass(path: String): Array[Byte] = {
+ def load(name: String) = loader classBytes name
+ def loadable(name: String) = loader resourceable name
+ // if path has an interior dollar, take it as a synthetic
+ // if the prefix up to the dollar is a symbol in scope,
+ // result is the translated prefix + suffix
+ def desynthesize(s: String) = {
+ val i = s indexOf '$'
+ if (0 until s.length - 1 contains i) {
+ val name = s substring (0, i)
+ val sufx = s substring i
+ val tran = intp flatMap (_ translatePath name)
+ def loadableOrNone(strip: Boolean) = {
+ def suffix(strip: Boolean)(x: String) =
+ (if (strip && (x endsWith "$")) x.init else x) + sufx
+ val res = tran map (suffix(strip) _)
+ if (res.isDefined && loadable(res.get)) res else None
+ }
+ // try loading translated+suffix
+ val res = loadableOrNone(strip = false)
+ // some synthetics lack a dollar, (e.g., suffix = delayedInit$body)
+ // so as a hack, if prefix$$suffix fails, also try prefix$suffix
+ if (res.isDefined) res else loadableOrNone(strip = true)
+ } else None
+ }
+ val p = path.asClassName // scrub any suffix
+ // if repl, translate the name to something replish
+ // (for translate, would be nicer to get the sym and ask .isClass,
+ // instead of translatePath and then asking did I get a class back)
+ val q = if (intp.isEmpty) p else (
+ // only simple names get the scope treatment
+ Some(p) filter (_ contains '.')
+ // take path as a Name in scope
+ orElse (intp flatMap (_ translatePath p) filter loadable)
+ // take path as a Name in scope and find its enclosing class
+ orElse (intp flatMap (_ translateEnclosingClass p) filter loadable)
+ // take path as a synthetic derived from some Name in scope
+ orElse desynthesize(p)
+ // just try it plain
+ getOrElse p
+ )
+ load(q)
+ }
+
+ /** Base class for javap tool adapters for java 6 and 7. */
+ abstract class JavapTool {
+ type ByteAry = Array[Byte]
+ type Input = Pair[String, Try[ByteAry]]
+
+ /** Run the tool. */
+ def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult]
+
+ // Since the tool is loaded by reflection, check for catastrophic failure.
+ protected def failed: Boolean
+ implicit protected class Failer[A](a: =>A) {
+ def orFailed[B >: A](b: =>B) = if (failed) b else a
+ }
+ protected def noToolError = new JpError(s"No javap tool available: ${getClass.getName} failed to initialize.")
+
+ // output filtering support
+ val writer = new CharArrayWriter
+ def written = {
+ writer.flush()
+ val w = writer.toString
+ writer.reset()
+ w
+ }
+
+ /** Create a Showable with output massage.
+ * @param raw show ugly repl names
+ * @param target attempt to filter output to show region of interest
+ * @param preamble other messages to output
+ */
+ def showWithPreamble(raw: Boolean, target: String, preamble: String = ""): Showable = new Showable {
+ // ReplStrippingWriter clips and scrubs on write(String)
+ // circumvent it by write(mw, 0, mw.length) or wrap it in withoutUnwrapping
+ def show() =
+ if (raw && intp.isDefined) intp.get withoutUnwrapping { writeLines() }
+ else writeLines()
+ private def writeLines() {
+ // take Foo# as Foo#apply for purposes of filtering. Useful for -fun Foo#;
+ // if apply is added here, it's for other than -fun: javap Foo#, perhaps m#?
+ val filterOn = target.splitHashMember._2 map { s => if (s.isEmpty) "apply" else s }
+ var filtering = false // true if in region matching filter
+ // true to output
+ def checkFilter(line: String) = if (filterOn.isEmpty) true else {
+ // cheap heuristic, todo maybe parse for the java sig.
+ // method sigs end in paren semi
+ def isAnyMethod = line.endsWith(");")
+ def isOurMethod = {
+ val lparen = line.lastIndexOf('(')
+ val blank = line.lastIndexOf(' ', lparen)
+ (blank >= 0 && line.substring(blank+1, lparen) == filterOn.get)
+ }
+ filtering = if (filtering) {
+ // next blank line terminates section
+ // for -public, next line is next method, more or less
+ line.trim.nonEmpty && !isAnyMethod
+ } else {
+ isAnyMethod && isOurMethod
+ }
+ filtering
+ }
+ for (line <- Source.fromString(preamble + written).getLines(); if checkFilter(line))
+ printWriter write line+lineSeparator
+ printWriter.flush()
+ }
+ }
+ }
+
+ class JavapTool6 extends JavapTool {
+ import JavapTool._
+ val EnvClass = loader.tryToInitializeClass[FakeEnvironment](Env).orNull
+ val PrinterClass = loader.tryToInitializeClass[FakePrinter](Printer).orNull
+ override protected def failed = (EnvClass eq null) || (PrinterClass eq null)
+
+ val PrinterCtr = PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass) orFailed null
+ val printWrapper = new PrintWriter(writer)
+ def newPrinter(in: InputStream, env: FakeEnvironment): FakePrinter =
+ PrinterCtr.newInstance(in, printWrapper, env) orFailed null
+ def showable(raw: Boolean, target: String, fp: FakePrinter): Showable = {
+ fp.asInstanceOf[{ def print(): Unit }].print() // run tool and flush to buffer
+ printWrapper.flush() // just in case
+ showWithPreamble(raw, target)
+ }
+
+ lazy val parser = new JpOptions
+ def newEnv(opts: Seq[String]): FakeEnvironment = {
+ def result = {
+ val env: FakeEnvironment = EnvClass.newInstance()
+ parser(opts) foreach { case (name, value) =>
+ val field = EnvClass getDeclaredField name
+ field setAccessible true
+ field.set(env, value.asInstanceOf[AnyRef])
+ }
+ env
+ }
+ result orFailed null
+ }
+
+ override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] =
+ (inputs map {
+ case (claas, Success(ba)) => JpResult(showable(raw, claas, newPrinter(new ByteArrayInputStream(ba), newEnv(options))))
+ case (_, Failure(e)) => JpResult(e.toString)
+ }).toList orFailed List(noToolError)
+ }
+
+ class JavapTool7 extends JavapTool {
+
+ import JavapTool._
+ type Task = {
+ def call(): Boolean // true = ok
+ //def run(args: Array[String]): Int // all args
+ //def handleOptions(args: Array[String]): Unit // options, then run() or call()
+ }
+ // result of Task.run
+ //object TaskResult extends Enumeration {
+ // val Ok, Error, CmdErr, SysErr, Abnormal = Value
+ //}
+ val TaskClaas = loader.tryToInitializeClass[Task](JavapTool.Tool).orNull
+ override protected def failed = TaskClaas eq null
+
+ val TaskCtor = TaskClaas.getConstructor(
+ classOf[Writer],
+ classOf[JavaFileManager],
+ classOf[DiagnosticListener[_]],
+ classOf[JIterable[String]],
+ classOf[JIterable[String]]
+ ) orFailed null
+
+ class JavaReporter extends DiagnosticListener[JavaFileObject] with Clearable {
+ import scala.collection.mutable.{ ArrayBuffer, SynchronizedBuffer }
+ type D = Diagnostic[_ <: JavaFileObject]
+ val diagnostics = new ArrayBuffer[D] with SynchronizedBuffer[D]
+ override def report(d: Diagnostic[_ <: JavaFileObject]) {
+ diagnostics += d
+ }
+ override def clear() = diagnostics.clear()
+ /** All diagnostic messages.
+ * @param locale Locale for diagnostic messages, null by default.
+ */
+ def messages(implicit locale: Locale = null) = (diagnostics map (_ getMessage locale)).toList
+
+ def reportable(raw: Boolean): String = {
+ // don't filter this message if raw, since the names are likely to differ
+ val container = "Binary file .* contains .*".r
+ val m = if (raw) messages
+ else messages filter (_ match { case container() => false case _ => true })
+ clear()
+ if (m.nonEmpty) m mkString ("", lineSeparator, lineSeparator)
+ else ""
+ }
+ }
+ val reporter = new JavaReporter
+
+ // DisassemblerTool.getStandardFileManager(reporter,locale,charset)
+ val defaultFileManager: JavaFileManager =
+ (loader.tryToLoadClass[JavaFileManager]("com.sun.tools.javap.JavapFileManager").get getMethod (
+ "create",
+ classOf[DiagnosticListener[_]],
+ classOf[PrintWriter]
+ ) invoke (null, reporter, new PrintWriter(System.err, true))).asInstanceOf[JavaFileManager] orFailed null
+
+ // manages named arrays of bytes, which might have failed to load
+ class JavapFileManager(val managed: Seq[Input])(delegate: JavaFileManager = defaultFileManager)
+ extends ForwardingJavaFileManager[JavaFileManager](delegate) {
+ import JavaFileObject.Kind
+ import Kind._
+ import StandardLocation._
+ import JavaFileManager.Location
+ import java.net.URI
+ def uri(name: String): URI = new URI(name) // new URI("jfo:" + name)
+
+ def inputNamed(name: String): Try[ByteAry] = (managed find (_._1 == name)).get._2
+ def managedFile(name: String, kind: Kind) = kind match {
+ case CLASS => fileObjectForInput(name, inputNamed(name), kind)
+ case _ => null
+ }
+ // todo: just wrap it as scala abstractfile and adapt it uniformly
+ def fileObjectForInput(name: String, bytes: Try[ByteAry], kind: Kind): JavaFileObject =
+ new SimpleJavaFileObject(uri(name), kind) {
+ override def openInputStream(): InputStream = new ByteArrayInputStream(bytes.get)
+ // if non-null, ClassWriter wrongly requires scheme non-null
+ override def toUri: URI = null
+ override def getName: String = name
+ // suppress
+ override def getLastModified: Long = -1L
+ }
+ override def getJavaFileForInput(location: Location, className: String, kind: Kind): JavaFileObject =
+ location match {
+ case CLASS_PATH => managedFile(className, kind)
+ case _ => null
+ }
+ override def hasLocation(location: Location): Boolean =
+ location match {
+ case CLASS_PATH => true
+ case _ => false
+ }
+ }
+ def fileManager(inputs: Seq[Input]) = new JavapFileManager(inputs)()
+
+ // show tool messages and tool output, with output massage
+ def showable(raw: Boolean, target: String): Showable = showWithPreamble(raw, target, reporter.reportable(raw))
+
+ // eventually, use the tool interface
+ def task(options: Seq[String], claases: Seq[String], inputs: Seq[Input]): Task = {
+ //ServiceLoader.load(classOf[javax.tools.DisassemblerTool]).
+ //getTask(writer, fileManager, reporter, options.asJava, claases.asJava)
+ import JavaConverters.asJavaIterableConverter
+ TaskCtor.newInstance(writer, fileManager(inputs), reporter, options.asJava, claases.asJava)
+ .orFailed (throw new IllegalStateException)
+ }
+ // a result per input
+ private def applyOne(raw: Boolean, options: Seq[String], claas: String, inputs: Seq[Input]): Try[JpResult] =
+ Try {
+ task(options, Seq(claas), inputs).call()
+ } map {
+ case true => JpResult(showable(raw, claas))
+ case _ => JpResult(reporter.reportable(raw))
+ } recoverWith {
+ case e: java.lang.reflect.InvocationTargetException => e.getCause match {
+ case t: IllegalArgumentException => Success(JpResult(t.getMessage)) // bad option
+ case x => Failure(x)
+ }
+ } lastly {
+ reporter.clear()
+ }
+ override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] = (inputs map {
+ case (claas, Success(_)) => applyOne(raw, options, claas, inputs).get
+ case (_, Failure(e)) => JpResult(e.toString)
+ }).toList orFailed List(noToolError)
+ }
+
+ object JavapTool {
+ // >= 1.7
+ val Tool = "com.sun.tools.javap.JavapTask"
+
+ // < 1.7
+ val Env = "sun.tools.javap.JavapEnvironment"
+ val Printer = "sun.tools.javap.JavapPrinter"
+ // "documentation"
+ type FakeEnvironment = AnyRef
+ type FakePrinter = AnyRef
+
+ // support JavapEnvironment
+ class JpOptions {
+ private object Access {
+ final val PRIVATE = 0
+ final val PROTECTED = 1
+ final val PACKAGE = 2
+ final val PUBLIC = 3
+ }
+ private val envActionMap: Map[String, (String, Any)] = {
+ val map = Map(
+ "-l" -> (("showLineAndLocal", true)),
+ "-c" -> (("showDisassembled", true)),
+ "-s" -> (("showInternalSigs", true)),
+ "-verbose" -> (("showVerbose", true)),
+ "-private" -> (("showAccess", Access.PRIVATE)),
+ "-package" -> (("showAccess", Access.PACKAGE)),
+ "-protected" -> (("showAccess", Access.PROTECTED)),
+ "-public" -> (("showAccess", Access.PUBLIC)),
+ "-all" -> (("showallAttr", true))
+ )
+ map ++ List(
+ "-v" -> map("-verbose"),
+ "-p" -> map("-private")
+ )
+ }
+ def apply(opts: Seq[String]): Seq[(String, Any)] = {
+ opts flatMap { opt =>
+ envActionMap get opt match {
+ case Some(pair) => List(pair)
+ case _ =>
+ val charOpts = opt.tail.toSeq map ("-" + _)
+ if (charOpts forall (envActionMap contains _))
+ charOpts map envActionMap
+ else Nil
+ }
+ }
+ }
+ }
+
+ case class ToolArgs(raw: Boolean = false, help: Boolean = false, app: Boolean = false, fun: Boolean = false)
+
+ object ToolArgs {
+ def fromArgs(args: Seq[String]): (ToolArgs, Seq[String]) = ((ToolArgs(), Seq[String]()) /: (args flatMap massage)) {
+ case ((t,others), s) => s match {
+ case "-fun" => (t copy (fun=true), others)
+ case "-app" => (t copy (app=true), others)
+ case "-help" => (t copy (help=true), others)
+ case "-raw" => (t copy (raw=true), others)
+ case _ => (t, others :+ s)
+ }
+ }
+ }
+
+ val helps = List(
+ "usage" -> ":javap [opts] [path or class or -]...",
+ "-help" -> "Prints this help message",
+ "-raw" -> "Don't unmangle REPL names",
+ "-app" -> "Show the DelayedInit body of Apps",
+ "-fun" -> "Show anonfuns for class or Class#method",
+ "-verbose/-v" -> "Stack size, number of locals, method args",
+ "-private/-p" -> "Private classes and members",
+ "-package" -> "Package-private classes and members",
+ "-protected" -> "Protected classes and members",
+ "-public" -> "Public classes and members",
+ "-l" -> "Line and local variable tables",
+ "-c" -> "Disassembled code",
+ "-s" -> "Internal type signatures",
+ "-sysinfo" -> "System info of class",
+ "-constants" -> "Static final constants"
+ )
+
+ // match prefixes and unpack opts, or -help on failure
+ def massage(arg: String): Seq[String] = {
+ require(arg startsWith "-")
+ // arg matches opt "-foo/-f" if prefix of -foo or exactly -f
+ val r = """(-[^/]*)(/(-.))?""".r
+ def maybe(opt: String, s: String): Option[String] = opt match {
+ // disambiguate by preferring short form
+ case r(lf,_,sf) if s == sf => Some(sf)
+ case r(lf,_,sf) if lf startsWith s => Some(lf)
+ case _ => None
+ }
+ def candidates(s: String) = (helps map (h => maybe(h._1, s))).flatten
+ // one candidate or one single-char candidate
+ def uniqueOf(maybes: Seq[String]) = {
+ def single(s: String) = s.length == 2
+ if (maybes.length == 1) maybes
+ else if ((maybes count single) == 1) maybes filter single
+ else Nil
+ }
+ // each optchar must decode to exactly one option
+ def unpacked(s: String): Try[Seq[String]] = {
+ val ones = (s drop 1) map { c =>
+ val maybes = uniqueOf(candidates(s"-$c"))
+ if (maybes.length == 1) Some(maybes.head) else None
+ }
+ Try(ones) filter (_ forall (_.isDefined)) map (_.flatten)
+ }
+ val res = uniqueOf(candidates(arg))
+ if (res.nonEmpty) res
+ else (unpacked(arg)
+ getOrElse (Seq("-help"))) // or else someone needs help
+ }
+
+ def helper(pw: PrintWriter) = new Showable {
+ def show() = helps foreach (p => pw write "%-12.12s%s%n".format(p._1,p._2))
+ }
+
+ val DefaultOptions = List("-protected", "-verbose")
+
+ def isAvailable = Seq(Env, Tool) exists (cn => hasClass(loader, cn))
+
+ private def hasClass(cl: ScalaClassLoader, cn: String) = cl.tryToInitializeClass[AnyRef](cn).isDefined
+
+ private def isTaskable(cl: ScalaClassLoader) = hasClass(cl, Tool)
+
+ def apply() = if (isTaskable(loader)) new JavapTool7 else new JavapTool6
+ }
+}
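JavapTool7 above never sees the concrete com.sun.tools.javap task type at compile time: the class is loaded by name and driven through a structural type, which is why scala.language.reflectiveCalls is imported at the top of the file. A minimal illustration of that pattern; all names below are made up:

import scala.language.reflectiveCalls

object ReflectiveTaskSketch {
  // Only the shape of the tool is known statically; the concrete class comes from a classloader.
  type TaskLike = { def call(): Boolean }

  // The invocation is dispatched reflectively at runtime.
  def run(task: TaskLike): Boolean = task.call()

  def main(args: Array[String]): Unit =
    println(run(new { def call(): Boolean = true }))   // prints "true"
}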
+
+object JavapClass {
+ def apply(
+ loader: ScalaClassLoader = ScalaClassLoader.appLoader,
+ printWriter: PrintWriter = new PrintWriter(System.out, true),
+ intp: Option[IMain] = None
+ ) = new JavapClass(loader, printWriter, intp)
+
+ // We enjoy flexibility in specifying either a fully-qualified class name com.acme.Widget
+ // or a resource path com/acme/Widget.class; but not widget.out
+ implicit class MaybeClassLike(val s: String) extends AnyVal {
+ /* private[this] final val suffix = ".class" */
+ private def suffix = ".class"
+ def asClassName = (s stripSuffix suffix).replace('/', '.')
+ def asClassResource = if (s endsWith suffix) s else s.replace('.', '/') + suffix
+ def splitSuffix: (String, String) = if (s endsWith suffix) (s dropRight suffix.length, suffix) else (s, "")
+ def strippingSuffix(f: String => String): String =
+ if (s endsWith suffix) f(s dropRight suffix.length) else s
+ // e.g. Foo#bar. Foo# yields zero-length member part.
+ def splitHashMember: (String, Option[String]) = {
+ val i = s lastIndexOf '#'
+ if (i < 0) (s, None)
+ //else if (i >= s.length - 1) (s.init, None)
+ else (s take i, Some(s drop i+1))
+ }
+ }
+ implicit class ClassLoaderOps(val cl: ClassLoader) extends AnyVal {
+ private def parentsOf(x: ClassLoader): List[ClassLoader] = if (x == null) Nil else x :: parentsOf(x.getParent)
+ def parents: List[ClassLoader] = parentsOf(cl)
+ /* all file locations */
+ def locations = {
+ def alldirs = parents flatMap (_ match {
+ case ucl: ScalaClassLoader.URLClassLoader => ucl.classPathURLs
+ case jcl: java.net.URLClassLoader => jcl.getURLs
+ case _ => Nil
+ })
+ val dirs = for (d <- alldirs; if d.getProtocol == "file") yield Path(new JFile(d.toURI))
+ dirs
+ }
+ /* only the file location from which the given class is loaded */
+ def locate(k: String): Option[Path] = {
+ Try {
+ val claas = try cl loadClass k catch {
+ case _: NoClassDefFoundError => null // let it snow
+ }
+ // cf ScalaClassLoader.originOfClass
+ claas.getProtectionDomain.getCodeSource.getLocation
+ } match {
+ case Success(null) => None
+ case Success(loc) if loc.isFile => Some(Path(new JFile(loc.toURI)))
+ case _ => None
+ }
+ }
+ /* would classBytes succeed with a nonempty array */
+ def resourceable(className: String): Boolean = cl.getResource(className.asClassResource) != null
+ }
+ implicit class PathOps(val p: Path) extends AnyVal {
+ import scala.tools.nsc.io.Jar
+ def isJar = Jar isJarOrZip p
+ }
+ implicit class URLOps(val url: URL) extends AnyVal {
+ def isFile: Boolean = url.getProtocol == "file"
+ }
+ object FunFinder {
+ def apply(loader: ScalaClassLoader, intp: Option[IMain]) = new FunFinder(loader, intp)
+ }
+ class FunFinder(loader: ScalaClassLoader, intp: Option[IMain]) {
+
+ // class k, candidate f without prefix
+ def isFunOfClass(k: String, f: String) = {
+ val p = (s"${Pattern quote k}\\$$+anonfun").r
+ (p findPrefixOf f).nonEmpty
+ }
+ // class k, candidate f without prefix, method m
+ def isFunOfMethod(k: String, m: String, f: String) = {
+ val p = (s"${Pattern quote k}\\$$+anonfun\\$$${Pattern quote m}\\$$").r
+ (p findPrefixOf f).nonEmpty
+ }
+ def isFunOfTarget(k: String, m: Option[String], f: String) =
+ if (m.isEmpty) isFunOfClass(k, f)
+ else isFunOfMethod(k, m.get, f)
+ def listFunsInAbsFile(k: String, m: Option[String], d: AbstractFile) = {
+ for (f <- d; if !f.isDirectory && isFunOfTarget(k, m, f.name)) yield f.name
+ }
+ // path prefix p, class k, dir d
+ def listFunsInDir(p: String, k: String, m: Option[String])(d: Directory) = {
+ val subdir = Path(p)
+ for (f <- (d / subdir).toDirectory.list; if f.isFile && isFunOfTarget(k, m, f.name))
+ yield f.name
+ }
+ // path prefix p, class k, jar file f
+ def listFunsInJar(p: String, k: String, m: Option[String])(f: File) = {
+ import java.util.jar.JarEntry
+ import scala.tools.nsc.io.Jar
+ def maybe(e: JarEntry) = {
+ val (path, name) = {
+ val parts = e.getName split "/"
+ if (parts.length < 2) ("", e.getName)
+ else (parts.init mkString "/", parts.last)
+ }
+ if (path == p && isFunOfTarget(k, m, name)) Some(name) else None
+ }
+ (new Jar(f) map maybe).flatten
+ }
+ def loadable(name: String) = loader resourceable name
+ // translated class, optional member, opt member to filter on, whether it is repl output
+ def translate(s: String): (String, Option[String], Option[String], Boolean) = {
+ val (k0, m0) = s.splitHashMember
+ val k = k0.asClassName
+ val member = m0 filter (_.nonEmpty) // take Foo# as no member, not ""
+ val filter = m0 flatMap { case "" => Some("apply") case _ => None } // take Foo# as filter on apply
+ // class is either something replish or available to loader
+ // $line.$read$$etc$Foo#member
+ ((intp flatMap (_ translatePath k) filter (loadable) map ((_, member, filter, true)))
+ // s = "f" and $line.$read$$etc$#f is what we're after,
+ // ignoring any #member (except take # as filter on #apply)
+ orElse (intp flatMap (_ translateEnclosingClass k) map ((_, Some(k), filter, true)))
+ getOrElse (k, member, filter, false))
+ }
+ /** Find the classnames of anonfuns associated with k,
+ * where k may be an available class or a symbol in scope.
+ */
+ def funsOf(k0: String): Seq[String] = {
+ // class is either something replish or available to loader
+ val (k, member, filter, isReplish) = translate(k0)
+ val splat = k split "\\."
+ val name = splat.last
+ val prefix = if (splat.length > 1) splat.init mkString "/" else ""
+ val pkg = if (splat.length > 1) splat.init mkString "." else ""
+ // reconstitute an anonfun with a package
+ // if filtered, add the hash back, e.g. pkg.Foo#bar, pkg.Foo$anon$1#apply
+ def packaged(s: String) = {
+ val p = if (pkg.isEmpty) s else s"$pkg.$s"
+ val pm = filter map (p + "#" + _)
+ pm getOrElse p
+ }
+ // is this translated path in (usually virtual) repl outdir? or loadable from filesystem?
+ val fs = if (isReplish) {
+ def outed(d: AbstractFile, p: Seq[String]): Option[AbstractFile] = {
+ if (p.isEmpty) Option(d)
+ else Option(d.lookupName(p.head, directory = true)) flatMap (f => outed(f, p.tail))
+ }
+ outed(intp.get.replOutput.dir, splat.init) map { d =>
+ listFunsInAbsFile(name, member, d) map packaged
+ }
+ } else {
+ loader locate k map { w =>
+ if (w.isDirectory) listFunsInDir(prefix, name, member)(w.toDirectory) map packaged
+ else if (w.isJar) listFunsInJar(prefix, name, member)(w.toFile) map packaged
+ else Nil
+ }
+ }
+ fs match {
+ case Some(xs) => xs.to[Seq] // maybe empty
+ case None => Seq() // nothing found, e.g., junk input
+ }
+ }
+ def funs(ks: Seq[String]) = ks flatMap funsOf _
+ }
+}
+
+object Javap {
+ def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) = JavapClass(cl).JavapTool.isAvailable
+
+ def apply(path: String): Unit = apply(Seq(path))
+ def apply(args: Seq[String]): Unit = JavapClass() apply args foreach (_.show())
+
+ trait Showable {
+ def show(): Unit
+ }
+
+ sealed trait JpResult extends scala.tools.util.JpResult {
+ type ResultType
+ def isError: Boolean
+ def value: ResultType
+ def show(): Unit
+ // todo
+ // def header(): String
+ // def fields(): List[String]
+ // def methods(): List[String]
+ // def signatures(): List[String]
+ }
+ object JpResult {
+ def apply(msg: String) = new JpError(msg)
+ def apply(res: Showable) = new JpSuccess(res)
+ }
+ class JpError(msg: String) extends JpResult {
+ type ResultType = String
+ def isError = true
+ def value = msg
+ def show() = println(msg) // makes sense for :javap, less for -Ygen-javap
+ }
+ class JpSuccess(val value: Showable) extends JpResult {
+ type ResultType = AnyRef
+ def isError = false
+ def show() = value.show() // output to tool's PrintWriter
+ }
+ implicit class Lastly[A](val t: Try[A]) extends AnyVal {
+ private def effect[X](last: =>Unit)(a: X): Try[A] = { last; t }
+ def lastly(last: =>Unit): Try[A] = t transform (effect(last) _, effect(last) _)
+ }
+}
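The MaybeClassLike helpers earlier in this file accept either a fully-qualified class name or a resource path, and split an optional #member suffix off the target. Spelling the splitting rule out as a stand-alone snippet with expected results (the helper name below is invented):

object NameSplitSketch {
  def splitClassAndMember(s: String): (String, Option[String]) = {
    val i = s lastIndexOf '#'
    if (i < 0) (s, None) else (s take i, Some(s drop i + 1))
  }

  // splitClassAndMember("com.acme.Widget#draw") == ("com.acme.Widget", Some("draw"))
  // splitClassAndMember("com.acme.Widget#")     == ("com.acme.Widget", Some(""))  // "" is later read as "apply"
  // splitClassAndMember("com.acme.Widget")      == ("com.acme.Widget", None)
}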
diff --git a/src/compiler/scala/tools/nsc/interpreter/Logger.scala b/src/repl/scala/tools/nsc/interpreter/Logger.scala
index aeb25fc688..7407daf8d0 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Logger.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Logger.scala
@@ -11,8 +11,4 @@ trait Logger {
def isDebug: Boolean
def isTrace: Boolean
def out: JPrintWriter
-
- def info(msg: => Any): Unit = if (isInfo) out println msg
- def debug(msg: => Any): Unit = if (isDebug) out println msg
- def trace(msg: => Any): Unit = if (isTrace) out println msg
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala
index 60325ece30..4bba27b714 100644
--- a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala
+++ b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala
@@ -19,13 +19,8 @@ class ProcessResult(val line: String) {
val exitCode = builder ! logger
def lines = buffer.toList
- def show() = lines foreach println
override def toString = "`%s` (%d lines, exit %d)".format(line, buffer.size, exitCode)
}
-object ProcessResult {
- implicit def processResultToOutputLines(pr: ProcessResult): List[String] = pr.lines
- def apply(line: String): ProcessResult = new ProcessResult(line)
-}
trait LoopCommands {
protected def out: JPrintWriter
@@ -35,14 +30,6 @@ trait LoopCommands {
// a single interpreter command
abstract class LoopCommand(val name: String, val help: String) extends (String => Result) {
- private var _longHelp: String = null
- final def defaultHelp = usageMsg + " (no extended help available.)"
- def hasLongHelp = _longHelp != null || longHelp != defaultHelp
- def withLongHelp(text: String): this.type = { _longHelp = text ; this }
- def longHelp = _longHelp match {
- case null => defaultHelp
- case text => text
- }
def usage: String = ""
def usageMsg: String = ":" + name + (
if (usage == "") "" else " " + usage
@@ -52,12 +39,7 @@ trait LoopCommands {
// called if no args are given
def showUsage(): Result = {
"usage is " + usageMsg
- Result(true, None)
- }
-
- def onError(msg: String) = {
- out.println("error: " + msg)
- showUsage()
+ Result(keepRunning = true, None)
}
}
object LoopCommand {
@@ -67,9 +49,6 @@ trait LoopCommands {
def cmd(name: String, usage: String, help: String, f: String => Result): LoopCommand =
if (usage == "") new NullaryCmd(name, help, f)
else new LineCmd(name, usage, help, f)
-
- def varargs(name: String, usage: String, help: String, f: List[String] => Result): LoopCommand =
- new VarArgsCmd(name, usage, help, f)
}
class NullaryCmd(name: String, help: String, f: String => Result) extends LoopCommand(name, help) {
@@ -89,11 +68,11 @@ trait LoopCommands {
}
// the result of a single command
- case class Result(val keepRunning: Boolean, val lineToRecord: Option[String])
+ case class Result(keepRunning: Boolean, lineToRecord: Option[String])
object Result {
// the default result means "keep running, and don't record that line"
- val default = Result(true, None)
+ val default = Result(keepRunning = true, None)
// most commands do not want to micromanage the Result, but they might want
  // to print something to the console, so we accommodate Unit and String returns.
diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
index 67519cf90c..84a47311e2 100644
--- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
@@ -7,8 +7,6 @@ package scala.tools.nsc
package interpreter
import scala.collection.{ mutable, immutable }
-import scala.PartialFunction.cond
-import scala.reflect.internal.Chars
import scala.reflect.internal.Flags._
import scala.language.implicitConversions
@@ -21,8 +19,6 @@ trait MemberHandlers {
private def codegenln(leadingPlus: Boolean, xs: String*): String = codegen(leadingPlus, (xs ++ Array("\n")): _*)
private def codegenln(xs: String*): String = codegenln(true, xs: _*)
-
- private def codegen(xs: String*): String = codegen(true, xs: _*)
private def codegen(leadingPlus: Boolean, xs: String*): String = {
val front = if (leadingPlus) "+ " else ""
front + (xs map string2codeQuoted mkString " + ")
@@ -52,24 +48,26 @@ trait MemberHandlers {
}
}
+ private def isTermMacro(ddef: DefDef): Boolean = ddef.mods.isMacro
+
def chooseHandler(member: Tree): MemberHandler = member match {
- case member: DefDef => new DefHandler(member)
- case member: ValDef => new ValHandler(member)
- case member: Assign => new AssignHandler(member)
- case member: ModuleDef => new ModuleHandler(member)
- case member: ClassDef => new ClassHandler(member)
- case member: TypeDef => new TypeAliasHandler(member)
- case member: Import => new ImportHandler(member)
- case DocDef(_, documented) => chooseHandler(documented)
- case member => new GenericHandler(member)
+ case member: DefDef if isTermMacro(member) => new TermMacroHandler(member)
+ case member: DefDef => new DefHandler(member)
+ case member: ValDef => new ValHandler(member)
+ case member: ModuleDef => new ModuleHandler(member)
+ case member: ClassDef => new ClassHandler(member)
+ case member: TypeDef => new TypeAliasHandler(member)
+ case member: Assign => new AssignHandler(member)
+ case member: Import => new ImportHandler(member)
+ case DocDef(_, documented) => chooseHandler(documented)
+ case member => new GenericHandler(member)
}
sealed abstract class MemberDefHandler(override val member: MemberDef) extends MemberHandler(member) {
- def symbol = if (member.symbol eq null) NoSymbol else member.symbol
- def name: Name = member.name
- def mods: Modifiers = member.mods
- def keyword = member.keyword
- def prettyName = name.decode
+ override def name: Name = member.name
+ def mods: Modifiers = member.mods
+ def keyword = member.keyword
+ def prettyName = name.decode
override def definesImplicit = member.mods.isImplicit
override def definesTerm: Option[TermName] = Some(name.toTermName) filter (_ => name.isTermName)
@@ -81,9 +79,11 @@ trait MemberHandlers {
* in a single interpreter request.
*/
sealed abstract class MemberHandler(val member: Tree) {
+ def name: Name = nme.NO_NAME
+ def path = intp.originalPath(symbol)
+ def symbol = if (member.symbol eq null) NoSymbol else member.symbol
def definesImplicit = false
def definesValue = false
- def isLegalTopLevel = false
def definesTerm = Option.empty[TermName]
def definesType = Option.empty[TypeName]
@@ -91,7 +91,6 @@ trait MemberHandlers {
lazy val referencedNames = ImportVarsTraverser(member)
def importedNames = List[Name]()
def definedNames = definesTerm.toList ++ definesType.toList
- def definedOrImported = definedNames ++ importedNames
def definedSymbols = List[Symbol]()
def extraCodeToEvaluate(req: Request): String = ""
@@ -114,10 +113,10 @@ trait MemberHandlers {
// if this is a lazy val we avoid evaluating it here
val resultString =
if (mods.isLazy) codegenln(false, "<lazy>")
- else any2stringOf(req fullPath name, maxStringElements)
+ else any2stringOf(path, maxStringElements)
val vidString =
- if (replProps.vids) """" + " @ " + "%%8x".format(System.identityHashCode(%s)) + " """.trim.format(req fullPath name)
+ if (replProps.vids) s"""" + " @ " + "%%8x".format(System.identityHashCode($path)) + " """.trim
else ""
""" + "%s%s: %s = " + %s""".format(string2code(prettyName), vidString, string2code(req typeOf name), resultString)
@@ -126,17 +125,26 @@ trait MemberHandlers {
}
class DefHandler(member: DefDef) extends MemberDefHandler(member) {
- private def vparamss = member.vparamss
- private def isMacro = member.symbol hasFlag MACRO
- // true if not a macro and 0-arity
- override def definesValue = !isMacro && flattensToEmpty(vparamss)
+ override def definesValue = flattensToEmpty(member.vparamss) // true if 0-arity
override def resultExtractionCode(req: Request) =
if (mods.isPublic) codegenln(name, ": ", req.typeOf(name)) else ""
}
+ abstract class MacroHandler(member: DefDef) extends MemberDefHandler(member) {
+ override def definesValue = false
+ override def definesTerm: Option[TermName] = Some(name.toTermName)
+ override def definesType: Option[TypeName] = None
+ override def resultExtractionCode(req: Request) = if (mods.isPublic) codegenln(notification(req)) else ""
+ def notification(req: Request): String
+ }
+
+ class TermMacroHandler(member: DefDef) extends MacroHandler(member) {
+ def notification(req: Request) = s"defined term macro $name: ${req.typeOf(name)}"
+ }
+
class AssignHandler(member: Assign) extends MemberHandler(member) {
val Assign(lhs, rhs) = member
- val name = newTermName(freshInternalVarName())
+ override lazy val name = newTermName(freshInternalVarName())
override def definesTerm = Some(name)
override def definesValue = true
@@ -152,17 +160,16 @@ trait MemberHandlers {
}
class ModuleHandler(module: ModuleDef) extends MemberDefHandler(module) {
- override def definesTerm = Some(name)
+ override def definesTerm = Some(name.toTermName)
override def definesValue = true
- override def isLegalTopLevel = true
- override def resultExtractionCode(req: Request) = codegenln("defined module ", name)
+ override def resultExtractionCode(req: Request) = codegenln("defined object ", name)
}
class ClassHandler(member: ClassDef) extends MemberDefHandler(member) {
+ override def definedSymbols = List(symbol, symbol.companionSymbol) filterNot (_ == NoSymbol)
override def definesType = Some(name.toTypeName)
override def definesTerm = Some(name.toTermName) filter (_ => mods.isCase)
- override def isLegalTopLevel = true
override def resultExtractionCode(req: Request) =
codegenln("defined %s %s".format(keyword, name))
@@ -178,21 +185,11 @@ trait MemberHandlers {
class ImportHandler(imp: Import) extends MemberHandler(imp) {
val Import(expr, selectors) = imp
- def targetType: Type = intp.typeOfExpression("" + expr)
- override def isLegalTopLevel = true
-
- def createImportForName(name: Name): String = {
- selectors foreach {
- case sel @ ImportSelector(old, _, `name`, _) => return "import %s.{ %s }".format(expr, sel)
- case _ => ()
- }
- "import %s.%s".format(expr, name)
+ def targetType = intp.global.rootMirror.getModuleIfDefined("" + expr) match {
+ case NoSymbol => intp.typeOfExpression("" + expr)
+ case sym => sym.thisType
}
- // TODO: Need to track these specially to honor Predef masking attempts,
- // because they must be the leading imports in the code generated for each
- // line. We can use the same machinery as Contexts now, anyway.
- def isPredefImport = isReferenceToPredef(expr)
-
+ private def importableTargetMembers = importableMembers(targetType).toList
// wildcard imports, e.g. import foo._
private def selectorWild = selectors filter (_.name == nme.USCOREkw)
// renamed imports, e.g. import foo.{ bar => baz }
@@ -201,22 +198,16 @@ trait MemberHandlers {
/** Whether this import includes a wildcard import */
val importsWildcard = selectorWild.nonEmpty
- /** Whether anything imported is implicit .*/
- def importsImplicit = implicitSymbols.nonEmpty
-
def implicitSymbols = importedSymbols filter (_.isImplicit)
def importedSymbols = individualSymbols ++ wildcardSymbols
- lazy val individualSymbols: List[Symbol] =
- beforePickler(individualNames map (targetType nonPrivateMember _))
-
- lazy val wildcardSymbols: List[Symbol] =
- if (importsWildcard) beforePickler(targetType.nonPrivateMembers.toList)
- else Nil
+ private val selectorNames = selectorRenames filterNot (_ == nme.USCOREkw) flatMap (_.bothNames) toSet
+ lazy val individualSymbols: List[Symbol] = exitingTyper(importableTargetMembers filter (m => selectorNames(m.name)))
+ lazy val wildcardSymbols: List[Symbol] = exitingTyper(if (importsWildcard) importableTargetMembers else Nil)
/** Complete list of names imported by a wildcard */
lazy val wildcardNames: List[Name] = wildcardSymbols map (_.name)
- lazy val individualNames: List[Name] = selectorRenames filterNot (_ == nme.USCOREkw) flatMap (_.bothNames)
+ lazy val individualNames: List[Name] = individualSymbols map (_.name)
/** The names imported by this statement */
override lazy val importedNames: List[Name] = wildcardNames ++ individualNames
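In the reordered chooseHandler above, the guarded DefDef case for term macros has to precede the plain DefDef case, or macros would fall through to the ordinary handler, since Scala matches cases top to bottom. A tiny self-contained illustration of that ordering rule (types and names invented):

object DispatchOrderSketch {
  sealed trait Member
  final case class Def(name: String, isMacro: Boolean) extends Member
  final case class Val(name: String) extends Member

  def describe(m: Member): String = m match {
    case Def(n, true) => s"term macro $n"   // most specific (guarded) case first
    case Def(n, _)    => s"method $n"
    case Val(n)       => s"value $n"
  }

  // describe(Def("foo", isMacro = true))  == "term macro foo"
  // describe(Def("bar", isMacro = false)) == "method bar"
}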
diff --git a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala b/src/repl/scala/tools/nsc/interpreter/NamedParam.scala
index eff0ef59c5..a0af72940a 100644
--- a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala
+++ b/src/repl/scala/tools/nsc/interpreter/NamedParam.scala
@@ -10,18 +10,15 @@ import NamedParam._
import scala.language.implicitConversions
import scala.reflect.runtime.{universe => ru}
import scala.reflect.{ClassTag, classTag}
+import scala.tools.nsc.typechecker.{ TypeStrings }
trait NamedParamCreator {
protected def freshName: () => String
- def apply(name: String, tpe: String, value: Any): NamedParam = NamedParamClass(name, tpe, value)
def apply[T: ru.TypeTag : ClassTag](name: String, x: T): NamedParam = new Typed[T](name, x)
def apply[T: ru.TypeTag : ClassTag](x: T): NamedParam = apply(freshName(), x)
-
def clazz(name: String, x: Any): NamedParam = new Untyped(name, x)
- def clazz(x: Any): NamedParam = clazz(freshName(), x)
- implicit def namedValue[T: ru.TypeTag : ClassTag](name: String, x: T): NamedParam = apply(name, x)
implicit def tuple[T: ru.TypeTag : ClassTag](pair: (String, T)): NamedParam = apply(pair._1, pair._2)
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Naming.scala b/src/repl/scala/tools/nsc/interpreter/Naming.scala
index 0d03a8669a..57f3675ada 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Naming.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Naming.scala
@@ -6,6 +6,8 @@
package scala.tools.nsc
package interpreter
+import scala.util.Properties.lineSeparator
+
/** This is for name logic which is independent of the compiler (notice there's no Global.)
* That includes at least generating, metaquoting, mangling, and unmangling.
*/
@@ -18,8 +20,14 @@ trait Naming {
// <ESC> for ansi codes.
val binaryChars = cleaned count (ch => ch < 32 && !ch.isWhitespace && ch != ESC)
// Lots of binary chars - translate all supposed whitespace into spaces
- if (binaryChars > 5)
- cleaned map (ch => if (ch.isWhitespace) ' ' else if (ch < 32) '?' else ch)
+ // except supposed line endings, otherwise scrubbed lines run together
+ if (binaryChars > 5) // more than one can count while holding a hamburger
+ cleaned map {
+ case c if lineSeparator contains c => c
+ case c if c.isWhitespace => ' '
+ case c if c < 32 => '?'
+ case c => c
+ }
// Not lots - preserve whitespace and ESC
else
cleaned map (ch => if (ch.isWhitespace || ch == ESC) ch else if (ch < 32) '?' else ch)
@@ -78,7 +86,6 @@ trait Naming {
private lazy val userVar = new NameCreator(sessionNames.res) // var name, like res0
private lazy val internalVar = new NameCreator(sessionNames.ires) // internal var name, like $ires0
- def isLineName(name: String) = (name startsWith sessionNames.line) && (name stripPrefix sessionNames.line forall (_.isDigit))
def isUserVarName(name: String) = userVar didGenerate name
def isInternalVarName(name: String) = internalVar didGenerate name
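The scrubbing change near the top of this file keeps line separators intact while mapping other whitespace to spaces and control characters to '?', so binary-looking output still breaks into lines. A stand-alone restatement of that rule, simplified: the real code also preserves ESC and only scrubs when many binary characters are present:

import scala.util.Properties.lineSeparator

object ScrubSketch {
  def scrubBinary(s: String): String = s map {
    case c if lineSeparator contains c => c    // keep line endings so scrubbed lines don't run together
    case c if c.isWhitespace           => ' '  // other whitespace becomes a plain space
    case c if c < 32                   => '?'  // remaining control characters become '?'
    case c                             => c
  }
}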
diff --git a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala b/src/repl/scala/tools/nsc/interpreter/Parsed.scala
index b0be956df8..672a6fd28f 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Parsed.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc
package interpreter
-import scala.tools.jline.console.completer.ArgumentCompleter.{ ArgumentDelimiter, ArgumentList }
import util.returning
/** One instance of a command buffer.
@@ -18,7 +17,6 @@ class Parsed private (
) extends Delimited {
def isEmpty = args.isEmpty
def isUnqualified = args.size == 1
- def isQualified = args.size > 1
def isAtStart = cursor <= 0
private var _verbosity = 0
@@ -32,7 +30,6 @@ class Parsed private (
def bufferTail = new Parsed(buffer drop headLength, cursor - headLength, delimited) withVerbosity verbosity
def prev = new Parsed(buffer, cursor - 1, delimited) withVerbosity verbosity
- def next = new Parsed(buffer, cursor + 1, delimited) withVerbosity verbosity
def currentChar = buffer(cursor)
def currentArg = args.last
def position =
@@ -42,8 +39,6 @@ class Parsed private (
def isFirstDelimiter = !isEmpty && isDelimiterChar(buffer.head)
def isLastDelimiter = !isEmpty && isDelimiterChar(buffer.last)
- def firstIfDelimiter = if (isFirstDelimiter) buffer.head.toString else ""
- def lastIfDelimiter = if (isLastDelimiter) buffer.last.toString else ""
def isQuoted = false // TODO
def isEscaped = !isAtStart && isEscapeChar(currentChar) && !isEscapeChar(prev.currentChar)
@@ -57,13 +52,9 @@ object Parsed {
private def onull(s: String) = if (s == null) "" else s
- def apply(s: String): Parsed = apply(onull(s), onull(s).length)
def apply(s: String, cursor: Int): Parsed = apply(onull(s), cursor, DefaultDelimiters)
def apply(s: String, cursor: Int, delimited: Char => Boolean): Parsed =
new Parsed(onull(s), cursor, delimited)
- def dotted(s: String): Parsed = dotted(onull(s), onull(s).length)
def dotted(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ == '.')
-
- def undelimited(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ => false)
}
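For orientation, here is a standalone sketch of the dotted-split idea that the surviving Parsed.dotted(s, cursor) factory serves (completion on member paths). The helper below is illustrative only and does not use the Parsed API:

    // Sketch: split the buffer at '.' up to the cursor, yielding the qualifier path
    // and the partial identifier currently being completed.
    def dottedArgs(buffer: String, cursor: Int): (List[String], String) = {
      val upToCursor = buffer take cursor
      val parts      = upToCursor.split('.').toList
      if (upToCursor endsWith ".") (parts, "")
      else (parts.init, parts.lastOption getOrElse "")
    }

    // dottedArgs("scala.collection.im", 19) == (List("scala", "collection"), "im")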
diff --git a/src/compiler/scala/tools/nsc/interpreter/Pasted.scala b/src/repl/scala/tools/nsc/interpreter/Pasted.scala
index f5db3d9e3a..f5db3d9e3a 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Pasted.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Pasted.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/Phased.scala b/src/repl/scala/tools/nsc/interpreter/Phased.scala
index 638944713a..f625124e70 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Phased.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Phased.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
package interpreter
-import scala.collection.{ mutable, immutable }
+import scala.collection.immutable
import scala.language.implicitConversions
/** Mix this into an object and use it as a phasing
@@ -24,7 +24,6 @@ trait Phased {
case NoPhaseName => false
case name => active = name ; true
}
- def getMulti = multi
def setMulti(phases: Seq[PhaseName]): Boolean = {
if (phases contains NoPhaseName) false
else {
@@ -66,16 +65,8 @@ trait Phased {
try parseInternal(str)
catch { case _: Exception => NoPhaseName }
- def apply[T](body: => T) = immutable.SortedMap[PhaseName, T](atMap(PhaseName.all)(body): _*)
-
- def atCurrent[T](body: => T): T = atPhase(get)(body)
+ def atCurrent[T](body: => T): T = enteringPhase(get)(body)
def multi[T](body: => T): Seq[T] = multi map (ph => at(ph)(body))
- def all[T](body: => T): Seq[T] = atMulti(PhaseName.all)(body)
- def show[T](body: => T): Seq[T] = {
- val pairs = atMap(PhaseName.all)(body)
- pairs foreach { case (ph, op) => Console.println("%15s -> %s".format(ph, op.toString take 240)) }
- pairs map (_._2)
- }
def at[T](ph: PhaseName)(body: => T): T = {
val saved = get
@@ -90,11 +81,6 @@ trait Phased {
finally setMulti(saved)
}
- def showAt[T](phs: Seq[PhaseName])(body: => T): Unit =
- atMap[T](phs)(body) foreach {
- case (ph, op) => Console.println("%15s -> %s".format(ph, op.toString take 240))
- }
-
def atMap[T](phs: Seq[PhaseName])(body: => T): Seq[(PhaseName, T)] =
phs zip atMulti(phs)(body)
@@ -112,16 +98,12 @@ trait Phased {
def apply(id: Int): PhaseName = all find (_.id == id) getOrElse NoPhaseName
implicit def apply(s: String): PhaseName = nameMap(s)
- implicit def defaultPhaseName: PhaseName = active
}
sealed abstract class PhaseName {
lazy val id = phase.id
lazy val name = toString.toLowerCase
def phase = currentRun.phaseNamed(name)
def isEmpty = this eq NoPhaseName
-
- // Execute some code during this phase.
- def apply[T](body: => T): T = atPhase(phase)(body)
}
case object Parser extends PhaseName
@@ -158,5 +140,4 @@ trait Phased {
}
implicit def phaseEnumToPhase(name: PhaseName): Phase = name.phase
- implicit def phaseNameToPhase(name: String): Phase = currentRun.phaseNamed(name)
}
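Since atCurrent above now delegates to enteringPhase, a hedged sketch of the underlying pattern may help (assumptions: a scala.tools.nsc.Global instance with a run in progress; enteringPhase and currentRun.typerPhase are the standard entry points, while the helper name is invented):

    // Sketch: evaluate a thunk with the compiler's phase temporarily set to typer,
    // so symbol info is rendered as seen at that phase.
    def infoAtTyper(global: scala.tools.nsc.Global)(sym: global.Symbol): String =
      global.enteringPhase(global.currentRun.typerPhase) {
        sym.info.toString
      }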
diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/repl/scala/tools/nsc/interpreter/Power.scala
index 5e6bf8824d..e517a16b32 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Power.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Power.scala
@@ -8,8 +8,6 @@ package interpreter
import scala.collection.{ mutable, immutable }
import scala.util.matching.Regex
-import scala.reflect.internal.util.{ BatchSourceFile }
-import session.{ History }
import scala.io.Codec
import java.net.{ URL, MalformedURLException }
import io.{ Path }
@@ -48,7 +46,6 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
import intp.{ beQuietDuring, typeOfExpression, interpret, parse }
import intp.global._
import definitions.{ compilerTypeFromTag, compilerSymbolFromTag}
- import rootMirror.{ getClassIfDefined, getModuleIfDefined }
abstract class SymSlurper {
def isKeep(sym: Symbol): Boolean
@@ -73,7 +70,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
pass += 1
val (repeats, unseen) = todo partition seen
unseenHistory += unseen.size
- if (opt.verbose) {
+ if (settings.verbose.value) {
println("%3d %s accumulated, %s discarded. This pass: %s unseen, %s repeats".format(
pass, keep.size, discarded, unseen.size, repeats.size))
}
@@ -148,21 +145,10 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
// First we create the ReplVals instance and bind it to $r
intp.bind("$r", replVals)
// Then we import everything from $r.
- intp interpret ("import " + intp.pathToTerm("$r") + "._")
+ intp interpret ("import " + intp.originalPath("$r") + "._")
// And whatever else there is to do.
init.lines foreach (intp interpret _)
}
- def valsDescription: String = {
- def to_str(m: Symbol) = "%12s %s".format(
- m.decodedName, "" + elimRefinement(m.accessedOrSelf.tpe) stripPrefix "scala.tools.nsc.")
-
- ( rutil.info[ReplValsImpl].membersDeclared
- filter (m => m.isPublic && !m.hasModuleFlag && !m.isConstructor)
- sortBy (_.decodedName)
- map to_str
- mkString ("Name and type of values imported into the repl in power mode.\n\n", "\n", "")
- )
- }
trait LowPriorityInternalInfo {
implicit def apply[T: ru.TypeTag : ClassTag] : InternalInfo[T] = new InternalInfo[T](None)
@@ -175,12 +161,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
* symbol, by only implicitly installing one method, "?", and the rest
* of the conveniences exist on that wrapper.
*/
- trait LowPriorityInternalInfoWrapper {
- implicit def apply[T: ru.TypeTag : ClassTag] : InternalInfoWrapper[T] = new InternalInfoWrapper[T](None)
- }
- object InternalInfoWrapper extends LowPriorityInternalInfoWrapper {
-
- }
+ trait LowPriorityInternalInfoWrapper { }
class InternalInfoWrapper[T: ru.TypeTag : ClassTag](value: Option[T] = None) {
def ? : InternalInfo[T] = new InternalInfo[T](value)
}
@@ -190,7 +171,6 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
* customizable symbol filter (had to hardcode no-spec to reduce noise)
*/
class InternalInfo[T](value: Option[T] = None)(implicit typeEvidence: ru.TypeTag[T], runtimeClassEvidence: ClassTag[T]) {
- private def newInfo[U: ru.TypeTag : ClassTag](value: U): InternalInfo[U] = new InternalInfo[U](Some(value))
private def isSpecialized(s: Symbol) = s.name.toString contains "$mc"
private def isImplClass(s: Symbol) = s.name.toString endsWith "$class"
@@ -201,47 +181,15 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
|| s.isAnonOrRefinementClass
|| s.isAnonymousFunction
)
- def symbol = compilerSymbolFromTag(tag)
- def tpe = compilerTypeFromTag(tag)
- def name = symbol.name
- def companion = symbol.companionSymbol
- def info = symbol.info
- def moduleClass = symbol.moduleClass
- def owner = symbol.owner
- def owners = symbol.ownerChain drop 1
- def signature = symbol.defString
-
- def decls = info.decls
- def declsOverride = membersDeclared filter (_.isOverride)
- def declsOriginal = membersDeclared filterNot (_.isOverride)
-
+ def symbol = compilerSymbolFromTag(tag)
+ def tpe = compilerTypeFromTag(tag)
def members = membersUnabridged filterNot excludeMember
def membersUnabridged = tpe.members.toList
- def membersDeclared = members filterNot excludeMember
- def membersInherited = members filterNot (membersDeclared contains _)
- def memberTypes = members filter (_.name.isTypeName)
- def memberMethods = members filter (_.isMethod)
-
- def pkg = symbol.enclosingPackage
- def pkgName = pkg.fullName
- def pkgClass = symbol.enclosingPackageClass
- def pkgMembers = pkg.info.members filterNot excludeMember
- def pkgClasses = pkgMembers filter (s => s.isClass && s.isDefinedInPackage)
- def pkgSymbols = new PackageSlurper(pkgClass).slurp() filterNot excludeMember
-
- def tag = typeEvidence
- def runtimeClass = runtimeClassEvidence.runtimeClass
- def shortClass = runtimeClass.getName split "[$.]" last
-
- def baseClasses = tpe.baseClasses
- def baseClassDecls = mapFrom(baseClasses)(_.info.decls.toList.sortBy(_.name))
- def ancestors = baseClasses drop 1
- def ancestorDeclares(name: String) = ancestors filter (_.info member newTermName(name) ne NoSymbol)
- def baseTypes = tpe.baseTypeSeq.toList
-
- def <:<[U: ru.TypeTag : ClassTag](other: U) = tpe <:< newInfo(other).tpe
- def lub[U: ru.TypeTag : ClassTag](other: U) = intp.global.lub(List(tpe, newInfo(other).tpe))
- def glb[U: ru.TypeTag : ClassTag](other: U) = intp.global.glb(List(tpe, newInfo(other).tpe))
+ def pkg = symbol.enclosingPackage
+ def tag = typeEvidence
+ def runtimeClass = runtimeClassEvidence.runtimeClass
+ def shortClass = runtimeClass.getName split "[$.]" last
+ def baseClasses = tpe.baseClasses
override def toString = value match {
case Some(x) => "%s (%s)".format(x, shortClass)
@@ -267,7 +215,6 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
}
object Prettifier extends LowPriorityPrettifier {
def stringOf(x: Any): String = scala.runtime.ScalaRunTime.stringOf(x)
- def prettify[T](value: T): TraversableOnce[String] = default[T] prettify value
def default[T] = new Prettifier[T] {
def prettify(x: T): TraversableOnce[String] = AnyPrettifier prettify x
def show(x: T): Unit = AnyPrettifier show x
@@ -277,45 +224,21 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
def show(x: T): Unit
def prettify(x: T): TraversableOnce[String]
- def show(xs: TraversableOnce[T]): Unit = prettify(xs) foreach println
def prettify(xs: TraversableOnce[T]): TraversableOnce[String] = xs flatMap (x => prettify(x))
}
abstract class PrettifierClass[T: Prettifier]() {
val pretty = implicitly[Prettifier[T]]
- import pretty._
-
def value: Seq[T]
def pp(f: Seq[T] => Seq[T]): Unit =
pretty prettify f(value) foreach (StringPrettifier show _)
def freq[U](p: T => U) = (value.toSeq groupBy p mapValues (_.size)).toList sortBy (-_._2) map (_.swap)
- def ppfreq[U](p: T => U): Unit = freq(p) foreach { case (count, key) => println("%5d %s".format(count, key)) }
-
- def |[U](f: Seq[T] => Seq[U]): Seq[U] = f(value)
- def ^^[U](f: T => U): Seq[U] = value map f
- def ^?[U](pf: PartialFunction[T, U]): Seq[U] = value collect pf
- def >>!(implicit ord: Ordering[T]): Unit = pp(_.sorted.distinct)
def >>(implicit ord: Ordering[T]): Unit = pp(_.sorted)
def >!(): Unit = pp(_.distinct)
def >(): Unit = pp(identity)
-
- def >#(): Unit = this ># (identity[T] _)
- def >#[U](p: T => U): Unit = this ppfreq p
-
- def >?(p: T => Boolean): Unit = pp(_ filter p)
- def >?(s: String): Unit = pp(_ filter (_.toString contains s))
- def >?(r: Regex): Unit = pp(_ filter (_.toString matches fixRegex(r)))
-
- private def fixRegex(r: scala.util.matching.Regex): String = {
- val s = r.pattern.toString
- val prefix = if (s startsWith "^") "" else """^.*?"""
- val suffix = if (s endsWith "$") "" else """.*$"""
-
- prefix + s + suffix
- }
}
class MultiPrettifierClass[T: Prettifier](val value: Seq[T]) extends PrettifierClass[T]() { }
@@ -339,17 +262,11 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
class RichReplURL(url: URL)(implicit codec: Codec) {
def slurp(): String = io.Streamable.slurp(url)
}
- class RichSymbolList(syms: List[Symbol]) {
- def sigs = syms map (_.defString)
- def infos = syms map (_.info)
- }
trait Implicits1 {
// fallback
implicit def replPrinting[T](x: T)(implicit pretty: Prettifier[T] = Prettifier.default[T]) =
new SinglePrettifierClass[T](x)
-
- implicit def liftToTypeName(s: String): TypeName = newTypeName(s)
}
trait Implicits2 extends Implicits1 {
class RichSymbol(sym: Symbol) {
@@ -374,26 +291,13 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
implicit def replInputStream(in: InputStream)(implicit codec: Codec) = new RichInputStream(in)
implicit def replEnhancedURLs(url: URL)(implicit codec: Codec): RichReplURL = new RichReplURL(url)(codec)
-
- implicit def liftToTermName(s: String): TermName = newTermName(s)
- implicit def replListOfSymbols(xs: List[Symbol]) = new RichSymbolList(xs)
}
trait ReplUtilities {
- // [Eugene to Paul] needs review!
- // def module[T: Manifest] = getModuleIfDefined(manifest[T].erasure.getName stripSuffix nme.MODULE_SUFFIX_STRING)
- // def clazz[T: Manifest] = getClassIfDefined(manifest[T].erasure.getName)
def module[T: ru.TypeTag] = ru.typeOf[T].typeSymbol.suchThat(_.isPackage)
def clazz[T: ru.TypeTag] = ru.typeOf[T].typeSymbol.suchThat(_.isClass)
def info[T: ru.TypeTag : ClassTag] = InternalInfo[T]
def ?[T: ru.TypeTag : ClassTag] = InternalInfo[T]
- def url(s: String) = {
- try new URL(s)
- catch { case _: MalformedURLException =>
- if (Path(s).exists) Path(s).toURL
- else new URL("http://" + s)
- }
- }
def sanitize(s: String): String = sanitize(s.getBytes())
def sanitize(s: Array[Byte]): String = (s map {
case x if x.toChar.isControl => '?'
@@ -411,20 +315,12 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
lazy val rutil: ReplUtilities = new ReplUtilities { }
lazy val phased: Phased = new { val global: intp.global.type = intp.global } with Phased { }
- def context(code: String) = analyzer.rootContext(unit(code))
- def source(code: String) = newSourceFile(code)
- def unit(code: String) = newCompilationUnit(code)
- def trees(code: String) = parse(code) getOrElse Nil
- def typeOf(id: String) = intp.typeOfExpression(id)
+ def unit(code: String) = newCompilationUnit(code)
+ def trees(code: String) = parse(code) getOrElse Nil
- override def toString = """
+ override def toString = s"""
|** Power mode status **
- |Default phase: %s
- |Names: %s
- |Identifiers: %s
- """.stripMargin.format(
- phased.get,
- intp.allDefinedNames mkString " ",
- intp.unqualifiedIds mkString " "
- )
+ |Default phase: ${phased.get}
+ |Names: ${intp.unqualifiedIds mkString " "}
+ """.stripMargin
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala b/src/repl/scala/tools/nsc/interpreter/ReplConfig.scala
index 7cd0f436c4..3392ea0b5e 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplConfig.scala
@@ -14,9 +14,7 @@ trait ReplConfig {
lazy val replProps = new ReplProps
class TapMaker[T](x: T) {
- def tapInfo(msg: => String): T = tap(x => replinfo(parens(x)))
def tapDebug(msg: => String): T = tap(x => repldbg(parens(x)))
- def tapTrace(msg: => String): T = tap(x => repltrace(parens(x)))
def tap[U](f: T => U): T = {
f(x)
x
@@ -28,12 +26,6 @@ trait ReplConfig {
try Console println msg
catch { case x: AssertionError => Console.println("Assertion error printing debugging output: " + x) }
- private[nsc] def repldbgex(ex: Throwable): Unit = {
- if (isReplDebug) {
- echo("Caught/suppressing: " + ex)
- ex.printStackTrace
- }
- }
private[nsc] def repldbg(msg: => String) = if (isReplDebug) echo(msg)
private[nsc] def repltrace(msg: => String) = if (isReplTrace) echo(msg)
private[nsc] def replinfo(msg: => String) = if (isReplInfo) echo(msg)
@@ -45,14 +37,10 @@ trait ReplConfig {
repltrace(stackTraceString(unwrap(t)))
alt
}
- private[nsc] def substituteAndLog[T](alt: => T)(body: => T): T =
- substituteAndLog("" + alt, alt)(body)
private[nsc] def substituteAndLog[T](label: String, alt: => T)(body: => T): T = {
try body
catch logAndDiscard(label, alt)
}
- private[nsc] def squashAndLog(label: String)(body: => Unit): Unit =
- substituteAndLog(label, ())(body)
def isReplTrace: Boolean = replProps.trace
def isReplDebug: Boolean = replProps.debug || isReplTrace
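The surviving tapDebug above is built on the generic tap combinator; a self-contained sketch of the pattern (the standalone helper is illustrative, not the patch's API):

    // Sketch: 'tap' runs a side effect on a value and returns the value unchanged,
    // which is convenient for logging in the middle of expression chains.
    def tap[T](x: T)(f: T => Unit): T = { f(x); x }

    val xs = tap(List(1, 2, 3))(res => println(s"computed: $res"))
    // prints "computed: List(1, 2, 3)" and xs == List(1, 2, 3)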
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplDir.scala b/src/repl/scala/tools/nsc/interpreter/ReplDir.scala
new file mode 100644
index 0000000000..5d386b47b7
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/ReplDir.scala
@@ -0,0 +1,48 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import io.VirtualDirectory
+import settings.MutableSettings
+import scala.reflect.io.{ AbstractFile, PlainDirectory, Directory }
+import scala.collection.generic.Clearable
+
+/** Directory to save .class files to. */
+trait ReplDir extends AbstractFile with Clearable { }
+
+private class ReplVirtualDir() extends VirtualDirectory("(memory)", None) with ReplDir { }
+private class ReplRealDir(dir: Directory) extends PlainDirectory(dir) with ReplDir {
+ def clear() = {
+ dir.deleteRecursively()
+ dir.createDirectory()
+ }
+}
+
+class ReplOutput(val dirSetting: MutableSettings#StringSetting) {
+ // outdir for generated classfiles - may be in-memory (the default),
+ // a generated temporary directory, or a specified outdir.
+ val dir: ReplDir = (
+ if (dirSetting.isDefault)
+ new ReplVirtualDir()
+ else if (dirSetting.value == "")
+ new ReplRealDir(Directory.makeTemp("repl"))
+ else
+ new ReplRealDir(Directory(dirSetting.value))
+ )
+
+ // print the contents hierarchically
+ def show(out: JPrintWriter) = {
+ def pp(root: AbstractFile, indentLevel: Int) {
+ val label = root.name
+ val spaces = " " * indentLevel
+ out.println(spaces + label)
+ if (root.isDirectory)
+ root.toList sortBy (_.name) foreach (x => pp(x, indentLevel + 1))
+ }
+ pp(dir, 0)
+ }
+}
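The three-way choice above (in-memory by default, a fresh temp directory when the setting is explicitly the empty string, otherwise the named directory) can be exercised roughly as follows. This is a hedged usage sketch: it assumes the -Yrepl-outdir option is exposed as a StringSetting named Yreploutdir on Settings, which is an assumption about the surrounding codebase rather than something shown in this patch.

    // Sketch: build the REPL class-file sink from settings and print its tree.
    val settings = new scala.tools.nsc.Settings()
    val output   = new scala.tools.nsc.interpreter.ReplOutput(settings.Yreploutdir) // assumed accessor
    output.show(new java.io.PrintWriter(Console.out, true)) // initially just the root entry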
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala
index 7c698a2f3e..51fab3082e 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc
package interpreter
-import reporters._
import typechecker.Analyzer
/** A layer on top of Global so I can guarantee some extra
@@ -24,8 +23,16 @@ trait ReplGlobal extends Global {
override lazy val analyzer = new {
val global: ReplGlobal.this.type = ReplGlobal.this
} with Analyzer {
+
+ override protected def findMacroClassLoader(): ClassLoader = {
+ val loader = super.findMacroClassLoader
+ macroLogVerbose("macro classloader: initializing from a REPL classloader: %s".format(global.classPath.asURLs))
+ val virtualDirectory = globalSettings.outputDirs.getSingleOutput.get
+ new util.AbstractFileClassLoader(virtualDirectory, loader) {}
+ }
+
override def newTyper(context: Context): Typer = new Typer(context) {
- override def typed(tree: Tree, mode: Int, pt: Type): Tree = {
+ override def typed(tree: Tree, mode: Mode, pt: Type): Tree = {
val res = super.typed(tree, mode, pt)
tree match {
case Ident(name) if !tree.symbol.hasPackageFlag && !name.toString.startsWith("$") =>
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
index bc3e7a10d7..2364918494 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
@@ -13,15 +13,11 @@ class ReplProps {
private def bool(name: String) = BooleanProp.keyExists(name)
private def int(name: String) = IntProp(name)
- val jlineDebug = bool("scala.tools.jline.internal.Log.debug")
- val jlineTrace = bool("scala.tools.jline.internal.Log.trace")
-
val info = bool("scala.repl.info")
val debug = bool("scala.repl.debug")
val trace = bool("scala.repl.trace")
val power = bool("scala.repl.power")
- val replInitCode = Prop[JFile]("scala.repl.initcode")
val replAutorunCode = Prop[JFile]("scala.repl.autoruncode")
val powerInitCode = Prop[JFile]("scala.repl.power.initcode")
val powerBanner = Prop[JFile]("scala.repl.power.banner")
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala
index b20166d070..b20166d070 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala b/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala
index f8ecc6c6fe..08472bbc64 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala
@@ -6,8 +6,6 @@
package scala.tools.nsc
package interpreter
-import scala.collection.{ mutable, immutable }
-import scala.PartialFunction.cond
import scala.reflect.internal.Chars
trait ReplStrings {
@@ -31,5 +29,4 @@ trait ReplStrings {
"scala.runtime.ScalaRunTime.replStringOf(%s, %s)".format(x, maxlen)
def words(s: String) = s.trim split "\\s+" filterNot (_ == "") toList
- def isQuoted(s: String) = (s.length >= 2) && (s.head == s.last) && ("\"'" contains s.head)
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala b/src/repl/scala/tools/nsc/interpreter/ReplVals.scala
index 53478bdc5d..ea100b25f2 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplVals.scala
@@ -57,7 +57,6 @@ object ReplVals {
*/
def mkCompilerTypeFromTag[T <: Global](global: T) = {
import global._
- import definitions._
/** We can't use definitions.compilerTypeFromTag directly because we're passing
* it to map and the compiler refuses to perform eta expansion on a method
diff --git a/src/compiler/scala/tools/nsc/interpreter/Results.scala b/src/repl/scala/tools/nsc/interpreter/Results.scala
index e400906a58..e400906a58 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Results.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Results.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/RichClass.scala b/src/repl/scala/tools/nsc/interpreter/RichClass.scala
index 4371f7fe05..36cdf65510 100644
--- a/src/compiler/scala/tools/nsc/interpreter/RichClass.scala
+++ b/src/repl/scala/tools/nsc/interpreter/RichClass.scala
@@ -10,7 +10,6 @@ import scala.reflect.{ ClassTag, classTag }
class RichClass[T](val clazz: Class[T]) {
def toTag: ClassTag[T] = ClassTag[T](clazz)
- def toTypeString: String = TypeStrings.fromClazz(clazz)
// Sadly isAnonymousClass does not return true for scala anonymous
// classes because our naming scheme is not doing well against the
@@ -20,14 +19,12 @@ class RichClass[T](val clazz: Class[T]) {
catch { case _: java.lang.InternalError => false } // good ol' "Malformed class name"
)
- /** It's not easy... to be... me... */
- def supermans: List[ClassTag[_]] = supers map (_.toTag)
+ def supertags: List[ClassTag[_]] = supers map (_.toTag)
def superNames: List[String] = supers map (_.getName)
def interfaces: List[JClass] = supers filter (_.isInterface)
def hasAncestorName(f: String => Boolean) = superNames exists f
def hasAncestor(f: JClass => Boolean) = supers exists f
- def hasAncestorInPackage(pkg: String) = hasAncestorName(_ startsWith (pkg + "."))
def supers: List[JClass] = {
def loop(x: JClass): List[JClass] = x.getSuperclass match {
diff --git a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala b/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala
index bccd8158ec..2d0917d91f 100644
--- a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala
+++ b/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala
@@ -19,11 +19,8 @@ extends InteractiveReader
val history = NoHistory
val completion = NoCompletion
- def init() = ()
def reset() = ()
- def eraseLine() = ()
def redrawLine() = ()
- def currentLine = ""
def readOneLine(prompt: String): String = {
if (interactive) {
out.print(prompt)
@@ -40,4 +37,4 @@ object SimpleReader {
def apply(in: BufferedReader = defaultIn, out: JPrintWriter = defaultOut, interactive: Boolean = true): SimpleReader =
new SimpleReader(in, out, interactive)
-} \ No newline at end of file
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/StdReplTags.scala b/src/repl/scala/tools/nsc/interpreter/StdReplTags.scala
new file mode 100644
index 0000000000..ebbb397a0c
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/StdReplTags.scala
@@ -0,0 +1,15 @@
+package scala.tools.nsc
+package interpreter
+
+import scala.tools.reflect.StdTags
+import scala.reflect.runtime.{ universe => ru }
+
+trait StdReplTags extends StdTags {
+ lazy val tagOfStdReplVals = tagOfStaticClass[StdReplVals]
+ lazy val tagOfIMain = tagOfStaticClass[IMain]
+}
+
+object StdReplTags extends StdTags with StdReplTags {
+ val u: ru.type = ru
+ val m = u.runtimeMirror(getClass.getClassLoader)
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/package.scala b/src/repl/scala/tools/nsc/interpreter/package.scala
new file mode 100644
index 0000000000..52a085080b
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/package.scala
@@ -0,0 +1,157 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+
+import scala.language.implicitConversions
+import scala.reflect.{ classTag, ClassTag }
+import scala.reflect.runtime.{ universe => ru }
+import scala.reflect.{ClassTag, classTag}
+import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse}
+
+/** The main REPL related classes and values are as follows.
+ * In addition to standard compiler classes Global and Settings, there are:
+ *
+ * History: an interface for session history.
+ * Completion: an interface for tab completion.
+ * ILoop (formerly InterpreterLoop): The umbrella class for a session.
+ * IMain (formerly Interpreter): Handles the evolving state of the session
+ * and handles submitting code to the compiler and handling the output.
+ * InteractiveReader: how ILoop obtains input.
+ * Power: a repository for more advanced/experimental features.
+ *
+ * ILoop contains { in: InteractiveReader, intp: IMain, settings: Settings, power: Power }
+ * InteractiveReader contains { history: History, completion: Completion }
+ * IMain contains { global: Global }
+ */
+package object interpreter extends ReplConfig with ReplStrings {
+ type JFile = java.io.File
+ type JClass = java.lang.Class[_]
+ type JList[T] = java.util.List[T]
+ type JCollection[T] = java.util.Collection[T]
+ type JPrintWriter = java.io.PrintWriter
+ type InputStream = java.io.InputStream
+ type OutputStream = java.io.OutputStream
+
+ val IR = Results
+
+ implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
+
+ private[interpreter] implicit def javaCharSeqCollectionToScala(xs: JCollection[_ <: CharSequence]): List[String] = {
+ import scala.collection.JavaConverters._
+ xs.asScala.toList map ("" + _)
+ }
+
+ private[nsc] implicit def enrichClass[T](clazz: Class[T]) = new RichClass[T](clazz)
+ private[nsc] implicit def enrichAnyRefWithTap[T](x: T) = new TapMaker(x)
+ private[nsc] def debugging[T](msg: String)(x: T) = x.tapDebug(msg)
+
+ private val ourClassloader = getClass.getClassLoader
+
+ def staticTypeTag[T: ClassTag]: ru.TypeTag[T] = ru.TypeTag[T](
+ ru.runtimeMirror(ourClassloader),
+ new TypeCreator {
+ def apply[U <: ApiUniverse with Singleton](m: Mirror[U]): U # Type =
+ m.staticClass(classTag[T].runtimeClass.getName).toTypeConstructor.asInstanceOf[U # Type]
+ })
+
+ /** This class serves to trick the compiler into treating a var
+ * (intp, in ILoop) as a stable identifier.
+ */
+ implicit class IMainOps(val intp: IMain) {
+ import intp._
+ import global.{ reporter => _, _ }
+ import definitions._
+
+ protected def echo(msg: String) = {
+ Console.out println msg
+ Console.out.flush()
+ }
+
+ def implicitsCommand(line: String): String = {
+ def p(x: Any) = intp.reporter.printMessage("" + x)
+
+ // If an argument is given, only show a source with that
+ // in its name somewhere.
+ val args = line split "\\s+"
+ val filtered = intp.implicitSymbolsBySource filter {
+ case (source, syms) =>
+ (args contains "-v") || {
+ if (line == "") (source.fullName.toString != "scala.Predef")
+ else (args exists (source.name.toString contains _))
+ }
+ }
+
+ if (filtered.isEmpty)
+ return "No implicits have been imported other than those in Predef."
+
+ filtered foreach {
+ case (source, syms) =>
+ p("/* " + syms.size + " implicit members imported from " + source.fullName + " */")
+
+ // This groups the members by where the symbol is defined
+ val byOwner = syms groupBy (_.owner)
+ val sortedOwners = byOwner.toList sortBy { case (owner, _) => exitingTyper(source.info.baseClasses indexOf owner) }
+
+ sortedOwners foreach {
+ case (owner, members) =>
+ // Within each owner, we cluster results based on the final result type
+ // if there are more than a couple, and sort each cluster based on name.
+ // This is really just trying to make the 100 or so implicits imported
+ // by default into something readable.
+ val memberGroups: List[List[Symbol]] = {
+ val groups = members groupBy (_.tpe.finalResultType) toList
+ val (big, small) = groups partition (_._2.size > 3)
+ val xss = (
+ (big sortBy (_._1.toString) map (_._2)) :+
+ (small flatMap (_._2))
+ )
+
+ xss map (xs => xs sortBy (_.name.toString))
+ }
+
+ val ownerMessage = if (owner == source) " defined in " else " inherited from "
+ p(" /* " + members.size + ownerMessage + owner.fullName + " */")
+
+ memberGroups foreach { group =>
+ group foreach (s => p(" " + intp.symbolDefString(s)))
+ p("")
+ }
+ }
+ p("")
+ }
+ ""
+ }
+
+ /** TODO -
+ * -n normalize
+ * -l label with case class parameter names
+ * -c complete - leave nothing out
+ */
+ def typeCommandInternal(expr: String, verbose: Boolean): Unit =
+ symbolOfLine(expr) andAlso (echoTypeSignature(_, verbose))
+
+ def printAfterTyper(msg: => String) =
+ reporter printUntruncatedMessage exitingTyper(msg)
+
+ private def replInfo(sym: Symbol) =
+ if (sym.isAccessor) dropNullaryMethod(sym.info) else sym.info
+
+ def echoTypeStructure(sym: Symbol) =
+ printAfterTyper("" + deconstruct.show(replInfo(sym)))
+
+ def echoTypeSignature(sym: Symbol, verbose: Boolean) = {
+ if (verbose) echo("// Type signature")
+ printAfterTyper("" + replInfo(sym))
+
+ if (verbose) {
+ echo("\n// Internal Type structure")
+ echoTypeStructure(sym)
+ }
+ }
+ }
+}
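The comment on IMainOps above ("trick the compiler into treating a var ... as a stable identifier") leans on a general Scala rule worth spelling out: members cannot be imported through a var, only through a stable path, so wrapping the var's value in a class with a val parameter restores importability. A minimal self-contained sketch with invented types:

    class Interp { object names { val greeting = "hi" } }

    class Loop {
      var intp = new Interp
      // import intp.names._        // rejected: "stable identifier required", intp is a var
    }

    // Wrapping the current value in a val parameter gives a stable prefix again:
    class InterpOps(val intp: Interp) {
      import intp.names._           // fine: intp is a val here
      def greet = greeting
    }

    new InterpOps((new Loop).intp).greet   // == "hi"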
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala b/src/repl/scala/tools/nsc/interpreter/session/FileBackedHistory.scala
index dddfb1b8f6..dddfb1b8f6 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/FileBackedHistory.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/History.scala b/src/repl/scala/tools/nsc/interpreter/session/History.scala
index daa05b86db..794d41adc7 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/History.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/History.scala
@@ -14,15 +14,9 @@ trait History {
def asStrings: List[String]
def index: Int
def size: Int
- def grep(s: String): List[String]
}
object NoHistory extends History {
def asStrings = Nil
- def grep(s: String) = Nil
def index = 0
def size = 0
}
-
-object History {
- def empty: History = NoHistory
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala b/src/repl/scala/tools/nsc/interpreter/session/JLineHistory.scala
index 18e0ee7c85..18e0ee7c85 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/JLineHistory.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala b/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala
index 9f4e2b9df3..89998e438a 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala
@@ -54,9 +54,5 @@ class SimpleHistory extends JLineHistory {
def moveTo(idx: Int) = (idx > 0) && (idx <= lastIndex) && setTo(idx)
def moveToEnd(): Unit = setTo(size)
- // scala legacy interface
- def asList: List[JEntry] = toEntries().toList
- def asJavaList = entries()
- def asStrings = buf.toList
- def grep(s: String) = buf.toList filter (_ contains s)
+ def asStrings = buf.toList
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/package.scala b/src/repl/scala/tools/nsc/interpreter/session/package.scala
index c62cf21151..c62cf21151 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/package.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/package.scala
diff --git a/src/scalacheck/org/scalacheck/Commands.scala b/src/scalacheck/org/scalacheck/Commands.scala
index 88ef8ae2a1..2acc460b5e 100644
--- a/src/scalacheck/org/scalacheck/Commands.scala
+++ b/src/scalacheck/org/scalacheck/Commands.scala
@@ -87,11 +87,6 @@ trait Commands extends Prop {
private val bindings = new scala.collection.mutable.ListBuffer[(State,Any)]
- private def initState() = {
- bindings.clear()
- initialState()
- }
-
private def genCmds: Gen[Cmds] = {
def sizedCmds(s: State)(sz: Int): Gen[Cmds] =
if(sz <= 0) value(Cmds(Nil, Nil)) else for {
diff --git a/src/compiler/scala/tools/ant/Scaladoc.scala b/src/scaladoc/scala/tools/ant/Scaladoc.scala
index 7fc811788e..fd6d637212 100644
--- a/src/compiler/scala/tools/ant/Scaladoc.scala
+++ b/src/scaladoc/scala/tools/ant/Scaladoc.scala
@@ -78,7 +78,7 @@ class Scaladoc extends ScalaMatchingTask {
val values = List("yes", "no", "on", "off")
def getBooleanValue(value: String, flagName: String): Boolean =
if (Flag.isPermissible(value))
- return ("yes".equals(value) || "on".equals(value))
+ ("yes".equals(value) || "on".equals(value))
else
buildError("Unknown " + flagName + " flag '" + value + "'")
}
@@ -563,7 +563,7 @@ class Scaladoc extends ScalaMatchingTask {
/** Transforms a file into a Scalac-readable string.
*
- * @param path A file to convert.
+ * @param file A file to convert.
* @return A string-representation of the file like `/x/k/a.scala`.
*/
private def asString(file: File): String =
diff --git a/src/compiler/scala/tools/nsc/ScalaDoc.scala b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
index ba434bc797..52a0c20a11 100644
--- a/src/compiler/scala/tools/nsc/ScalaDoc.scala
+++ b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
@@ -10,7 +10,6 @@ import java.io.File.pathSeparator
import scala.tools.nsc.doc.DocFactory
import scala.tools.nsc.reporters.ConsoleReporter
import scala.reflect.internal.util.FakePos
-import Properties.msilLibPath
/** The main class for scaladoc, a front-end for the Scala compiler
* that generates documentation from source files.
@@ -42,12 +41,8 @@ class ScalaDoc {
reporter.warning(null, "Phases are restricted when using Scaladoc")
else if (docSettings.help.value || !hasFiles)
reporter.echo(command.usageMsg)
- else try {
- if (docSettings.target.value == "msil")
- msilLibPath foreach (x => docSettings.assemrefs.value += (pathSeparator + x))
-
- new DocFactory(reporter, docSettings) document command.files
- }
+ else
+ try { new DocFactory(reporter, docSettings) document command.files }
catch {
case ex @ FatalError(msg) =>
if (docSettings.debug.value) ex.printStackTrace()
@@ -65,7 +60,7 @@ object ScalaDoc extends ScalaDoc {
class Command(arguments: List[String], settings: doc.Settings) extends CompilerCommand(arguments, settings) {
override def cmdName = "scaladoc"
override def usageMsg = (
- createUsageMsg("where possible scaladoc", false, x => x.isStandard && settings.isScaladocSpecific(x.name)) +
+ createUsageMsg("where possible scaladoc", shouldExplain = false, x => x.isStandard && settings.isScaladocSpecific(x.name)) +
"\n\nStandard scalac options also available:" +
createUsageMsg(x => x.isStandard && !settings.isScaladocSpecific(x.name))
)
diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala
index a091b04993..b4d2adaad4 100644
--- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala
@@ -8,15 +8,13 @@ package doc
import scala.util.control.ControlThrowable
import reporters.Reporter
-import scala.reflect.internal.util.{ NoPosition, BatchSourceFile}
-import io.{ File, Directory }
-import DocParser.Parsed
+import scala.reflect.internal.util.BatchSourceFile
/** A documentation processor controls the process of generating Scala
* documentation, which is as follows.
*
* * A simplified compiler instance (with only the front-end phases enabled)
- * * is created, and additional ''sourceless'' comments are registered.
+ * * is created, and additional `sourceless` comments are registered.
* * Documentable files are compiled, thereby filling the compiler's symbol table.
* * A documentation model is extracted from the post-compilation symbol table.
* * A generator is used to transform the model into the correct final format (HTML).
@@ -33,15 +31,7 @@ import DocParser.Parsed
* @author Gilles Dubochet */
class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor =>
/** The unique compiler instance used by this processor and constructed from its `settings`. */
- object compiler extends Global(settings, reporter) with interactive.RangePositions {
- override protected def computeInternalPhases() {
- phasesSet += syntaxAnalyzer
- phasesSet += analyzer.namerFactory
- phasesSet += analyzer.packageObjects
- phasesSet += analyzer.typerFactory
- }
- override def forScaladoc = true
- }
+ object compiler extends ScaladocGlobal(settings, reporter)
/** Creates a scaladoc site for all symbols defined in this call's `source`,
* as well as those defined in `sources` of previous calls to the same processor.
@@ -104,7 +94,7 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
val documentError: PartialFunction[Throwable, Unit] = {
case NoCompilerRunException =>
- reporter.info(null, "No documentation generated with unsucessful compiler run", false)
+ reporter.info(null, "No documentation generated with unsucessful compiler run", force = false)
case _: ClassNotFoundException =>
()
}
@@ -128,7 +118,7 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
}
case _ => ()
}
- docletInstance.generate
+ docletInstance.generate()
}
try generate()
diff --git a/src/compiler/scala/tools/nsc/doc/DocParser.scala b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala
index 27c995e1c3..6dc3e5a62b 100644
--- a/src/compiler/scala/tools/nsc/doc/DocParser.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala
@@ -9,24 +9,19 @@ package doc
import reporters._
import scala.reflect.internal.util._
-import interactive.RangePositions
import DocParser.Parsed
/** A very minimal global customized for extracting `DocDefs`. It stops
* right after parsing so it can read `DocDefs` from source code which would
* otherwise cause the compiler to go haywire.
*/
-class DocParser(settings: nsc.Settings, reporter: Reporter)
- extends Global(settings, reporter)
- with RangePositions {
-
+class DocParser(settings: nsc.Settings, reporter: Reporter) extends Global(settings, reporter) {
def this(settings: Settings) = this(settings, new ConsoleReporter(settings))
def this() = this(new Settings(Console println _))
// the usual global initialization
locally { new Run() }
- override def forScaladoc = true
override protected def computeInternalPhases() {
phasesSet += syntaxAnalyzer
}
@@ -47,7 +42,7 @@ class DocParser(settings: nsc.Settings, reporter: Reporter)
*/
def docUnit(code: String) = {
val unit = new CompilationUnit(new BatchSourceFile("<console>", code))
- val scanner = new syntaxAnalyzer.UnitParser(unit)
+ val scanner = newUnitParser(unit)
scanner.compilationUnit()
}
diff --git a/src/compiler/scala/tools/nsc/doc/Index.scala b/src/scaladoc/scala/tools/nsc/doc/Index.scala
index f9b9eecdb3..f9b9eecdb3 100644
--- a/src/compiler/scala/tools/nsc/doc/Index.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Index.scala
diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
new file mode 100644
index 0000000000..5ad50445a8
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
@@ -0,0 +1,234 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package doc
+
+import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch }
+import scala.reflect.internal.Chars._
+import symtab._
+import reporters.Reporter
+import typechecker.Analyzer
+import scala.reflect.internal.util.{ BatchSourceFile, RangePosition }
+
+trait ScaladocAnalyzer extends Analyzer {
+ val global : Global // generally, a ScaladocGlobal
+ import global._
+
+ override def newTyper(context: Context): ScaladocTyper = new Typer(context) with ScaladocTyper
+
+ trait ScaladocTyper extends Typer {
+ private def unit = context.unit
+
+ override def canAdaptConstantTypeToLiteral = false
+
+ override protected def macroImplementationNotFoundMessage(name: Name): String = (
+ super.macroImplementationNotFoundMessage(name)
+ + "\nWhen generating scaladocs for multiple projects at once, consider using -Ymacro-no-expand to disable macro expansions altogether."
+ )
+
+ override def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = {
+ val sym = docDef.symbol
+
+ if ((sym ne null) && (sym ne NoSymbol)) {
+ val comment = docDef.comment
+ docComments(sym) = comment
+ comment.defineVariables(sym)
+ val typer1 = newTyper(context.makeNewScope(docDef, context.owner))
+ for (useCase <- comment.useCases) {
+ typer1.silent(_ => typer1 defineUseCases useCase) match {
+ case SilentTypeError(err) =>
+ unit.warning(useCase.pos, err.errMsg)
+ case _ =>
+ }
+ for (useCaseSym <- useCase.defined) {
+ if (sym.name != useCaseSym.name)
+ unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode)
+ }
+ }
+ }
+
+ super.typedDocDef(docDef, mode, pt)
+ }
+
+ def defineUseCases(useCase: UseCase): List[Symbol] = {
+ def stringParser(str: String): syntaxAnalyzer.Parser = {
+ val file = new BatchSourceFile(context.unit.source.file, str) {
+ override def positionInUltimateSource(pos: Position) = {
+ pos.withSource(context.unit.source, useCase.pos.start)
+ }
+ }
+ newUnitParser(new CompilationUnit(file))
+ }
+
+ val trees = stringParser(useCase.body+";").nonLocalDefOrDcl
+ val enclClass = context.enclClass.owner
+
+ def defineAlias(name: Name) = (
+ if (context.scope.lookup(name) == NoSymbol) {
+ lookupVariable(name.toString.substring(1), enclClass) foreach { repl =>
+ silent(_.typedTypeConstructor(stringParser(repl).typ())) map { tpt =>
+ val alias = enclClass.newAliasType(name.toTypeName, useCase.pos)
+ val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias)
+ val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe)))
+ alias setInfo newInfo
+ context.scope.enter(alias)
+ }
+ }
+ }
+ )
+
+ for (tree <- trees; t <- tree)
+ t match {
+ case Ident(name) if name startsWith '$' => defineAlias(name)
+ case _ =>
+ }
+
+ useCase.aliases = context.scope.toList
+ namer.enterSyms(trees)
+ typedStats(trees, NoSymbol)
+ useCase.defined = context.scope.toList filterNot (useCase.aliases contains _)
+
+ if (settings.debug.value)
+ useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe)))
+
+ useCase.defined
+ }
+ }
+}
+
+abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends SyntaxAnalyzer {
+ import global._
+
+ class ScaladocJavaUnitParser(unit: CompilationUnit) extends {
+ override val in = new ScaladocJavaUnitScanner(unit)
+ } with JavaUnitParser(unit) { }
+
+ class ScaladocJavaUnitScanner(unit: CompilationUnit) extends JavaUnitScanner(unit) {
+ /** buffer for the documentation comment
+ */
+ var docBuffer: StringBuilder = null
+
+ /** add the given character to the documentation buffer
+ */
+ protected def putDocChar(c: Char) {
+ if (docBuffer ne null) docBuffer.append(c)
+ }
+
+ override protected def skipComment(): Boolean = {
+ if (in.ch == '/') {
+ do {
+ in.next
+ } while ((in.ch != CR) && (in.ch != LF) && (in.ch != SU))
+ true
+ } else if (in.ch == '*') {
+ docBuffer = null
+ in.next
+ val scalaDoc = ("/**", "*/")
+ if (in.ch == '*')
+ docBuffer = new StringBuilder(scalaDoc._1)
+ do {
+ do {
+ if (in.ch != '*' && in.ch != SU) {
+ in.next; putDocChar(in.ch)
+ }
+ } while (in.ch != '*' && in.ch != SU)
+ while (in.ch == '*') {
+ in.next; putDocChar(in.ch)
+ }
+ } while (in.ch != '/' && in.ch != SU)
+ if (in.ch == '/') in.next
+ else incompleteInputError("unclosed comment")
+ true
+ } else {
+ false
+ }
+ }
+ }
+
+ class ScaladocUnitScanner(unit0: CompilationUnit, patches0: List[BracePatch]) extends UnitScanner(unit0, patches0) {
+
+ private var docBuffer: StringBuilder = null // buffer for comments
+ private var docPos: Position = NoPosition // last doc comment position
+ private var inDocComment = false
+
+ override def discardDocBuffer() = {
+ val doc = flushDoc
+ if (doc ne null)
+ unit.warning(docPos, "discarding unmoored doc comment")
+ }
+
+ override def flushDoc(): DocComment = {
+ if (docBuffer eq null) null
+ else try DocComment(docBuffer.toString, docPos) finally docBuffer = null
+ }
+
+ override protected def putCommentChar() {
+ if (inDocComment)
+ docBuffer append ch
+
+ nextChar()
+ }
+ override def skipDocComment(): Unit = {
+ inDocComment = true
+ docBuffer = new StringBuilder("/**")
+ super.skipDocComment()
+ }
+ override def skipBlockComment(): Unit = {
+ inDocComment = false
+ docBuffer = new StringBuilder("/*")
+ super.skipBlockComment()
+ }
+ override def skipComment(): Boolean = {
+ super.skipComment() && {
+ if (docBuffer ne null) {
+ if (inDocComment)
+ foundDocComment(docBuffer.toString, offset, charOffset - 2)
+ else
+ try foundComment(docBuffer.toString, offset, charOffset - 2) finally docBuffer = null
+ }
+ true
+ }
+ }
+ def foundComment(value: String, start: Int, end: Int) {
+ val pos = new RangePosition(unit.source, start, start, end)
+ unit.comment(pos, value)
+ }
+ def foundDocComment(value: String, start: Int, end: Int) {
+ docPos = new RangePosition(unit.source, start, start, end)
+ unit.comment(docPos, value)
+ }
+ }
+ class ScaladocUnitParser(unit: CompilationUnit, patches: List[BracePatch]) extends UnitParser(unit, patches) {
+ override def newScanner() = new ScaladocUnitScanner(unit, patches)
+ override def withPatches(patches: List[BracePatch]) = new ScaladocUnitParser(unit, patches)
+
+ override def joinComment(trees: => List[Tree]): List[Tree] = {
+ val doc = in.flushDoc
+ if ((doc ne null) && doc.raw.length > 0) {
+ log(s"joinComment(doc=$doc)")
+ val joined = trees map {
+ t =>
+ DocDef(doc, t) setPos {
+ if (t.pos.isDefined) {
+ val pos = doc.pos.withEnd(t.pos.endOrPoint)
+ // always make the position transparent
+ pos.makeTransparent
+ } else {
+ t.pos
+ }
+ }
+ }
+ joined.find(_.pos.isOpaqueRange) foreach {
+ main =>
+ val mains = List(main)
+ joined foreach { t => if (t ne main) ensureNonOverlapping(t, mains) }
+ }
+ joined
+ }
+ else trees
+ }
+ }
+}
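For readers new to the @usecase machinery that defineUseCases typechecks above, an illustrative doc comment of the kind it handles (this snippet is an example of the Scaladoc feature, not code from the patch; the CanBuildFrom-heavy signature is simply a typical reason to publish a simplified use case):

    trait MyColl[A] {
      /** Builds a new collection by applying a function to every element.
       *
       *  @usecase def map[B](f: A => B): MyColl[B]
       *    Returns a new collection with `f` applied to every element.
       */
      def map[B, That](f: A => B)(implicit bf: scala.collection.generic.CanBuildFrom[MyColl[A], B, That]): That
    }

Scaladoc renders the simplified @usecase signature for end users while the real signature stays in the source; the ScaladocTyper above is what parses and typechecks the use case and warns when its name does not match the commented symbol.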
diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala
new file mode 100644
index 0000000000..20f24dc753
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala
@@ -0,0 +1,49 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package doc
+
+import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch }
+import scala.reflect.internal.Chars._
+import symtab._
+import reporters.Reporter
+import typechecker.Analyzer
+import scala.reflect.internal.util.{ BatchSourceFile, RangePosition }
+
+trait ScaladocGlobalTrait extends Global {
+ outer =>
+
+ override val useOffsetPositions = false
+ override def newUnitParser(unit: CompilationUnit) = new syntaxAnalyzer.ScaladocUnitParser(unit, Nil)
+
+ override lazy val syntaxAnalyzer = new ScaladocSyntaxAnalyzer[outer.type](outer) {
+ val runsAfter = List[String]()
+ val runsRightAfter = None
+ }
+ override lazy val loaders = new SymbolLoaders {
+ val global: outer.type = outer
+
+ // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
+ // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
+ // that are not in their correct place (see bug for details)
+ override protected def signalError(root: Symbol, ex: Throwable) {
+ log(s"Suppressing error involving $root: $ex")
+ }
+ }
+}
+
+class ScaladocGlobal(settings: doc.Settings, reporter: Reporter) extends Global(settings, reporter) with ScaladocGlobalTrait {
+ override protected def computeInternalPhases() {
+ phasesSet += syntaxAnalyzer
+ phasesSet += analyzer.namerFactory
+ phasesSet += analyzer.packageObjects
+ phasesSet += analyzer.typerFactory
+ }
+ override def forScaladoc = true
+ override lazy val analyzer = new {
+ val global: ScaladocGlobal.this.type = ScaladocGlobal.this
+ } with ScaladocAnalyzer
+}
diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
index 02630a99b2..90b94e1336 100644
--- a/src/compiler/scala/tools/nsc/doc/Settings.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
@@ -11,7 +11,7 @@ import scala.language.postfixOps
/** An extended version of compiler settings, with additional Scaladoc-specific options.
* @param error A function that prints a string to the appropriate error stream
- * @param print A function that prints the string, without any extra boilerplate of error */
+ * @param printMsg A function that prints the string, without any extra boilerplate of error */
class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) extends scala.tools.nsc.Settings(error) {
/** A setting that defines in which format the documentation is output. ''Note:'' this setting is currently always
@@ -315,10 +315,10 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
/** Common conversion targets that affect any class in Scala */
val commonConversionTargets = Set(
- "scala.Predef.any2stringfmt",
- "scala.Predef.any2stringadd",
- "scala.Predef.any2ArrowAssoc",
- "scala.Predef.any2Ensuring",
+ "scala.Predef.StringFormat",
+ "scala.Predef.StringAdd",
+ "scala.Predef.ArrowAssoc",
+ "scala.Predef.Ensuring",
"scala.collection.TraversableOnce.alternateImplicit")
/** There's a reason all these are specialized by hand but documenting each of them is beyond the point */
diff --git a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala b/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala
index d3e5c869e0..9447e36610 100644
--- a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala
@@ -15,7 +15,7 @@ trait Uncompilable {
val global: Global
val settings: Settings
- import global.{ reporter, inform, warning, newTypeName, newTermName, Symbol, Name, DocComment, NoSymbol }
+ import global.{ reporter, inform, warning, newTypeName, newTermName, Symbol, DocComment, NoSymbol }
import global.definitions.AnyRefClass
import global.rootMirror.RootClass
diff --git a/src/compiler/scala/tools/nsc/doc/Universe.scala b/src/scaladoc/scala/tools/nsc/doc/Universe.scala
index 11520c810e..11520c810e 100644
--- a/src/compiler/scala/tools/nsc/doc/Universe.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Universe.scala
diff --git a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
index f509c63ba0..2064d86860 100755
--- a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2012 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Manohar Jonnalagedda
*/
@@ -8,25 +8,21 @@ package doc
package base
import base.comment._
-import reporters.Reporter
import scala.collection._
import scala.util.matching.Regex
-import scala.annotation.switch
-import scala.reflect.internal.util.{NoPosition, Position}
+import scala.reflect.internal.util.Position
import scala.language.postfixOps
/** The comment parser transforms raw comment strings into `Comment` objects.
* Call `parse` to run the parser. Note that the parser is stateless and
* should only be built once for a given Scaladoc run.
*
- * @param reporter The reporter on which user messages (error, warnings) should be printed.
- *
* @author Manohar Jonnalagedda
* @author Gilles Dubochet */
trait CommentFactoryBase { this: MemberLookupBase =>
val global: Global
- import global.{ reporter, definitions, Symbol }
+ import global.{ reporter, Symbol }
/* Creates comments with necessary arguments */
def createComment (
@@ -66,7 +62,6 @@ trait CommentFactoryBase { this: MemberLookupBase =>
val note = note0
val example = example0
val constructor = constructor0
- val source = source0
val inheritDiagram = inheritDiagram0
val contentDiagram = contentDiagram0
val groupDesc = groupDesc0
@@ -233,11 +228,11 @@ trait CommentFactoryBase { this: MemberLookupBase =>
case CodeBlockStartRegex(before, marker, after) :: ls if (!inCodeBlock) =>
if (!before.trim.isEmpty && !after.trim.isEmpty)
- parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, false)
+ parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = false)
else if (!before.trim.isEmpty)
- parse0(docBody, tags, lastTagKey, before :: marker :: ls, false)
+ parse0(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = false)
else if (!after.trim.isEmpty)
- parse0(docBody, tags, lastTagKey, marker :: after :: ls, true)
+ parse0(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = true)
else lastTagKey match {
case Some(key) =>
val value =
@@ -245,18 +240,18 @@ trait CommentFactoryBase { this: MemberLookupBase =>
case Some(b :: bs) => (b + endOfLine + marker) :: bs
case None => oops("lastTagKey set when no tag exists for key")
}
- parse0(docBody, tags + (key -> value), lastTagKey, ls, true)
+ parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = true)
case None =>
- parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, true)
+ parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = true)
}
case CodeBlockEndRegex(before, marker, after) :: ls =>
if (!before.trim.isEmpty && !after.trim.isEmpty)
- parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, true)
+ parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = true)
if (!before.trim.isEmpty)
- parse0(docBody, tags, lastTagKey, before :: marker :: ls, true)
+ parse0(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = true)
else if (!after.trim.isEmpty)
- parse0(docBody, tags, lastTagKey, marker :: after :: ls, false)
+ parse0(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = false)
else lastTagKey match {
case Some(key) =>
val value =
@@ -264,9 +259,9 @@ trait CommentFactoryBase { this: MemberLookupBase =>
case Some(b :: bs) => (b + endOfLine + marker) :: bs
case None => oops("lastTagKey set when no tag exists for key")
}
- parse0(docBody, tags + (key -> value), lastTagKey, ls, false)
+ parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = false)
case None =>
- parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, false)
+ parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = false)
}
case SymbolTagRegex(name, sym, body) :: ls if (!inCodeBlock) =>
@@ -380,7 +375,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
}
- parse0(new StringBuilder(comment.size), Map.empty, None, clean(comment), false)
+ parse0(new StringBuilder(comment.size), Map.empty, None, clean(comment), inCodeBlock = false)
}
@@ -456,7 +451,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
else {
jumpWhitespace()
jump(style)
- val p = Paragraph(inline(false))
+ val p = Paragraph(inline(isInlineEnd = false))
blockEnded("end of list line ")
Some(p)
}
@@ -515,11 +510,11 @@ trait CommentFactoryBase { this: MemberLookupBase =>
def para(): Block = {
val p =
if (summaryParsed)
- Paragraph(inline(false))
+ Paragraph(inline(isInlineEnd = false))
else {
val s = summary()
val r =
- if (checkParaEnded) List(s) else List(s, inline(false))
+ if (checkParaEnded()) List(s) else List(s, inline(isInlineEnd = false))
summaryParsed = true
Paragraph(Chain(r))
}
@@ -683,7 +678,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
def link(): Inline = {
val SchemeUri = """([a-z]+:.*)""".r
jump("[[")
- var parens = 2 + repeatJump('[')
+ val parens = 2 + repeatJump('[')
val start = "[" * parens
val stop = "]" * parens
//println("link with " + parens + " matching parens")
@@ -729,7 +724,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
*/
def normalizeIndentation(_code: String): String = {
- var code = _code.trim
+ val code = _code.trim
var maxSkip = Integer.MAX_VALUE
var crtSkip = 0
var wsArea = true
@@ -884,20 +879,6 @@ trait CommentFactoryBase { this: MemberLookupBase =>
count
}
- final def jumpUntil(chars: String): Int = {
- assert(chars.length > 0)
- var count = 0
- val c = chars.charAt(0)
- while (!check(chars) && char != endOfText) {
- nextChar()
- while (char != c && char != endOfText) {
- nextChar()
- count += 1
- }
- }
- count
- }
-
final def jumpUntil(pred: => Boolean): Int = {
var count = 0
while (!pred && char != endOfText) {
diff --git a/src/compiler/scala/tools/nsc/doc/base/LinkTo.scala b/src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala
index c11179800c..c11179800c 100755
--- a/src/compiler/scala/tools/nsc/doc/base/LinkTo.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala
diff --git a/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala
index cdcfeaae81..8d80333195 100755
--- a/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala
@@ -88,7 +88,7 @@ trait MemberLookupBase {
// (4) if we still haven't found anything, create a tooltip
Tooltip(query)
case List(l) => l
- case links =>
+ case links =>
val chosen = chooseLink(links)
def linkToString(link: LinkTo) = {
val chosenInfo =
diff --git a/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala b/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala
index 02e662da85..2a07547de2 100755
--- a/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala
@@ -10,8 +10,6 @@ package comment
import scala.collection._
-import java.net.URL
-
/** A body of text. A comment has a single body, which is composed of
* at least one block. Inside every body is exactly one summary (see
* [[scala.tools.nsc.doc.model.comment.Summary]]). */
diff --git a/src/compiler/scala/tools/nsc/doc/base/comment/Comment.scala b/src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala
index 2b28164ca4..a3d05ae50b 100644
--- a/src/compiler/scala/tools/nsc/doc/base/comment/Comment.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala
@@ -102,9 +102,6 @@ abstract class Comment {
/** A usage example related to the entity. */
def example: List[Body]
- /** The comment as it appears in the source text. */
- def source: Option[String]
-
/** A description for the primary constructor */
def constructor: Option[Body]
diff --git a/src/compiler/scala/tools/nsc/doc/doclet/Generator.scala b/src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala
index 735b79c336..42b56aa927 100644
--- a/src/compiler/scala/tools/nsc/doc/doclet/Generator.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala
@@ -21,7 +21,7 @@ abstract class Generator {
/** Outputs documentation (as a side effect). */
def generate(): Unit = {
assert(checks forall { check => check() })
- generateImpl
+ generateImpl()
}
/** Outputs documentation (as a side effect). This method is called only if all `checks` are true. */
diff --git a/src/compiler/scala/tools/nsc/doc/doclet/Indexer.scala b/src/scaladoc/scala/tools/nsc/doc/doclet/Indexer.scala
index 0cdd47182f..0cdd47182f 100644
--- a/src/compiler/scala/tools/nsc/doc/doclet/Indexer.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/doclet/Indexer.scala
diff --git a/src/compiler/scala/tools/nsc/doc/doclet/Universer.scala b/src/scaladoc/scala/tools/nsc/doc/doclet/Universer.scala
index ee8b7809e5..ee8b7809e5 100644
--- a/src/compiler/scala/tools/nsc/doc/doclet/Universer.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/doclet/Universer.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/Doclet.scala b/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala
index 3aa3e87554..21c5f6bb67 100644
--- a/src/compiler/scala/tools/nsc/doc/html/Doclet.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala
@@ -13,7 +13,7 @@ import doclet._
class Doclet extends Generator with Universer with Indexer {
def generateImpl() {
- new html.HtmlFactory(universe, index).generate
+ new html.HtmlFactory(universe, index).generate()
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
index 4630c3dda8..d721a96ad7 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -103,7 +103,7 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
/** Generates the Scaladoc site for a model into the site root.
* A scaladoc site is a set of HTML and related files
* that document a model extracted from a compiler run.
- * @param model The model to generate in the form of a sequence of packages. */
+ */
def generate() {
def copyResource(subPath: String) {
@@ -111,7 +111,7 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
val p = "/scala/tools/nsc/doc/html/resource/" + subPath
val inputStream = getClass.getResourceAsStream(p)
assert(inputStream != null, p)
- }.toByteArray
+ }.toByteArray()
val dest = Directory(siteRoot) / subPath
dest.parent.createDirectory()
val out = dest.toFile.bufferedOutput()
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
index 69da322418..229e26d699 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -11,7 +11,7 @@ import base._
import base.comment._
import model._
-import scala.xml.{XML, NodeSeq}
+import scala.xml.NodeSeq
import scala.xml.dtd.{DocType, PublicID}
import scala.collection._
import java.io.Writer
@@ -123,7 +123,7 @@ abstract class HtmlPage extends Page { thisPage =>
case Text(text) => scala.xml.Text(text)
case Summary(in) => inlineToHtml(in)
case HtmlTag(tag) => scala.xml.Unparsed(tag)
- case EntityLink(target, link) => linkToHtml(target, link, true)
+ case EntityLink(target, link) => linkToHtml(target, link, hasLinks = true)
}
def linkToHtml(text: Inline, link: LinkTo, hasLinks: Boolean) = link match {
diff --git a/src/compiler/scala/tools/nsc/doc/html/Page.scala b/src/scaladoc/scala/tools/nsc/doc/html/Page.scala
index 62166f7def..91939cf3de 100644
--- a/src/compiler/scala/tools/nsc/doc/html/Page.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/Page.scala
@@ -45,7 +45,7 @@ abstract class Page {
/** Writes this page as a file. The file's location is relative to the
* generator's site root, and the encoding is also defined by the generator.
- * @param generator The generator that is writing this page. */
+ * @param site The generator that is writing this page. */
def writeFor(site: HtmlFactory): Unit
def kindToString(mbr: MemberEntity) =
@@ -84,16 +84,10 @@ abstract class Page {
}
/** A relative link from this page to some destination class entity.
- * @param destEntity The class or object entity that the link will point to. */
+ * @param destClass The class or object entity that the link will point to. */
def relativeLinkTo(destClass: TemplateEntity): String =
relativeLinkTo(templateToPath(destClass))
- /** A relative link from this page to some destination page in the Scaladoc site.
- * @param destPage The page that the link will point to. */
- def relativeLinkTo(destPage: HtmlPage): String = {
- relativeLinkTo(destPage.path)
- }
-
/** A relative link from this page to some destination path.
* @param destPath The path that the link will point to. */
def relativeLinkTo(destPath: List[String]): String = {
diff --git a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala
index ee78f4ea7a..5781e680dd 100644
--- a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala
@@ -29,8 +29,8 @@ private[html] object SyntaxHigh {
/** Annotations, sorted alphabetically */
val annotations = Array(
"BeanProperty", "SerialVersionUID",
- "beanGetter", "beanSetter", "bridge", "cloneable",
- "deprecated", "deprecatedName",
+ "beanGetter", "beanSetter", "bridge",
+ "deprecated", "deprecatedName", "deprecatedOverriding", "deprecatedInheritance",
"elidable", "field", "getter", "inline",
"migration", "native", "noinline", "param",
"remote", "setter", "specialized", "strictfp", "switch",
@@ -40,7 +40,7 @@ private[html] object SyntaxHigh {
/** Standard library classes/objects, sorted alphabetically */
val standards = Array (
- "WeakTypeTag", "Any", "AnyRef", "AnyVal", "App", "Application", "Array",
+ "WeakTypeTag", "Any", "AnyRef", "AnyVal", "App", "Array",
"Boolean", "Byte", "Char", "Class", "ClassTag", "ClassManifest",
"Console", "Double", "Enumeration", "Float", "Function", "Int",
"List", "Long", "Manifest", "Map",
@@ -107,14 +107,14 @@ private[html] object SyntaxHigh {
case '/' =>
if (star) {
if (level > 0) level -= 1
- if (level == 0) i else multiline(i+1, true)
+ if (level == 0) i else multiline(i+1, star = true)
} else
- multiline(i+1, false)
+ multiline(i+1, star = false)
case _ =>
- multiline(i+1, false)
+ multiline(i+1, star = false)
}
}
- if (buf(i) == '/') line(i) else multiline(i, true)
+ if (buf(i) == '/') line(i) else multiline(i, star = true)
out.toString
}
@@ -129,16 +129,16 @@ private[html] object SyntaxHigh {
out append ch
ch match {
case '\\' =>
- charlit0(i+1, true)
+ charlit0(i+1, bslash = true)
case '\'' if !bslash =>
i
case _ =>
- if (bslash && '0' <= ch && ch <= '9') charlit0(i+1, true)
- else charlit0(i+1, false)
+ if (bslash && '0' <= ch && ch <= '9') charlit0(i+1, bslash = true)
+ else charlit0(i+1, bslash = false)
}
}
}
- charlit0(j, false)
+ charlit0(j, bslash = false)
out.toString
}
@@ -150,14 +150,14 @@ private[html] object SyntaxHigh {
out append ch
ch match {
case '\\' =>
- strlit0(i+1, true)
+ strlit0(i+1, bslash = true)
case '"' if !bslash =>
i
case _ =>
- strlit0(i+1, false)
+ strlit0(i+1, bslash = false)
}
}
- strlit0(i, false)
+ strlit0(i, bslash = false)
out.toString
}
@@ -183,7 +183,7 @@ private[html] object SyntaxHigh {
ch match {
case 'e' | 'E' =>
out append ch
- expo(i+1, false)
+ expo(i+1, signed = false)
case _ =>
if (Character.isDigit(ch)) {
out append ch
@@ -197,7 +197,7 @@ private[html] object SyntaxHigh {
ch match {
case '+' | '-' if !signed =>
out append ch
- expo(i+1, true)
+ expo(i+1, signed = true)
case _ =>
if (Character.isDigit(ch)) {
out append ch
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
index c76bdc58d9..c034647320 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
@@ -9,10 +9,8 @@ package html
package page
import model._
-
import scala.collection._
import scala.xml._
-import scala.util.parsing.json.{JSONObject, JSONArray}
class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala
index a205e02533..e3c94505ab 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala
@@ -8,7 +8,6 @@ package scala.tools.nsc.doc.html.page
import scala.tools.nsc.doc
import scala.tools.nsc.doc.model.{Package, DocTemplateEntity}
import scala.tools.nsc.doc.html.{Page, HtmlFactory}
-import java.nio.channels.Channels
import scala.util.parsing.json.{JSONObject, JSONArray}
class IndexScript(universe: doc.Universe, index: doc.Index) extends Page {
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
index a74c2eedbd..a74c2eedbd 100755
--- a/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Source.scala
index 68289b7474..37145756d9 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Source.scala
@@ -8,8 +8,7 @@ package doc
package html
package page
-import model._
-import scala.xml.{NodeSeq, Unparsed}
+import scala.xml.NodeSeq
import java.io.File
class Source(sourceFile: File) extends HtmlPage {
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
index 5cbb14b486..63509de4b5 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
@@ -13,8 +13,6 @@ import base.comment._
import model._
import model.diagram._
-import diagram._
-
import scala.xml.{ NodeSeq, Text, UnprefixedAttribute }
import scala.language.postfixOps
@@ -112,8 +110,8 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<h1>{ displayName }</h1>
</div>
- { signature(tpl, true) }
- { memberToCommentHtml(tpl, tpl.inTemplate, true) }
+ { signature(tpl, isSelf = true) }
+ { memberToCommentHtml(tpl, tpl.inTemplate, isSelf = true) }
<div id="mbrsel">
<div id='textfilter'><span class='pre'/><span class='input'><input id='mbrsel-input' type='text' accesskey='/'/></span><span class='post'/></div>
@@ -244,7 +242,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
NodeSeq fromSeq (for (conversion <- (tpl.conversions)) yield
<div class="conversion" name={ conversion.conversionQualifiedName }>
<h3>Inherited by implicit conversion { conversion.conversionShortName } from
- { typeToHtml(tpl.resultType, true) } to { typeToHtml(conversion.targetType, true) }
+ { typeToHtml(tpl.resultType, hasLinks = true) } to { typeToHtml(conversion.targetType, hasLinks = true) }
</h3>
</div>
)
@@ -286,14 +284,14 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
}
def memberToHtml(mbr: MemberEntity, inTpl: DocTemplateEntity): NodeSeq = {
- val memberComment = memberToCommentHtml(mbr, inTpl, false)
+ val memberComment = memberToCommentHtml(mbr, inTpl, isSelf = false)
<li name={ mbr.definitionName } visbl={ if (mbr.visibility.isProtected) "prt" else "pub" }
data-isabs={ mbr.isAbstract.toString }
fullComment={ if(memberComment.filter(_.label=="div").isEmpty) "no" else "yes" }
group={ mbr.group }>
<a id={ mbr.signature }/>
<a id={ mbr.signatureCompat }/>
- { signature(mbr, false) }
+ { signature(mbr, isSelf = false) }
{ memberComment }
</li>
}
@@ -400,7 +398,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
case Some(conv) =>
<dt class="implicit">Implicit information</dt> ++
{
- val targetType = typeToHtml(conv.targetType, true)
+ val targetType = typeToHtml(conv.targetType, hasLinks = true)
val conversionMethod = conv.convertorMethod match {
case Left(member) => Text(member.name)
case Right(name) => Text(name)
@@ -426,7 +424,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
}
<dd>
- This member is added by an implicit conversion from { typeToHtml(inTpl.resultType, true) } to
+ This member is added by an implicit conversion from { typeToHtml(inTpl.resultType, hasLinks = true) } to
{ targetType } performed by method { conversionMethod } in { conversionOwner }.
{ constraintText }
</dd>
@@ -488,7 +486,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
case nte: NonTemplateMemberEntity if nte.isUseCase =>
<div class="full-signature-block toggleContainer">
<span class="toggle">Full Signature</span>
- <div class="hiddenContent full-signature-usecase">{ signature(nte.useCaseOf.get,true) }</div>
+ <div class="hiddenContent full-signature-usecase">{ signature(nte.useCaseOf.get,isSelf = true) }</div>
</div>
case _ => NodeSeq.Empty
}
@@ -525,7 +523,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
val sourceLink: NodeSeq = mbr match {
case dtpl: DocTemplateEntity if (isSelf && dtpl.sourceUrl.isDefined && dtpl.inSource.isDefined && !isReduced) =>
- val (absFile, line) = dtpl.inSource.get
+ val (absFile, _) = dtpl.inSource.get
<dt>Source</dt>
<dd>{ <a href={ dtpl.sourceUrl.get.toString } target="_blank">{ Text(absFile.file.getName) }</a> }</dd>
case _ => NodeSeq.Empty
@@ -653,7 +651,6 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
case dtpl: DocTemplateEntity if isSelf && !isReduced =>
val diagram = f(dtpl)
if (diagram.isDefined) {
- val s = universe.settings
val diagramSvg = generator.generate(diagram.get, tpl, this)
if (diagramSvg != NodeSeq.Empty) {
<div class="toggleContainer block diagram-container" id={ id + "-container"}>
@@ -763,7 +760,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
if (isReduced) NodeSeq.Empty else {
def paramsToHtml(vlsss: List[List[ValueParam]]): NodeSeq = {
def param0(vl: ValueParam): NodeSeq =
- // notice the }{ in the next lines, they are necessary to avoid a undesired withspace in output
+ // notice the }{ in the next lines, they are necessary to avoid an undesired whitespace in output
<span name={ vl.name }>{
Text(vl.name)
}{ Text(": ") ++ typeToHtml(vl.resultType, hasLinks) }{
@@ -926,13 +923,13 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
if (tpl.universe.settings.useStupidTypes.value)
superTpl match {
case dtpl: DocTemplateEntity =>
- val sig = signature(dtpl, false, true) \ "_"
+ val sig = signature(dtpl, isSelf = false, isReduced = true) \ "_"
sig
case tpl: TemplateEntity =>
Text(tpl.name)
}
else
- typeToHtml(superType, true)
+ typeToHtml(superType, hasLinks = true)
private def constraintToHtml(constraint: Constraint): NodeSeq = constraint match {
case ktcc: KnownTypeClassConstraint =>
@@ -944,21 +941,21 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
context-bounded</a> ++ scala.xml.Text(" by " + tcc.typeClassEntity.qualifiedName + " (" + tcc.typeParamName + ": ") ++
templateToHtml(tcc.typeClassEntity) ++ scala.xml.Text(")")
case impl: ImplicitInScopeConstraint =>
- scala.xml.Text("an implicit value of type ") ++ typeToHtml(impl.implicitType, true) ++ scala.xml.Text(" is in scope")
+ scala.xml.Text("an implicit value of type ") ++ typeToHtml(impl.implicitType, hasLinks = true) ++ scala.xml.Text(" is in scope")
case eq: EqualTypeParamConstraint =>
scala.xml.Text(eq.typeParamName + " is " + eq.rhs.name + " (" + eq.typeParamName + " =:= ") ++
- typeToHtml(eq.rhs, true) ++ scala.xml.Text(")")
+ typeToHtml(eq.rhs, hasLinks = true) ++ scala.xml.Text(")")
case bt: BoundedTypeParamConstraint =>
scala.xml.Text(bt.typeParamName + " is a superclass of " + bt.lowerBound.name + " and a subclass of " +
bt.upperBound.name + " (" + bt.typeParamName + " >: ") ++
- typeToHtml(bt.lowerBound, true) ++ scala.xml.Text(" <: ") ++
- typeToHtml(bt.upperBound, true) ++ scala.xml.Text(")")
+ typeToHtml(bt.lowerBound, hasLinks = true) ++ scala.xml.Text(" <: ") ++
+ typeToHtml(bt.upperBound, hasLinks = true) ++ scala.xml.Text(")")
case lb: LowerBoundedTypeParamConstraint =>
scala.xml.Text(lb.typeParamName + " is a superclass of " + lb.lowerBound.name + " (" + lb.typeParamName + " >: ") ++
- typeToHtml(lb.lowerBound, true) ++ scala.xml.Text(")")
+ typeToHtml(lb.lowerBound, hasLinks = true) ++ scala.xml.Text(")")
case ub: UpperBoundedTypeParamConstraint =>
scala.xml.Text(ub.typeParamName + " is a subclass of " + ub.upperBound.name + " (" + ub.typeParamName + " <: ") ++
- typeToHtml(ub.upperBound, true) ++ scala.xml.Text(")")
+ typeToHtml(ub.upperBound, hasLinks = true) ++ scala.xml.Text(")")
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
index 61c1819d11..61c1819d11 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
index ec00cace75..ec00cace75 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
index 847367838c..837d9e6f21 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
@@ -10,7 +10,6 @@ package diagram
import scala.xml.{NodeSeq, XML, PrefixedAttribute, Elem, MetaData, Null, UnprefixedAttribute}
import scala.collection.immutable._
-import javax.xml.parsers.SAXParser
import model._
import model.diagram._
@@ -22,8 +21,6 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
private var pathToLib: String = null
// maps nodes to unique indices
private var node2Index: Map[Node, Int] = null
- // maps an index to its corresponding node
- private var index2Node: Map[Int, Node] = null
// true if the current diagram is a class diagram
private var isInheritanceDiagram = false
// incoming implicit nodes (needed for determining the CSS class of a node)
@@ -34,7 +31,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
private var counter = 0
def generate(diagram: Diagram, template: DocTemplateEntity, page: HtmlPage):NodeSeq = {
- counter = counter + 1;
+ counter = counter + 1
this.page = page
pathToLib = "../" * (page.templateToPath(template).size - 1) + "lib/"
val dot = generateDot(diagram)
@@ -42,7 +39,6 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
// clean things up a bit, so we don't leave garbage on the heap
this.page = null
node2Index = null
- index2Node = null
incomingImplicitNodes = List()
result
}
@@ -116,7 +112,6 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
node2Index = d.nodes.zipWithIndex.toMap
incomingImplicitNodes = List()
}
- index2Node = node2Index map {_.swap}
val implicitsDot = {
if (!isInheritanceDiagram) ""
@@ -212,10 +207,10 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
private def node2Dot(node: Node) = {
// escape HTML characters in node names
- def escape(name: String) = name.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;");
+ def escape(name: String) = name.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
// assemble node attribues in a map
- var attr = scala.collection.mutable.Map[String, String]()
+ val attr = scala.collection.mutable.Map[String, String]()
// link
node.doctpl match {
@@ -320,13 +315,13 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
* Calls dot with a given dot string and returns the SVG output.
*/
private def generateSVG(dotInput: String, template: DocTemplateEntity) = {
- val dotOutput = DiagramGenerator.getDotRunner.feedToDot(dotInput, template)
+ val dotOutput = DiagramGenerator.getDotRunner().feedToDot(dotInput, template)
var tSVG = -System.currentTimeMillis
val result = if (dotOutput != null) {
- val src = scala.io.Source.fromString(dotOutput);
+ val src = scala.io.Source.fromString(dotOutput)
try {
- val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, false)
+ val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, preserveWS = false)
val doc = cpa.document()
if (doc != null)
transform(doc.docElem)
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
index 5cdd5c74a4..2fa1bf62f3 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
@@ -10,12 +10,10 @@ import java.io.InputStreamReader
import java.io.OutputStreamWriter
import java.io.BufferedWriter
import java.io.BufferedReader
-import java.io.IOException
import scala.sys.process._
import scala.concurrent.SyncVar
import model._
-import model.diagram._
/** This class takes care of running the graphviz dot utility */
class DotRunner(settings: doc.Settings) {
@@ -183,7 +181,7 @@ class DotProcess(settings: doc.Settings) {
private[this] def outputFn(stdOut: InputStream): Unit = {
val reader = new BufferedReader(new InputStreamReader(stdOut))
- var buffer: StringBuilder = new StringBuilder()
+ val buffer: StringBuilder = new StringBuilder()
try {
var line = reader.readLine
while (!error && line != null) {
@@ -209,7 +207,6 @@ class DotProcess(settings: doc.Settings) {
private[this] def errorFn(stdErr: InputStream): Unit = {
val reader = new BufferedReader(new InputStreamReader(stdErr))
- var buffer: StringBuilder = new StringBuilder()
try {
var line = reader.readLine
while (line != null) {
@@ -225,4 +222,4 @@ class DotProcess(settings: doc.Settings) {
errorBuffer.append(" Error thread in " + templateName + ": Exception: " + exc + "\n")
}
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-down.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-down.png
index 7229603ae5..7229603ae5 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-down.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-down.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-right.png
index b2f2935dc9..b2f2935dc9 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-right.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-right.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class.png
index 97edbd49db..97edbd49db 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_big.png
index cb1f638a58..cb1f638a58 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_diagram.png
index 9d7aec792b..9d7aec792b 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_diagram.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png
index 5dd6e38d2e..5dd6e38d2e 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif
index 2e3f5ea530..2e3f5ea530 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
index 4be145d0af..4be145d0af 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif
index 69038337a7..69038337a7 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif
index 36c43be3a2..36c43be3a2 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css
index 5fe33f72f5..5fe33f72f5 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js
index 478f2e38ac..478f2e38ac 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
index 0e8c893315..0e8c893315 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
index 4d740f3b17..4d740f3b17 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif
index b9b49076a6..b9b49076a6 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
index f127e35b48..f127e35b48 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
index 63a1ae8349..63a1ae8349 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterbg.gif
index 542ba4aa5a..542ba4aa5a 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif
index b5075c16cd..b5075c16cd 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png
index d613cf5633..d613cf5633 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif
index ae2f85823b..ae2f85823b 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif
index a0d93f4844..a0d93f4844 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css
index 55fb370a41..55fb370a41 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js
index 70073b272a..70073b272a 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
index faab0cf1a3..faab0cf1a3 100755
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.js
index bc3fbc81b2..bc3fbc81b2 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
index 4dd48675b7..4dd48675b7 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js
index 4688d633fe..4688d633fe 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png
index 9b32288e04..9b32288e04 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li.png
index fd0ad06e81..fd0ad06e81 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.png
index ad312793ea..ad312793ea 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_big.png
index 67ffca79de..67ffca79de 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_diagram.png
index 6e9f2f743f..6e9f2f743f 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_diagram.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png
index 7502942eb6..7502942eb6 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png
index c777bfce8d..c777bfce8d 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png
index 7502942eb6..7502942eb6 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif
index 848dd5963a..848dd5963a 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif
index 34a04249ee..34a04249ee 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif
index 2ed33b0aa4..2ed33b0aa4 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.png
index 6ea17ac320..6ea17ac320 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package_big.png
index 529aa93188..529aa93188 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif
index 00c3378a2a..00c3378a2a 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
index d30dbad858..d30dbad858 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ref-index.css
index 7d64b9c5c5..7d64b9c5c5 100755
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ref-index.css
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.png
index 4625f9df74..4625f9df74 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.psd
index 3764f82ccb..3764f82ccb 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.psd
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/scheduler.js
index 4417f5b438..4417f5b438 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/scheduler.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
index bc29efb3e6..bc29efb3e6 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
index 8313f4975b..8313f4975b 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right.png
index 04eda2f307..04eda2f307 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected.png
index c89765239e..c89765239e 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2-right.png
index bf984ef0ba..bf984ef0ba 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2-right.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2.png
index a790bb1169..a790bb1169 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif
index b6ac4415e4..b6ac4415e4 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif
index 9aae5ba0aa..9aae5ba0aa 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
index b066027f04..b066027f04 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js
index 6d1caf6d50..6d1caf6d50 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js
index 0af34eca4c..0af34eca4c 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.png
index fb961a2eda..fb961a2eda 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_big.png
index 625d9251cb..625d9251cb 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png
index 88983254ce..88983254ce 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png
index d0cd7fd512..d0cd7fd512 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type.png
index 6c6e1fe2f5..6c6e1fe2f5 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_big.png
index 04c8794e92..04c8794e92 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_diagram.png
index d8152529fd..d8152529fd 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_diagram.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_tags.ai
index 3b5c47c9e3..3b5c47c9e3 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_tags.ai
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png
index ef2615bacc..ef2615bacc 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/typebg.gif
index 2fcc77b2e8..2fcc77b2e8 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/typebg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/unselected.png
index d5ac639405..d5ac639405 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/unselected.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif
index 2a949311d7..2a949311d7 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/versions.txt
index 17d1caeb66..17d1caeb66 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/versions.txt
diff --git a/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala
index 9ba89146c0..574d6b04f8 100644
--- a/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala
@@ -18,8 +18,6 @@ import scala.language.postfixOps
* Call `parse` to run the parser. Note that the parser is stateless and
* should only be built once for a given Scaladoc run.
*
- * @param reporter The reporter on which user messages (error, warnings) should be printed.
- *
* @author Manohar Jonnalagedda
* @author Gilles Dubochet */
trait CommentFactory extends base.CommentFactoryBase {
diff --git a/src/compiler/scala/tools/nsc/doc/model/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
index cbc1a23d44..924f203a59 100644
--- a/src/compiler/scala/tools/nsc/doc/model/Entity.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
@@ -23,10 +23,6 @@ import diagram._
* - type and value parameters;
* - annotations. */
trait Entity {
-
- /** Similar to symbols, so we can track entities */
- def id: Int
-
/** The name of the entity. Note that the name does not qualify this entity uniquely; use its `qualifiedName`
* instead. */
def name : String
@@ -59,9 +55,6 @@ trait Entity {
/** Indicates whether this entity lives in the types namespace (classes, traits, abstract/alias types) */
def isType: Boolean
-
- /** Indicates whether this entity lives in the terms namespace (objects, packages, methods, values) */
- def isTerm: Boolean
}
object Entity {
@@ -97,9 +90,6 @@ trait TemplateEntity extends Entity {
/** Whether documentation is available for this template. */
def isDocTemplate: Boolean
- /** Whether documentation is available for this template. */
- def isNoDocMemberTemplate: Boolean
-
/** Whether this template is a case class. */
def isCaseClass: Boolean
@@ -149,9 +139,6 @@ trait MemberEntity extends Entity {
/** Some migration warning if this member has a migration annotation, or none otherwise. */
def migration: Option[Body]
- @deprecated("Use `inDefinitionTemplates` instead", "2.9.0")
- def inheritedFrom: List[TemplateEntity]
-
/** For members representing values: the type of the value returned by this member; for members
* representing types: the type itself. */
def resultType: TypeEntity
@@ -177,12 +164,6 @@ trait MemberEntity extends Entity {
/** Whether this member is an abstract type. */
def isAbstractType: Boolean
- /** Whether this member is a template. */
- def isTemplate: Boolean
-
- /** Whether this member is implicit. */
- def isImplicit: Boolean
-
/** Whether this member is abstract. */
def isAbstract: Boolean
@@ -384,14 +365,9 @@ trait RootPackage extends Package
/** A non-template member (method, value, lazy value, variable, constructor, alias type, and abstract type). */
trait NonTemplateMemberEntity extends MemberEntity {
-
/** Whether this member is a use case. A use case is a member which does not exist in the documented code.
* It corresponds to a real member, and provides a simplified, yet compatible signature for that member. */
def isUseCase: Boolean
-
- /** Whether this member is a bridge member. A bridge member does only exist for binary compatibility reasons
- * and should not appear in ScalaDoc. */
- def isBridge: Boolean
}
@@ -506,12 +482,6 @@ trait ImplicitConversion {
/** The result type after the conversion */
def targetType: TypeEntity
- /** The result type after the conversion
- * Note: not all targetTypes have a corresponding template. Examples include conversions resulting in refinement
- * types. Need to check it's not option!
- */
- def targetTemplate: Option[TemplateEntity]
-
/** The components of the implicit conversion type parents */
def targetTypeComponents: List[(TemplateEntity, TypeEntity)]
diff --git a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
index 4ee6daf73e..1272906df5 100755
--- a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
@@ -17,8 +17,6 @@ object IndexModelFactory {
object result extends mutable.HashMap[Char,SymbolMap] {
- /* Owner template ordering */
- implicit def orderingSet = math.Ordering.String.on { x: MemberEntity => x.name.toLowerCase }
/* symbol name ordering */
implicit def orderingMap = math.Ordering.String
diff --git a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
index 23259a4ae8..23259a4ae8 100644
--- a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
index 0a469c9227..1df725636a 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -43,20 +43,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def modelFinished: Boolean = _modelFinished
private var universe: Universe = null
- private def dbg(msg: String) = if (sys.props contains "scala.scaladoc.debug") println(msg)
- protected def closestPackage(sym: Symbol) = {
- if (sym.isPackage || sym.isPackageClass) sym
- else sym.enclosingPackage
- }
-
- private def printWithoutPrefix(memberSym: Symbol, templateSym: Symbol) = {
- dbg(
- "memberSym " + memberSym + " templateSym " + templateSym + " encls = " +
- closestPackage(memberSym) + ", " + closestPackage(templateSym)
- )
- memberSym.isOmittablePrefix || (closestPackage(memberSym) == closestPackage(templateSym))
- }
-
def makeModel: Option[Universe] = {
val universe = new Universe { thisUniverse =>
thisFactory.universe = thisUniverse
@@ -65,7 +51,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
_modelFinished = true
// complete the links between model entities, everything that couldn't have been done before
- universe.rootPackage.completeModel
+ universe.rootPackage.completeModel()
Some(universe) filter (_.rootPackage != null)
}
@@ -86,7 +72,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
/* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */
abstract class EntityImpl(val sym: Symbol, val inTpl: TemplateImpl) extends Entity {
- val id = { ids += 1; ids }
val name = optimize(sym.nameString)
val universe = thisFactory.universe
@@ -100,7 +85,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def annotations = sym.annotations.map(makeAnnotation)
def inPackageObject: Boolean = sym.owner.isModuleClass && sym.owner.sourceModule.isPackageObject
def isType = sym.name.isTypeName
- def isTerm = sym.name.isTermName
}
trait TemplateImpl extends EntityImpl with TemplateEntity {
@@ -112,7 +96,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def isObject = sym.isModule && !sym.isPackage
def isCaseClass = sym.isCaseClass
def isRootPackage = false
- def isNoDocMemberTemplate = false
def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this))
}
@@ -127,18 +110,14 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
if (inTpl != null) thisFactory.comment(sym, thisTpl, inTpl) else None
}
- def group = if (comment.isDefined) comment.get.group.getOrElse(defaultGroup) else defaultGroup
+ def group = comment flatMap (_.group) getOrElse defaultGroup
override def inTemplate = inTpl
override def toRoot: List[MemberImpl] = this :: inTpl.toRoot
- def inDefinitionTemplates = this match {
- case mb: NonTemplateMemberEntity if (mb.useCaseOf.isDefined) =>
- mb.useCaseOf.get.inDefinitionTemplates
- case _ =>
- if (inTpl == null)
- List(makeRootPackage)
- else
- makeTemplate(sym.owner)::(sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) })
- }
+ def inDefinitionTemplates =
+ if (inTpl == null)
+ List(makeRootPackage)
+ else
+ makeTemplate(sym.owner)::(sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) })
def visibility = {
if (sym.isPrivateLocal) PrivateInInstance()
else if (sym.isProtectedLocal) ProtectedInInstance()
@@ -149,8 +128,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
else None
if (sym.isPrivate) PrivateInTemplate(inTpl)
else if (sym.isProtected) ProtectedInTemplate(qual getOrElse inTpl)
- else if (qual.isDefined) PrivateInTemplate(qual.get)
- else Public()
+ else qual match {
+ case Some(q) => PrivateInTemplate(q)
+ case None => Public()
+ }
}
}
def flags = {
@@ -189,9 +170,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
})
else
None
- def inheritedFrom =
- if (inTemplate.sym == this.sym.owner || inTemplate.sym.isPackage) Nil else
- makeTemplate(this.sym.owner) :: (sym.allOverriddenSymbols map { os => makeTemplate(os.owner) })
+
def resultType = {
def resultTpe(tpe: Type): Type = tpe match { // similar to finalResultType, except that it leaves singleton types alone
case PolyType(_, res) => resultTpe(res)
@@ -199,14 +178,13 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
case NullaryMethodType(res) => resultTpe(res)
case _ => tpe
}
- val tpe = if (!isImplicitlyInherited) sym.tpe else byConversion.get.toType memberInfo sym
+ val tpe = byConversion.fold(sym.tpe) (_.toType memberInfo sym)
makeTypeInTemplateContext(resultTpe(tpe), inTemplate, sym)
}
def isDef = false
def isVal = false
def isLazyVal = false
def isVar = false
- def isImplicit = sym.isImplicit
def isConstructor = false
def isAliasType = false
def isAbstractType = false
@@ -214,7 +192,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
// for the explanation of conversion == null see comment on flags
((!sym.isTrait && ((sym hasFlag Flags.ABSTRACT) || (sym hasFlag Flags.DEFERRED)) && (!isImplicitlyInherited)) ||
sym.isAbstractClass || sym.isAbstractType) && !sym.isSynthetic
- def isTemplate = false
+
def signature = externalSignature(sym)
lazy val signatureCompat = {
@@ -255,8 +233,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
* exists, but should not be documented (either it's not included in the source or it's not visible)
*/
class NoDocTemplateImpl(sym: Symbol, inTpl: TemplateImpl) extends EntityImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with NoDocTemplate {
- assert(modelFinished)
- assert(!(noDocTemplatesCache isDefinedAt sym))
+ assert(modelFinished, this)
+ assert(!(noDocTemplatesCache isDefinedAt sym), (sym, noDocTemplatesCache(sym)))
noDocTemplatesCache += (sym -> this)
def isDocTemplate = false
}
@@ -268,25 +246,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
*/
abstract class MemberTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with MemberTemplateEntity {
// no templates cache for this class, each owner gets its own instance
- override def isTemplate = true
def isDocTemplate = false
- override def isNoDocMemberTemplate = true
lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "." + name)
def valueParams: List[List[ValueParam]] = Nil /** TODO, these are now only computed for DocTemplates */
- // Seems unused
- // def parentTemplates =
- // if (sym.isPackage || sym == AnyClass)
- // List()
- // else
- // sym.tpe.parents.flatMap { tpe: Type =>
- // val tSym = tpe.typeSymbol
- // if (tSym != NoSymbol)
- // List(makeTemplate(tSym))
- // else
- // List()
- // } filter (_.isInstanceOf[DocTemplateEntity])
-
def parentTypes =
if (sym.isPackage || sym == AnyClass) List() else {
val tps = (this match {
@@ -306,7 +269,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
* All ancestors of the template and all non-package members.
*/
abstract class DocTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberTemplateImpl(sym, inTpl) with DocTemplateEntity {
- assert(!modelFinished)
+ assert(!modelFinished, (sym, inTpl))
assert(!(docTemplatesCache isDefinedAt sym), sym)
docTemplatesCache += (sym -> this)
@@ -394,9 +357,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
lazy val memberSyms = sym.info.members.filter(s => membersShouldDocument(s, this)).toList
// the inherited templates (classes, traits or objects)
- var memberSymsLazy = memberSyms.filter(t => templateShouldDocument(t, this) && !inOriginalOwner(t, this))
+ val memberSymsLazy = memberSyms.filter(t => templateShouldDocument(t, this) && !inOriginalOwner(t, this))
// the direct members (methods, values, vars, types and directly contained templates)
- var memberSymsEager = memberSyms.filter(!memberSymsLazy.contains(_))
+ val memberSymsEager = memberSyms.filter(!memberSymsLazy.contains(_))
// the members generated by the symbols in memberSymsEager
val ownMembers = (memberSymsEager.flatMap(makeMember(_, None, this)))
@@ -419,7 +382,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
if (!sym.isAliasType && !sym.isAbstractType)
for (member <- members)
member match {
- case d: DocTemplateImpl => d.completeModel
+ case d: DocTemplateImpl => d.completeModel()
case _ =>
}
@@ -442,17 +405,16 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
conversions flatMap (conv =>
if (!implicitExcluded(conv.conversionQualifiedName))
conv.targetTypeComponents map {
- case pair@(template, tpe) =>
+ case (template, tpe) =>
template match {
case d: DocTemplateImpl if (d != this) => d.registerImplicitlyConvertibleClass(this, conv)
case _ => // nothing
}
- (pair._1, pair._2, conv)
+ (template, tpe, conv)
}
else List()
)
- override def isTemplate = true
override def isDocTemplate = true
private[this] lazy val companionSymbol =
if (sym.isAliasType || sym.isAbstractType) {
@@ -527,31 +489,26 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
extends MemberImpl(sym, inTpl) with NonTemplateMemberEntity {
override lazy val comment = {
val inRealTpl =
- /* Variable precendence order for implicitly added members: Take the variable defifinitions from ...
- * 1. the target of the implicit conversion
- * 2. the definition template (owner)
- * 3. the current template
- */
- if (conversion.isDefined) findTemplateMaybe(conversion.get.toType.typeSymbol) match {
- case Some(d) if d != makeRootPackage => d //in case of NoSymbol, it will give us the root package
- case _ => findTemplateMaybe(sym.owner) match {
- case Some(d) if d != makeRootPackage => d //in case of NoSymbol, it will give us the root package
- case _ => inTpl
- }
- } else inTpl
- if (inRealTpl != null) thisFactory.comment(sym, None, inRealTpl) else None
+ conversion.fold(Option(inTpl)) { conv =>
+ /* Variable precedence order for implicitly added members: Take the variable definitions from ...
+ * 1. the target of the implicit conversion
+ * 2. the definition template (owner)
+ * 3. the current template
+ */
+ findTemplateMaybe(conv.toType.typeSymbol) filterNot (_ == makeRootPackage) orElse (
+ findTemplateMaybe(sym.owner) filterNot (_ == makeRootPackage) orElse Option(inTpl)
+ )
+ }
+ inRealTpl flatMap (thisFactory.comment(sym, None, _))
}
+ override def inDefinitionTemplates = useCaseOf.fold(super.inDefinitionTemplates)(_.inDefinitionTemplates)
+
override def qualifiedName = optimize(inTemplate.qualifiedName + "#" + name)
lazy val definitionName = {
- // this contrived name is here just to satisfy some older tests -- if you decide to remove it, be my guest, and
- // also remove property("package object") from test/scaladoc/scalacheck/HtmlFactoryTest.scala so you don't break
- // the test suite...
- val packageObject = if (inPackageObject) ".package" else ""
- if (!conversion.isDefined) optimize(inDefinitionTemplates.head.qualifiedName + packageObject + "#" + name)
- else optimize(conversion.get.conversionQualifiedName + packageObject + "#" + name)
+ val qualifiedName = conversion.fold(inDefinitionTemplates.head.qualifiedName)(_.conversionQualifiedName)
+ optimize(qualifiedName + "#" + name)
}
- def isBridge = sym.isBridge
def isUseCase = useCaseOf.isDefined
override def byConversion: Option[ImplicitConversionImpl] = conversion
override def isImplicitlyInherited = { assert(modelFinished); conversion.isDefined }
@@ -564,7 +521,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
useCaseOf: Option[MemberEntity], inTpl: DocTemplateImpl)
extends NonTemplateMemberImpl(sym, conversion, useCaseOf, inTpl) {
def valueParams = {
- val info = if (!isImplicitlyInherited) sym.info else conversion.get.toType memberInfo sym
+ val info = conversion.fold(sym.info)(_.toType memberInfo sym)
info.paramss map { ps => (ps.zipWithIndex) map { case (p, i) =>
if (p.nameString contains "$") makeValueParam(p, inTpl, optimize("arg" + i)) else makeValueParam(p, inTpl)
}}
@@ -666,7 +623,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
*/
def createTemplate(aSym: Symbol, inTpl: DocTemplateImpl): Option[MemberImpl] = {
// don't call this after the model finished!
- assert(!modelFinished)
+ assert(!modelFinished, (aSym, inTpl))
def createRootPackageComment: Option[Comment] =
if(settings.docRootContent.isDefault) None
@@ -674,7 +631,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
import Streamable._
Path(settings.docRootContent.value) match {
case f : File => {
- val rootComment = closing(f.inputStream)(is => parse(slurp(is), "", NoPosition, Option(inTpl)))
+ val rootComment = closing(f.inputStream())(is => parse(slurp(is), "", NoPosition, Option(inTpl)))
Some(rootComment)
}
case _ => None
@@ -682,7 +639,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
def createDocTemplate(bSym: Symbol, inTpl: DocTemplateImpl): DocTemplateImpl = {
- assert(!modelFinished) // only created BEFORE the model is finished
+ assert(!modelFinished, (bSym, inTpl)) // only created BEFORE the model is finished
if (bSym.isAliasType && bSym != AnyRefClass)
new DocTemplateImpl(bSym, inTpl) with AliasImpl with AliasType { override def isAliasType = true }
else if (bSym.isAbstractType)
@@ -713,7 +670,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
override def inTemplate = this
override def toRoot = this :: Nil
override def qualifiedName = "_root_"
- override def inheritedFrom = Nil
override def isRootPackage = true
override lazy val memberSyms =
(bSym.info.members ++ EmptyPackage.info.members).toList filter { s =>
@@ -783,7 +739,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
}
- /** Get the root package */
def makeRootPackage: PackageImpl = docTemplatesCache(RootPackage).asInstanceOf[PackageImpl]
// TODO: Should be able to override the type
@@ -860,16 +815,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
def findMember(aSym: Symbol, inTpl: DocTemplateImpl): Option[MemberImpl] = {
- val tplSym = normalizeTemplate(aSym.owner)
+ normalizeTemplate(aSym.owner)
inTpl.members.find(_.sym == aSym)
}
- @deprecated("Use `findLinkTarget` instead.", "2.10.0")
- def findTemplate(query: String): Option[DocTemplateImpl] = {
- assert(modelFinished)
- docTemplatesCache.values find { (tpl: DocTemplateImpl) => tpl.qualifiedName == query && !packageDropped(tpl) && !tpl.isObject }
- }
-
def findTemplateMaybe(aSym: Symbol): Option[DocTemplateImpl] = {
assert(modelFinished)
docTemplatesCache.get(normalizeTemplate(aSym)).filterNot(packageDropped(_))
@@ -880,20 +829,12 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def makeTemplate(aSym: Symbol, inTpl: Option[TemplateImpl]): TemplateImpl = {
assert(modelFinished)
- def makeNoDocTemplate(aSym: Symbol, inTpl: TemplateImpl): NoDocTemplateImpl = {
- val bSym = normalizeTemplate(aSym)
- noDocTemplatesCache.get(bSym) match {
- case Some(noDocTpl) => noDocTpl
- case None => new NoDocTemplateImpl(bSym, inTpl)
- }
- }
+ def makeNoDocTemplate(aSym: Symbol, inTpl: TemplateImpl): NoDocTemplateImpl =
+ noDocTemplatesCache getOrElse (aSym, new NoDocTemplateImpl(aSym, inTpl))
- findTemplateMaybe(aSym) match {
- case Some(dtpl) =>
- dtpl
- case None =>
- val bSym = normalizeTemplate(aSym)
- makeNoDocTemplate(bSym, if (inTpl.isDefined) inTpl.get else makeTemplate(bSym.owner))
+ findTemplateMaybe(aSym) getOrElse {
+ val bSym = normalizeTemplate(aSym)
+ makeNoDocTemplate(bSym, inTpl getOrElse makeTemplate(bSym.owner))
}
}
@@ -904,24 +845,28 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
lazy val annotationClass =
makeTemplate(annot.symbol)
val arguments = { // lazy
- def noParams = annot.args map { _ => None }
+ def annotArgs = annot.args match {
+ case Nil => annot.assocs collect { case (_, LiteralAnnotArg(const)) => Literal(const) }
+ case xs => xs
+ }
+ def noParams = annotArgs map (_ => None)
+
val params: List[Option[ValueParam]] = annotationClass match {
case aClass: DocTemplateEntity with Class =>
(aClass.primaryConstructor map { _.valueParams.head }) match {
case Some(vps) => vps map { Some(_) }
- case None => noParams
+ case _ => noParams
}
case _ => noParams
}
- assert(params.length == annot.args.length)
- (params zip annot.args) flatMap { case (param, arg) =>
- makeTree(arg) match {
- case Some(tree) =>
- Some(new ValueArgument {
- def parameter = param
- def value = tree
- })
- case None => None
+ assert(params.length == annotArgs.length, (params, annotArgs))
+
+ params zip annotArgs flatMap { case (param, arg) =>
+ makeTree(arg) map { tree =>
+ new ValueArgument {
+ def parameter = param
+ def value = tree
+ }
}
}
}
@@ -995,10 +940,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
val ignoreParents = Set[Symbol](AnyClass, AnyRefClass, ObjectClass)
val filtParents =
// we don't want to expose too many links to AnyRef, that will just be redundant information
- if (tpl.isDefined && { val sym = tpl.get.sym; (!sym.isModule && parents.length < 2) || (sym == AnyValClass) || (sym == AnyRefClass) || (sym == AnyClass) })
- parents
- else
- parents.filterNot((p: Type) => ignoreParents(p.typeSymbol))
+ tpl match {
+ case Some(tpl) if (!tpl.sym.isModule && parents.length < 2) || (tpl.sym == AnyValClass) || (tpl.sym == AnyRefClass) || (tpl.sym == AnyClass) => parents
+ case _ => parents.filterNot((p: Type) => ignoreParents(p.typeSymbol))
+ }
/** Returns:
* - a DocTemplate if the type's symbol is documented
@@ -1029,9 +974,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
def makeQualifiedName(sym: Symbol, relativeTo: Option[Symbol] = None): String = {
- val stop = if (relativeTo.isDefined) relativeTo.get.ownerChain.toSet else Set[Symbol]()
+ val stop = relativeTo map (_.ownerChain.toSet) getOrElse Set[Symbol]()
var sym1 = sym
- var path = new StringBuilder()
+ val path = new StringBuilder()
// var path = List[Symbol]()
while ((sym1 != NoSymbol) && (path.isEmpty || !stop(sym1))) {
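A recurring pattern in the ModelFactory hunks above is replacing explicit isDefined/get checks with Option combinators such as fold and orElse (for example, byConversion.fold(sym.tpe)(_.toType memberInfo sym)). A minimal, self-contained sketch of that equivalence follows; the names are hypothetical and are not taken from the patch.

    // Sketch only: opt.fold(default)(f) yields default for None and f(x) for Some(x),
    // which is the shape the refactored ModelFactory code relies on.
    object FoldSketch {
      def resultInfo(byConversion: Option[String], defaultTpe: String): String =
        byConversion.fold(defaultTpe)(conv => "memberInfo via " + conv)

      def main(args: Array[String]): Unit = {
        println(resultInfo(None, "sym.tpe"))           // prints: sym.tpe
        println(resultInfo(Some("toList"), "sym.tpe")) // prints: memberInfo via toList
      }
    }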
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
index f88251b22e..868c2fc3a4 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
@@ -11,12 +11,7 @@ package doc
package model
import scala.collection._
-import scala.util.matching.Regex
-
import symtab.Flags
-import io._
-
-import model.{ RootPackage => RootPackageEntity }
/**
* This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them.
@@ -56,7 +51,6 @@ trait ModelFactoryImplicitSupport {
import global._
import global.analyzer._
import global.definitions._
- import rootMirror.{RootPackage, RootClass, EmptyPackage, EmptyPackageClass}
import settings.hardcoded
// debugging:
@@ -71,7 +65,7 @@ trait ModelFactoryImplicitSupport {
* class A[T]
* class B extends A[Int]
* class C extends A[String]
- * implicit def pimpA[T: Numeric](a: A[T]): D
+ * implicit def enrichA[T: Numeric](a: A[T]): D
* }}}
* For B, no constraints are generated as Numeric[Int] is already in the default scope. On the other hand, for the
* conversion from C to D, depending on -implicits-show-all, the conversion can:
@@ -94,9 +88,9 @@ trait ModelFactoryImplicitSupport {
// But we don't want that, so we'll simply refuse to find implicit conversions on for Nothing and Null
if (!(sym.isClass || sym.isTrait || sym == AnyRefClass) || sym == NothingClass || sym == NullClass) Nil
else {
- var context: global.analyzer.Context = global.analyzer.rootContext(NoCompilationUnit)
+ val context: global.analyzer.Context = global.analyzer.rootContext(NoCompilationUnit)
- val results = global.analyzer.allViewsFrom(sym.tpe, context, sym.typeParams)
+ val results = global.analyzer.allViewsFrom(sym.tpe_*, context, sym.typeParams)
var conversions = results.flatMap(result => makeImplicitConversion(sym, result._1, result._2, context, inTpl))
// also keep empty conversions, so they appear in diagrams
// conversions = conversions.filter(!_.members.isEmpty)
@@ -107,7 +101,7 @@ trait ModelFactoryImplicitSupport {
hardcoded.arraySkipConversions.contains(conv.conversionQualifiedName))
// Filter out non-sensical conversions from value types
- if (isPrimitiveValueType(sym.tpe))
+ if (isPrimitiveValueType(sym.tpe_*))
conversions = conversions.filter((ic: ImplicitConversionImpl) =>
hardcoded.valueClassFilter(sym.nameString, ic.conversionQualifiedName))
@@ -127,13 +121,13 @@ trait ModelFactoryImplicitSupport {
* What? in details:
* - say we start from a class A[T1, T2, T3, T4]
* - we have an implicit function (view) in scope:
- * def pimpA[T3 <: Long, T4](a: A[Int, Foo[Bar[X]], T3, T4])(implicit ev1: TypeTag[T4], ev2: Numeric[T4]): PimpedA
- * - A is converted to PimpedA ONLY if a couple of constraints are satisfied:
+ * def enrichA[T3 <: Long, T4](a: A[Int, Foo[Bar[X]], T3, T4])(implicit ev1: TypeTag[T4], ev2: Numeric[T4]): EnrichedA
+ * - A is converted to EnrichedA ONLY if a couple of constraints are satisfied:
* * T1 must be equal to Int
* * T2 must be equal to Foo[Bar[X]]
* * T3 must be upper bounded by Long
* * there must be evidence of Numeric[T4] and a TypeTag[T4] within scope
- * - the final type is PimpedA and A therefore inherits a couple of members from pimpedA
+ * - the final type is EnrichedA and A therefore inherits a couple of members from enrichA
*
* How?
* some notes:
@@ -176,7 +170,7 @@ trait ModelFactoryImplicitSupport {
val newContext = context.makeImplicit(context.ambiguousErrors)
newContext.macrosEnabled = false
val newTyper = global.analyzer.newTyper(newContext)
- newTyper.silent(_.typed(appliedTree, global.analyzer.EXPRmode, WildcardType), false) match {
+ newTyper.silent(_.typed(appliedTree, EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
case global.analyzer.SilentResultValue(t: Tree) => t
case global.analyzer.SilentTypeError(err) =>
@@ -349,15 +343,6 @@ trait ModelFactoryImplicitSupport {
makeRootPackage
}
- def targetTemplate: Option[TemplateEntity] = toType match {
- // @Vlad: I'm being extra conservative in template creation -- I don't want to create templates for complex types
- // such as refinement types because the template can't represent the type corectly (a template corresponds to a
- // package, class, trait or object)
- case t: TypeRef => Some(makeTemplate(t.sym))
- case RefinedType(parents, decls) => None
- case _ => error("Scaladoc implicits: Could not create template for: " + toType + " of type " + toType.getClass); None
- }
-
def targetTypeComponents: List[(TemplateEntity, TypeEntity)] = makeParentTypes(toType, None, inTpl)
def convertorMethod: Either[MemberEntity, String] = {
@@ -385,7 +370,6 @@ trait ModelFactoryImplicitSupport {
lazy val memberImpls: List[MemberImpl] = {
// Obtain the members inherited by the implicit conversion
val memberSyms = toType.members.filter(implicitShouldDocument(_)).toList
- val existingSyms = sym.info.members
// Debugging part :)
debug(sym.nameString + "\n" + "=" * sym.nameString.length())
@@ -422,66 +406,52 @@ trait ModelFactoryImplicitSupport {
/* ========================= HELPER METHODS ========================== */
/**
* Computes the shadowing table for all the members in the implicit conversions
- * @param mbrs All template's members, including usecases and full signature members
+ * @param members All template's members, including usecases and full signature members
* @param convs All the conversions the template takes part in
- * @param inTpl the ususal :)
+ * @param inTpl the usual :)
*/
- def makeShadowingTable(mbrs: List[MemberImpl],
+ def makeShadowingTable(members: List[MemberImpl],
convs: List[ImplicitConversionImpl],
inTpl: DocTemplateImpl): Map[MemberEntity, ImplicitMemberShadowing] = {
assert(modelFinished)
- var shadowingTable = Map[MemberEntity, ImplicitMemberShadowing]()
+ val shadowingTable = mutable.Map[MemberEntity, ImplicitMemberShadowing]()
+ val membersByName: Map[Name, List[MemberImpl]] = members.groupBy(_.sym.name)
+ val convsByMember = (Map.empty[MemberImpl, ImplicitConversionImpl] /: convs) {
+ case (map, conv) => map ++ conv.memberImpls.map (_ -> conv)
+ }
for (conv <- convs) {
- val otherConvs = convs.filterNot(_ == conv)
+ val otherConvMembers: Map[Name, List[MemberImpl]] = convs filterNot (_ == conv) flatMap (_.memberImpls) groupBy (_.sym.name)
for (member <- conv.memberImpls) {
- // for each member in our list
val sym1 = member.sym
val tpe1 = conv.toType.memberInfo(sym1)
- // check if it's shadowed by a member in the original class
- var shadowedBySyms: List[Symbol] = List()
- for (mbr <- mbrs) {
- val sym2 = mbr.sym
- if (sym1.name == sym2.name) {
- val shadowed = !settings.docImplicitsSoundShadowing.value || {
- val tpe2 = inTpl.sym.info.memberInfo(sym2)
- !isDistinguishableFrom(tpe1, tpe2)
- }
- if (shadowed)
- shadowedBySyms ::= sym2
- }
+ // check if it's shadowed by a member in the original class.
+ val shadowed = membersByName.get(sym1.name).toList.flatten filter { other =>
+ !settings.docImplicitsSoundShadowing.value || !isDistinguishableFrom(tpe1, inTpl.sym.info.memberInfo(other.sym))
}
- val shadowedByMembers = mbrs.filter((mb: MemberImpl) => shadowedBySyms.contains(mb.sym))
-
- // check if it's shadowed by another member
- var ambiguousByMembers: List[MemberEntity] = List()
- for (conv <- otherConvs)
- for (member2 <- conv.memberImpls) {
- val sym2 = member2.sym
- if (sym1.name == sym2.name) {
- val tpe2 = conv.toType.memberInfo(sym2)
- // Ambiguity should be an equivalence relation
- val ambiguated = !isDistinguishableFrom(tpe1, tpe2) || !isDistinguishableFrom(tpe2, tpe1)
- if (ambiguated)
- ambiguousByMembers ::= member2
- }
- }
+ // check if it's shadowed by another conversion.
+ val ambiguous = otherConvMembers.get(sym1.name).toList.flatten filter { other =>
+ val tpe2 = convsByMember(other).toType.memberInfo(other.sym)
+ !isDistinguishableFrom(tpe1, tpe2) || !isDistinguishableFrom(tpe2, tpe1)
+ }
// we finally have the shadowing info
- val shadowing = new ImplicitMemberShadowing {
- def shadowingMembers: List[MemberEntity] = shadowedByMembers
- def ambiguatingMembers: List[MemberEntity] = ambiguousByMembers
- }
+ if (!shadowed.isEmpty || !ambiguous.isEmpty) {
+ val shadowing = new ImplicitMemberShadowing {
+ def shadowingMembers: List[MemberEntity] = shadowed
+ def ambiguatingMembers: List[MemberEntity] = ambiguous
+ }
- shadowingTable += (member -> shadowing)
+ shadowingTable += (member -> shadowing)
+ }
}
}
- shadowingTable
+ shadowingTable.toMap
}
@@ -511,25 +481,25 @@ trait ModelFactoryImplicitSupport {
/**
* Make implicits explicit - Not used currently
*/
- object implicitToExplicit extends TypeMap {
- def apply(tp: Type): Type = mapOver(tp) match {
- case MethodType(params, resultType) =>
- MethodType(params.map(param => if (param.isImplicit) param.cloneSymbol.resetFlag(Flags.IMPLICIT) else param), resultType)
- case other =>
- other
- }
- }
+ // object implicitToExplicit extends TypeMap {
+ // def apply(tp: Type): Type = mapOver(tp) match {
+ // case MethodType(params, resultType) =>
+ // MethodType(params.map(param => if (param.isImplicit) param.cloneSymbol.resetFlag(Flags.IMPLICIT) else param), resultType)
+ // case other =>
+ // other
+ // }
+ // }
/**
* removeImplicitParameters transforms implicit parameters from the view result type into constraints and
* returns the simplified type of the view
*
* for the example view:
- * implicit def pimpMyClass[T](a: MyClass[T])(implicit ev: Numeric[T]): PimpedMyClass[T]
+ * implicit def enrichMyClass[T](a: MyClass[T])(implicit ev: Numeric[T]): EnrichedMyClass[T]
* the implicit view result type is:
- * (a: MyClass[T])(implicit ev: Numeric[T]): PimpedMyClass[T]
+ * (a: MyClass[T])(implicit ev: Numeric[T]): EnrichedMyClass[T]
* and the simplified type will be:
- * MyClass[T] => PimpedMyClass[T]
+ * MyClass[T] => EnrichedMyClass[T]
*/
def removeImplicitParameters(viewType: Type): (Type, List[Type]) = {
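The renamed examples above (pimpA/PimpedA becoming enrichA/EnrichedA) refer to the enrichment pattern this trait documents: an implicit view from a class to a wrapper that contributes extra members, possibly under constraints such as Numeric[T]. The following compilable sketch shows such a view; the names mirror the doc comment's example, but the code itself is illustrative and not part of the patch.

    // Hypothetical enrichment view of the kind scaladoc's implicit support documents.
    // Scaladoc would list EnrichedMyClass's members on MyClass, guarded by the
    // Numeric[T] constraint, as described for removeImplicitParameters above.
    import scala.language.implicitConversions

    class MyClass[T](val value: T)
    class EnrichedMyClass[T](val underlying: MyClass[T])(implicit num: Numeric[T]) {
      def doubled: T = num.plus(underlying.value, underlying.value)
    }

    object Enrichment {
      implicit def enrichMyClass[T](a: MyClass[T])(implicit ev: Numeric[T]): EnrichedMyClass[T] =
        new EnrichedMyClass(a)

      def main(args: Array[String]): Unit =
        println(new MyClass(21).doubled) // prints 42: the view supplies `doubled`
    }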
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
index 844a509b7e..99e9059d79 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
@@ -8,13 +8,6 @@ import base._
import diagram._
import scala.collection._
-import scala.util.matching.Regex
-
-import symtab.Flags
-
-import io._
-
-import model.{ RootPackage => RootPackageEntity }
/** This trait extracts all required information for documentation from compilation units */
trait ModelFactoryTypeSupport {
@@ -28,14 +21,11 @@ trait ModelFactoryTypeSupport {
import global._
import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass }
- import rootMirror.{ RootPackage, RootClass, EmptyPackage }
protected val typeCache = new mutable.LinkedHashMap[Type, TypeEntity]
/** */
def makeType(aType: Type, inTpl: TemplateImpl): TypeEntity = {
- def templatePackage = closestPackage(inTpl.sym)
-
def createTypeEntity = new TypeEntity {
private var nameBuffer = new StringBuilder
private var refBuffer = new immutable.TreeMap[Int, (LinkTo, Int)]
@@ -234,7 +224,6 @@ trait ModelFactoryTypeSupport {
def appendClauses = {
nameBuffer append " forSome {"
var first = true
- val qset = quantified.toSet
for (sym <- quantified) {
if (!first) { nameBuffer append ", " } else first = false
if (sym.isSingletonExistential) {
diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala b/src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala
index 5b4ec4a40b..5b4ec4a40b 100644
--- a/src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala
index bd7534ded4..b972649194 100755
--- a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala
@@ -21,7 +21,7 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
def makeTree(rhs: Tree): Option[TreeEntity] = {
- var expr = new StringBuilder
+ val expr = new StringBuilder
var refs = new immutable.TreeMap[Int, (Entity, Int)] // start, (Entity to be linked to , end)
rhs.pos match {
@@ -39,7 +39,7 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
* stores it in tree.refs with its position
*/
def makeLink(rhs: Tree){
- var start = pos.startOrPoint - firstIndex
+ val start = pos.startOrPoint - firstIndex
val end = pos.endOrPoint - firstIndex
if(start != end) {
var asym = rhs.symbol
diff --git a/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala b/src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala
index cf5c1fb3fb..cf5c1fb3fb 100644
--- a/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala b/src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala
index f712869a4b..f712869a4b 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/Visibility.scala b/src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala
index 22580805aa..22580805aa 100644
--- a/src/compiler/scala/tools/nsc/doc/model/Visibility.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala
index c2aa1f17f3..150b293b81 100644
--- a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala
@@ -36,20 +36,12 @@ case class InheritanceDiagram(thisNode: ThisNode,
override def isInheritanceDiagram = true
lazy val depthInfo = new DepthInfo {
def maxDepth = 3
- def nodeDepth(node: Node) =
- if (node == thisNode) 1
- else if (superClasses.contains(node)) 0
- else if (subClasses.contains(node)) 2
- else if (incomingImplicits.contains(node) || outgoingImplicits.contains(node)) 1
- else -1
}
}
trait DepthInfo {
/** Gives the maximum depth */
def maxDepth: Int
- /** Gives the depth of any node in the diagram or -1 if the node is not in the diagram */
- def nodeDepth(node: Node): Int
}
abstract class Node {
@@ -142,5 +134,4 @@ class ContentDiagramDepth(pack: ContentDiagram) extends DepthInfo {
}
val maxDepth = _maxDepth
- def nodeDepth(node: Node) = _nodeDepth.getOrElse(node, -1)
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
index cd60865ce7..6395446d3b 100644
--- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
@@ -6,9 +6,6 @@ import model._
import java.util.regex.{Pattern, Matcher}
import scala.util.matching.Regex
-// statistics
-import html.page.diagram.DiagramStats
-
/**
* This trait takes care of parsing @{inheritance, content}Diagram annotations
*
@@ -66,7 +63,7 @@ trait DiagramDirectiveParser {
NoDiagramAtAll
if (template.comment.isDefined)
- makeDiagramFilter(template, template.comment.get.inheritDiagram, defaultFilter, true)
+ makeDiagramFilter(template, template.comment.get.inheritDiagram, defaultFilter, isInheritanceDiagram = true)
else
defaultFilter
}
@@ -75,7 +72,7 @@ trait DiagramDirectiveParser {
def makeContentDiagramFilter(template: DocTemplateImpl): DiagramFilter = {
val defaultFilter = if (template.isPackage || template.isObject) FullDiagram else NoDiagramAtAll
if (template.comment.isDefined)
- makeDiagramFilter(template, template.comment.get.contentDiagram, defaultFilter, false)
+ makeDiagramFilter(template, template.comment.get.contentDiagram, defaultFilter, isInheritanceDiagram = false)
else
defaultFilter
}
@@ -153,7 +150,6 @@ trait DiagramDirectiveParser {
private val NodeSpecRegex = "\\\"[A-Za-z\\*][A-Za-z\\.\\*]*\\\""
private val NodeSpecPattern = Pattern.compile(NodeSpecRegex)
private val EdgeSpecRegex = "\\(" + NodeSpecRegex + "\\s*\\->\\s*" + NodeSpecRegex + "\\)"
- private val EdgeSpecPattern = Pattern.compile(NodeSpecRegex)
// And the composed regexes:
private val HideNodesRegex = new Regex("^hideNodes(\\s*" + NodeSpecRegex + ")+$")
private val HideEdgesRegex = new Regex("^hideEdges(\\s*" + EdgeSpecRegex + ")+$")
@@ -182,7 +178,7 @@ trait DiagramDirectiveParser {
def warning(message: String) = {
// we need the position from the package object (well, ideally its comment, but yeah ...)
val sym = if (template.sym.isPackage) template.sym.info.member(global.nme.PACKAGE) else template.sym
- assert((sym != global.NoSymbol) || (sym == global.definitions.RootPackage))
+ assert((sym != global.NoSymbol) || (sym == global.rootMirror.RootPackage))
global.reporter.warning(sym.pos, message)
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
index 175b4a6472..ebac25bbe4 100644
--- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
@@ -3,7 +3,6 @@ package model
package diagram
import model._
-import scala.collection.mutable
// statistics
import html.page.diagram.DiagramStats
@@ -47,7 +46,7 @@ trait DiagramFactory extends DiagramDirectiveParser {
val thisNode = ThisNode(tpl.resultType, Some(tpl))(Some(tpl.qualifiedName + " (this " + tpl.kind + ")"))
// superclasses
- var superclasses: List[Node] =
+ val superclasses: List[Node] =
tpl.parentTypes.collect {
case p: (TemplateEntity, TypeEntity) if !classExcluded(p._1) => NormalNode(p._2, Some(p._1))()
}.reverse
diff --git a/src/partest/scala/tools/partest/ScaladocModelTest.scala b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala
index b9abff69d8..f0a9caac15 100644
--- a/src/partest/scala/tools/partest/ScaladocModelTest.scala
+++ b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala
@@ -5,11 +5,10 @@
package scala.tools.partest
-import scala.tools.partest._
-import java.io._
+import scala.tools.nsc
import scala.tools.nsc._
import scala.tools.nsc.util.CommandLineParser
-import scala.tools.nsc.doc.{Settings, DocFactory, Universe}
+import scala.tools.nsc.doc.{ DocFactory, Universe }
import scala.tools.nsc.doc.model._
import scala.tools.nsc.doc.model.diagram._
import scala.tools.nsc.doc.base.comment._
@@ -80,14 +79,14 @@ abstract class ScaladocModelTest extends DirectTest {
System.setErr(prevErr)
}
- private[this] var settings: Settings = null
+ private[this] var settings: doc.Settings = null
// create a new scaladoc compiler
private[this] def newDocFactory: DocFactory = {
- settings = new Settings(_ => ())
+ settings = new doc.Settings(_ => ())
settings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"!
val args = extraSettings + " " + scaladocSettings
- val command = new ScalaDoc.Command((CommandLineParser tokenize (args)), settings)
+ new ScalaDoc.Command((CommandLineParser tokenize (args)), settings) // side-effecting, I think
val docFact = new DocFactory(new ConsoleReporter(settings), settings)
docFact
}
@@ -98,7 +97,6 @@ abstract class ScaladocModelTest extends DirectTest {
// so we don't get the newSettings warning
override def isDebug = false
-
// finally, enable easy navigation inside the entities
object access {
diff --git a/src/scalap/decoder.properties b/src/scalap/decoder.properties
index 5fcbfaf0f1..961c60f48c 100644
--- a/src/scalap/decoder.properties
+++ b/src/scalap/decoder.properties
@@ -1,2 +1,2 @@
version.number=2.0.1
-copyright.string=(c) 2002-2011 LAMP/EPFL
+copyright.string=(c) 2002-2013 LAMP/EPFL
diff --git a/src/scalap/scala/tools/scalap/Arguments.scala b/src/scalap/scala/tools/scalap/Arguments.scala
index a151e3067e..9f139cb5ea 100644
--- a/src/scalap/scala/tools/scalap/Arguments.scala
+++ b/src/scalap/scala/tools/scalap/Arguments.scala
@@ -87,7 +87,7 @@ object Arguments {
i += 2
}
} else {
- var iter = prefixes.iterator
+ val iter = prefixes.iterator
val j = i
while ((i == j) && iter.hasNext) {
val prefix = iter.next
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala b/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala
index 1500b81050..489a05ecd0 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala
@@ -50,7 +50,7 @@ trait Rule[-In, +Out, +A, +X] extends (In => Result[Out, A, X]) {
lazy val choices = Rule.this :: other :: Nil
}
- def orError[In2 <: In] = this orElse(error[In2])
+ def orError[In2 <: In] = this orElse error[Any]
def |[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other : => Rule[In2, Out2, A2, X2]) = orElse(other)
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
index aa5acbb06d..fd70e0de35 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
@@ -167,57 +167,10 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
val symbolInfo = nameRef ~ symbolRef ~ nat ~ (symbolRef?) ~ ref ~ get ^~~~~~^ SymbolInfo
- def symHeader(key: Int) = (key -~ none | (key + 64) -~ nat)
+ def symHeader(key: Int): EntryParser[Any] = (key -~ none | (key + 64) -~ nat)
def symbolEntry(key : Int) = symHeader(key) -~ symbolInfo
- /***************************************************
- * Symbol table attribute format:
- * Symtab = nentries_Nat {Entry}
- * Entry = 1 TERMNAME len_Nat NameInfo
- * | 2 TYPENAME len_Nat NameInfo
- * | 3 NONEsym len_Nat
- * | 4 TYPEsym len_Nat SymbolInfo
- * | 5 ALIASsym len_Nat SymbolInfo
- * | 6 CLASSsym len_Nat SymbolInfo [thistype_Ref]
- * | 7 MODULEsym len_Nat SymbolInfo
- * | 8 VALsym len_Nat [defaultGetter_Ref /* no longer needed*/] SymbolInfo [alias_Ref]
- * | 9 EXTref len_Nat name_Ref [owner_Ref]
- * | 10 EXTMODCLASSref len_Nat name_Ref [owner_Ref]
- * | 11 NOtpe len_Nat
- * | 12 NOPREFIXtpe len_Nat
- * | 13 THIStpe len_Nat sym_Ref
- * | 14 SINGLEtpe len_Nat type_Ref sym_Ref
- * | 15 CONSTANTtpe len_Nat constant_Ref
- * | 16 TYPEREFtpe len_Nat type_Ref sym_Ref {targ_Ref}
- * | 17 TYPEBOUNDStpe len_Nat tpe_Ref tpe_Ref
- * | 18 REFINEDtpe len_Nat classsym_Ref {tpe_Ref}
- * | 19 CLASSINFOtpe len_Nat classsym_Ref {tpe_Ref}
- * | 20 METHODtpe len_Nat tpe_Ref {sym_Ref}
- * | 21 POLYTtpe len_Nat tpe_Ref {sym_Ref}
- * | 22 IMPLICITMETHODtpe len_Nat tpe_Ref {sym_Ref} /* no longer needed */
- * | 52 SUPERtpe len_Nat tpe_Ref tpe_Ref
- * | 24 LITERALunit len_Nat
- * | 25 LITERALboolean len_Nat value_Long
- * | 26 LITERALbyte len_Nat value_Long
- * | 27 LITERALshort len_Nat value_Long
- * | 28 LITERALchar len_Nat value_Long
- * | 29 LITERALint len_Nat value_Long
- * | 30 LITERALlong len_Nat value_Long
- * | 31 LITERALfloat len_Nat value_Long
- * | 32 LITERALdouble len_Nat value_Long
- * | 33 LITERALstring len_Nat name_Ref
- * | 34 LITERALnull len_Nat
- * | 35 LITERALclass len_Nat tpe_Ref
- * | 36 LITERALenum len_Nat sym_Ref
- * | 40 SYMANNOT len_Nat sym_Ref AnnotInfoBody
- * | 41 CHILDREN len_Nat sym_Ref {sym_Ref}
- * | 42 ANNOTATEDtpe len_Nat [sym_Ref /* no longer needed */] tpe_Ref {annotinfo_Ref}
- * | 43 ANNOTINFO len_Nat AnnotInfoBody
- * | 44 ANNOTARGARRAY len_Nat {constAnnotArg_Ref}
- * | 47 DEBRUIJNINDEXtpe len_Nat level_Nat index_Nat
- * | 48 EXISTENTIALtpe len_Nat type_Ref {symbol_Ref}
- */
val noSymbol = 3 -^ NoSymbol
val typeSymbol = symbolEntry(4) ^^ TypeSymbol as "typeSymbol"
val aliasSymbol = symbolEntry(5) ^^ AliasSymbol as "alias"
@@ -260,10 +213,9 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
22 -~ typeRef ~ (symbolRef*) ^~^ MethodType,
42 -~ typeRef ~ (attribTreeRef*) ^~^ AnnotatedType,
51 -~ typeRef ~ symbolRef ~ (attribTreeRef*) ^~~^ AnnotatedWithSelfType,
- 47 -~ typeLevel ~ typeIndex ^~^ DeBruijnIndexType,
48 -~ typeRef ~ (symbolRef*) ^~^ ExistentialType) as "type"
- lazy val literal = oneOf(
+ lazy val literal: EntryParser[Any] = oneOf(
24 -^ (()),
25 -~ longValue ^^ (_ != 0L),
26 -~ longValue ^^ (_.toByte),
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
index cfe615a6d5..e5a4ff649e 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
@@ -70,7 +70,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
}
def isCaseClassObject(o: ObjectSymbol): Boolean = {
- val TypeRefType(prefix, classSymbol: ClassSymbol, typeArgs) = o.infoType
+ val TypeRefType(_, classSymbol: ClassSymbol, _) = o.infoType
o.isFinal && (classSymbol.children.find(x => x.isCase && x.isInstanceOf[MethodSymbol]) match {
case Some(_) => true
case None => false
@@ -167,7 +167,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
print("object ")
val poName = o.symbolInfo.owner.name
print(processName(poName))
- val TypeRefType(prefix, classSymbol: ClassSymbol, typeArgs) = o.infoType
+ val TypeRefType(_, classSymbol: ClassSymbol, _) = o.infoType
printType(classSymbol)
print(" {\n")
printChildren(level, classSymbol)
@@ -179,7 +179,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
printModifiers(o)
print("object ")
print(processName(o.name))
- val TypeRefType(prefix, classSymbol: ClassSymbol, typeArgs) = o.infoType
+ val TypeRefType(_, classSymbol: ClassSymbol, _) = o.infoType
printType(classSymbol)
print(" {\n")
printChildren(level, classSymbol)
@@ -191,7 +191,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
val j = str.indexOf("[")
if (j > 0) str = str.substring(0, j)
str = StringUtil.trimStart(str, "=> ")
- var i = str.lastIndexOf(".")
+ val i = str.lastIndexOf(".")
val res = if (i > 0) str.substring(i + 1) else str
if (res.length > 1) StringUtil.decapitalize(res.substring(0, 1)) else res.toLowerCase
})
@@ -381,7 +381,6 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
toString(typeRef, sep)
}
case AnnotatedWithSelfType(typeRef, symbol, attribTreeRefs) => toString(typeRef, sep)
- //case DeBruijnIndexType(typeLevel, typeIndex) =>
case ExistentialType(typeRef, symbols) => {
val refs = symbols.map(toString _).filter(!_.startsWith("_")).map("type " + _)
toString(typeRef, sep) + (if (refs.size > 0) refs.mkString(" forSome {", "; ", "}") else "")
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala
index 543ddbe186..0444e701f2 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala
@@ -22,5 +22,4 @@ case class PolyType(typeRef : Type, symbols : Seq[TypeSymbol]) extends Type
case class PolyTypeWithCons(typeRef : Type, symbols : Seq[TypeSymbol], cons: String) extends Type
case class AnnotatedType(typeRef : Type, attribTreeRefs : List[Int]) extends Type
case class AnnotatedWithSelfType(typeRef : Type, symbol : Symbol, attribTreeRefs : List[Int]) extends Type
-case class DeBruijnIndexType(typeLevel : Int, typeIndex : Int) extends Type
case class ExistentialType(typeRef : Type, symbols : Seq[Symbol]) extends Type
diff --git a/src/swing/scala/swing/Button.scala b/src/swing/scala/swing/Button.scala
index f10d49d804..0170727e3b 100644
--- a/src/swing/scala/swing/Button.scala
+++ b/src/swing/scala/swing/Button.scala
@@ -6,11 +6,8 @@
** |/ **
\* */
-
-
package scala.swing
-import event._
import javax.swing._
object Button {
diff --git a/src/swing/scala/swing/ButtonGroup.scala b/src/swing/scala/swing/ButtonGroup.scala
index 2075df7c92..0b04d20837 100644
--- a/src/swing/scala/swing/ButtonGroup.scala
+++ b/src/swing/scala/swing/ButtonGroup.scala
@@ -8,9 +8,7 @@
package scala.swing
-import event._
-import javax.swing.{AbstractButton => JAbstractButton,Icon}
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
/**
* A button mutex. At most one of its associated buttons is selected
diff --git a/src/swing/scala/swing/ColorChooser.scala b/src/swing/scala/swing/ColorChooser.scala
new file mode 100644
index 0000000000..591e652f1c
--- /dev/null
+++ b/src/swing/scala/swing/ColorChooser.scala
@@ -0,0 +1,45 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.swing
+
+import javax.swing.JColorChooser
+import event._
+
+/**
+ * Wrapper for JColorChooser. Publishes `ColorChanged` events when the color selection changes.
+ *
+ * @author andy@hicks.net
+ * @author Ingo Maier
+ * @see javax.swing.JColorChooser
+ */
+object ColorChooser {
+ def showDialog(parent: Component, title: String, color: Color): scala.Option[Color] = {
+ toOption[Color](javax.swing.JColorChooser.showDialog(parent.peer, title, color))
+ }
+}
+
+class ColorChooser(color0: Color) extends Component {
+ def this() = this(java.awt.Color.white)
+
+ override lazy val peer: JColorChooser = new JColorChooser(color0) with SuperMixin
+
+ peer.getSelectionModel.addChangeListener(new javax.swing.event.ChangeListener {
+ def stateChanged(e: javax.swing.event.ChangeEvent) {
+ publish(ColorChanged(ColorChooser.this, peer.getColor))
+ }
+ })
+
+ def color: Color = peer.getColor
+ def color_=(c: Color) = peer.setColor(c)
+
+ def dragEnabled: Boolean = peer.getDragEnabled
+ def dragEnabled_=(b: Boolean) = peer.setDragEnabled(b)
+} \ No newline at end of file
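A short usage sketch for the ColorChooser added above, based only on the API visible in this diff (the constructor, color, and the ColorChanged event); the frame and object names are illustrative and do not come from the patch.

    // Sketch: embed the new ColorChooser in a frame and react to selection changes.
    import scala.swing._
    import scala.swing.event.ColorChanged

    object ColorChooserSketch extends SimpleSwingApplication {
      def top = new MainFrame {
        title = "ColorChooser sketch"
        val chooser = new ColorChooser(java.awt.Color.red)
        contents = chooser
        listenTo(chooser)
        reactions += {
          case ColorChanged(_, c) => println("selected " + c) // fires as the selection changes
        }
      }
    }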
diff --git a/src/swing/scala/swing/ComboBox.scala b/src/swing/scala/swing/ComboBox.scala
index 5b70f6fda9..ce2b3ba6fb 100644
--- a/src/swing/scala/swing/ComboBox.scala
+++ b/src/swing/scala/swing/ComboBox.scala
@@ -182,7 +182,7 @@ class ComboBox[A](items: Seq[A]) extends Component with Publisher {
* of the component to its own defaults _after_ the renderer has been
* configured. That's Swing's principle of most surprise.
*/
- def renderer: ListView.Renderer[A] = ListView.Renderer.wrap(peer.getRenderer)
+ def renderer: ListView.Renderer[A] = ListView.Renderer.wrap[A](peer.getRenderer)
def renderer_=(r: ListView.Renderer[A]) { peer.setRenderer(r.peer) }
/* XXX: currently not safe to expose:
diff --git a/src/swing/scala/swing/EditorPane.scala b/src/swing/scala/swing/EditorPane.scala
index b8c506daf0..9b1aab7874 100644
--- a/src/swing/scala/swing/EditorPane.scala
+++ b/src/swing/scala/swing/EditorPane.scala
@@ -6,13 +6,10 @@
** |/ **
\* */
-
package scala.swing
-import event._
import javax.swing._
import javax.swing.text._
-import java.awt.event._
/**
* A text component that allows multiline text input and display.
diff --git a/src/swing/scala/swing/Font.scala.disabled b/src/swing/scala/swing/Font.scala.disabled
deleted file mode 100644
index 9e21eb859c..0000000000
--- a/src/swing/scala/swing/Font.scala.disabled
+++ /dev/null
@@ -1,70 +0,0 @@
-package scala.swing
-
-/*object Font {
- def apply(fontFormat: Int, fontFile: java.io.File) = java.awt.Font.createFont(fontFormat, fontFile)
- def apply(fontFormat: Int, fontStream: java.io.InputStream) = java.awt.Font.createFont(fontFormat, fontStream)
- def decode(str: String) = java.awt.Font.decode(str)
-
- /* TODO: finish implementation
- /**
- * See [java.awt.Font.getFont].
- */
- def get(attributes: Map[_ <: java.text.AttributedCharacterIterator.Attribute, _]) =
- java.awt.Font.getFont(ImmutableMapWrapper(attributes))
-
- import java.{util => ju}
- private case class ImmutableMapWrapper[A, B](underlying : Map[A, B])(t : ClassTag[A]) extends ju.AbstractMap[A, B] {
- self =>
- override def size = underlying.size
-
- override def put(k : A, v : B) =
- throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
- override def remove(k : AnyRef) =
- throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
-
- override def entrySet : ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
- def size = self.size
-
- def iterator = new ju.Iterator[ju.Map.Entry[A, B]] {
- val ui = underlying.iterator
- var prev : Option[A] = None
-
- def hasNext = ui.hasNext
-
- def next = {
- val (k, v) = ui.next
- prev = Some(k)
- new ju.Map.Entry[A, B] {
- def getKey = k
- def getValue = v
- def setValue(v1 : B) = self.put(k, v1)
- override def equals(other : Any) = other match {
- case e : ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
- case _ => false
- }
- }
- }
-
- def remove = prev match {
- case Some(k) => val v = self.remove(k.asInstanceOf[AnyRef]) ; prev = None ; v
- case _ => throw new IllegalStateException("next must be called at least once before remove")
- }
- }
- }
- }
- */
-
- /**
- * See [java.awt.Font.getFont].
- */
- def get(nm: String) = java.awt.Font.getFont(nm)
- /**
- * See [java.awt.Font.getFont].
- */
- def get(nm: String, font: Font) = java.awt.Font.getFont(nm, font)
-
- def Insets(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
- def Rectangle(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
- def Point(x: Int, y: Int) = new Point(x, y)
- def Dimension(x: Int, y: Int) = new Dimension(x, y)
-}*/ \ No newline at end of file
diff --git a/src/swing/scala/swing/FormattedTextField.scala b/src/swing/scala/swing/FormattedTextField.scala
index 311ff42d0a..b08075850c 100644
--- a/src/swing/scala/swing/FormattedTextField.scala
+++ b/src/swing/scala/swing/FormattedTextField.scala
@@ -6,13 +6,9 @@
** |/ **
\* */
-
-
package scala.swing
-import event._
import javax.swing._
-import java.awt.event._
object FormattedTextField {
/**
diff --git a/src/swing/scala/swing/ListView.scala b/src/swing/scala/swing/ListView.scala
index 40639aa9e2..d0c4e45190 100644
--- a/src/swing/scala/swing/ListView.scala
+++ b/src/swing/scala/swing/ListView.scala
@@ -216,7 +216,7 @@ class ListView[A] extends Component {
def adjusting = peer.getSelectionModel.getValueIsAdjusting
}
- def renderer: ListView.Renderer[A] = ListView.Renderer.wrap(peer.getCellRenderer)
+ def renderer: ListView.Renderer[A] = ListView.Renderer.wrap[A](peer.getCellRenderer)
def renderer_=(r: ListView.Renderer[A]) { peer.setCellRenderer(r.peer) }
def fixedCellWidth = peer.getFixedCellWidth
diff --git a/src/swing/scala/swing/MainFrame.scala b/src/swing/scala/swing/MainFrame.scala
index 85ce0755ac..1dfc155f9c 100644
--- a/src/swing/scala/swing/MainFrame.scala
+++ b/src/swing/scala/swing/MainFrame.scala
@@ -6,12 +6,8 @@
** |/ **
\* */
-
-
package scala.swing
-import event._
-
/**
* A frame that can be used for main application windows. Shuts down the
* framework and quits the application when closed.
diff --git a/src/swing/scala/swing/PasswordField.scala b/src/swing/scala/swing/PasswordField.scala
index d2fdd0d38a..fd0b586a0f 100644
--- a/src/swing/scala/swing/PasswordField.scala
+++ b/src/swing/scala/swing/PasswordField.scala
@@ -6,13 +6,9 @@
** |/ **
\* */
-
-
package scala.swing
-import event._
import javax.swing._
-import java.awt.event._
/**
* A password field that displays a replacement character for each character in the password.
diff --git a/src/swing/scala/swing/PopupMenu.scala b/src/swing/scala/swing/PopupMenu.scala
new file mode 100644
index 0000000000..e82c3a1b28
--- /dev/null
+++ b/src/swing/scala/swing/PopupMenu.scala
@@ -0,0 +1,65 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.swing
+
+import javax.swing.JPopupMenu
+import javax.swing.event.{PopupMenuListener, PopupMenuEvent}
+import event._
+
+/**
+ * A popup menu.
+ *
+ * Example usage:
+ *
+ * {{{
+ * val popupMenu = new PopupMenu {
+ * contents += new Menu("menu 1") {
+ * contents += new RadioMenuItem("radio 1.1")
+ * contents += new RadioMenuItem("radio 1.2")
+ * }
+ * contents += new Menu("menu 2") {
+ * contents += new RadioMenuItem("radio 2.1")
+ * contents += new RadioMenuItem("radio 2.2")
+ * }
+ * }
+ * val button = new Button("Show Popup Menu")
+ * reactions += {
+ * case e: ButtonClicked => popupMenu.show(button, 0, button.bounds.height)
+ * }
+ * listenTo(button)
+ * }}}
+ *
+ * @author John Sullivan
+ * @author Ingo Maier
+ * @see javax.swing.JPopupMenu
+ */
+class PopupMenu extends Component with SequentialContainer.Wrapper with Publisher {
+ override lazy val peer: JPopupMenu = new JPopupMenu with SuperMixin
+
+ peer.addPopupMenuListener(new PopupMenuListener {
+ def popupMenuCanceled(e: PopupMenuEvent) {
+ publish(PopupMenuCanceled(PopupMenu.this))
+ }
+ def popupMenuWillBecomeInvisible(e: PopupMenuEvent) {
+ publish(PopupMenuWillBecomeInvisible(PopupMenu.this))
+ }
+ def popupMenuWillBecomeVisible(e: PopupMenuEvent) {
+ publish(PopupMenuWillBecomeVisible(PopupMenu.this))
+ }
+ })
+
+ def show(invoker: Component, x: Int, y: Int): Unit = peer.show(invoker.peer, x, y)
+
+ def margin: Insets = peer.getMargin
+ def label: String = peer.getLabel
+ def label_=(s: String) { peer.setLabel(s) }
+}
+
diff --git a/src/swing/scala/swing/ProgressBar.scala b/src/swing/scala/swing/ProgressBar.scala
index 33dd716524..81e2989c3e 100644
--- a/src/swing/scala/swing/ProgressBar.scala
+++ b/src/swing/scala/swing/ProgressBar.scala
@@ -6,12 +6,8 @@
** |/ **
\* */
-
-
package scala.swing
-import event._
-
/**
* A bar indicating progress of some action. Can be in indeterminate mode,
* in which it indicates that the action is in progress (usually by some
diff --git a/src/swing/scala/swing/Reactions.scala b/src/swing/scala/swing/Reactions.scala
index d8a62aa99d..c32212cf3a 100644
--- a/src/swing/scala/swing/Reactions.scala
+++ b/src/swing/scala/swing/Reactions.scala
@@ -14,8 +14,6 @@ import event.Event
import scala.collection.mutable.{Buffer, ListBuffer}
object Reactions {
- import scala.ref._
-
class Impl extends Reactions {
private val parts: Buffer[Reaction] = new ListBuffer[Reaction]
def isDefinedAt(e: Event) = parts.exists(_ isDefinedAt e)
diff --git a/src/swing/scala/swing/SplitPane.scala b/src/swing/scala/swing/SplitPane.scala
index dd4f2908d5..f61dfedbf4 100644
--- a/src/swing/scala/swing/SplitPane.scala
+++ b/src/swing/scala/swing/SplitPane.scala
@@ -6,11 +6,8 @@
** |/ **
\* */
-
-
package scala.swing
-import event._
import Swing._
/**
diff --git a/src/swing/scala/swing/SwingWorker.scala b/src/swing/scala/swing/SwingWorker.scala
deleted file mode 100644
index f4eeb5824a..0000000000
--- a/src/swing/scala/swing/SwingWorker.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package scala.swing
-
-import scala.actors._
-
-@deprecated("Will be removed in 2.11.0", "2.10.1")
-object SwingWorker {
-
-}
-
-@deprecated("Depends on the deprecated package scala.actors. Will be removed in 2.11.0", "2.10.1")
-abstract class SwingWorker extends Actor {
- def queue() {
-
- }
-
- def done() {
-
- }
-
- private var _cancelled = false
- def cancelled: Boolean = _cancelled
- def cancelled_=(b: Boolean) { _cancelled = b }
-}
diff --git a/src/swing/scala/swing/TabbedPane.scala b/src/swing/scala/swing/TabbedPane.scala
index 338050515a..6e46256f86 100644
--- a/src/swing/scala/swing/TabbedPane.scala
+++ b/src/swing/scala/swing/TabbedPane.scala
@@ -112,9 +112,6 @@ class TabbedPane extends Component with Publisher {
*/
def tabPlacement_=(b: Alignment.Value) { peer.setTabPlacement(b.id) }
- @deprecated("Use tabPlacement_=() instead.", "2.9.1")
- def tabPlacement(b: Alignment.Value) { peer.setTabPlacement(b.id) }
-
/**
* The current page selection
*/
diff --git a/src/swing/scala/swing/TextArea.scala b/src/swing/scala/swing/TextArea.scala
index 01bf115d28..2f6bdca119 100644
--- a/src/swing/scala/swing/TextArea.scala
+++ b/src/swing/scala/swing/TextArea.scala
@@ -6,13 +6,9 @@
** |/ **
\* */
-
-
package scala.swing
-import event._
import javax.swing._
-import java.awt.event._
/**
* A text component that allows multiline text input and display.
diff --git a/src/swing/scala/swing/TextComponent.scala b/src/swing/scala/swing/TextComponent.scala
index 48c03a5f54..4d23399737 100644
--- a/src/swing/scala/swing/TextComponent.scala
+++ b/src/swing/scala/swing/TextComponent.scala
@@ -6,12 +6,9 @@
** |/ **
\* */
-
-
package scala.swing
import event._
-import javax.swing._
import javax.swing.text._
import javax.swing.event._
diff --git a/src/swing/scala/swing/ToggleButton.scala b/src/swing/scala/swing/ToggleButton.scala
index 3d3d0b957f..8f210d00d8 100644
--- a/src/swing/scala/swing/ToggleButton.scala
+++ b/src/swing/scala/swing/ToggleButton.scala
@@ -6,11 +6,8 @@
** |/ **
\* */
-
-
package scala.swing
-import event._
import javax.swing._
/**
diff --git a/src/swing/scala/swing/Window.scala b/src/swing/scala/swing/Window.scala
index 5bdb50e959..a9f4ae7538 100644
--- a/src/swing/scala/swing/Window.scala
+++ b/src/swing/scala/swing/Window.scala
@@ -6,13 +6,10 @@
** |/ **
\* */
-
-
package scala.swing
import java.awt.{Window => AWTWindow}
import event._
-import javax.swing._
/**
* A window with decoration such as a title, border, and action buttons.
diff --git a/src/swing/scala/swing/SwingActor.scala b/src/swing/scala/swing/event/ColorChanged.scala
index 035e979bee..6cb35e7755 100644
--- a/src/swing/scala/swing/SwingActor.scala
+++ b/src/swing/scala/swing/event/ColorChanged.scala
@@ -9,7 +9,6 @@
package scala.swing
+package event
-// Dummy to keep ant from recompiling on every run.
-@deprecated("Will be removed in 2.11.0", "2.10.1")
-trait SwingActor { }
+case class ColorChanged(source: Component, c: Color) extends ComponentEvent with SelectionEvent
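A small sketch of consuming the relocated ColorChanged event, assuming only the standard scala.swing Reactor/Publisher machinery; the ColorLogger name and log message are illustrative.

import scala.swing._
import scala.swing.event.ColorChanged

// Logs every ColorChanged event published by the given component (e.g. a color chooser).
class ColorLogger(source: Component) extends Reactor {
  listenTo(source)
  reactions += {
    case ColorChanged(_, c) => println("new color: " + c)
  }
}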
diff --git a/src/library/scala/SpecializableCompanion.scala b/src/swing/scala/swing/event/PopupMenuEvent.scala
index 1a9ce71d2a..ad39eb351a 100644
--- a/src/library/scala/SpecializableCompanion.scala
+++ b/src/swing/scala/swing/event/PopupMenuEvent.scala
@@ -1,14 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala
-/** A common supertype for companion classes which specialization takes into account.
- */
-@deprecated("Use Specializable instead", "2.10.0")
-private[scala] trait SpecializableCompanion
+
+package scala.swing
+package event
+
+abstract class PopupMenuEvent extends ComponentEvent
+
+case class PopupMenuCanceled(source: PopupMenu) extends PopupMenuEvent
+case class PopupMenuWillBecomeInvisible(source: PopupMenu) extends PopupMenuEvent
+case class PopupMenuWillBecomeVisible(source: PopupMenu) extends PopupMenuEvent
\ No newline at end of file
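A sketch of how the new hierarchy is meant to be consumed: the three case classes mirror the javax.swing.event.PopupMenuListener callbacks, so a single partial function can cover the whole popup lifecycle. The object and method names below are illustrative, and Reactions.Reaction is assumed to be the usual PartialFunction[Event, Unit] alias.

import scala.swing._
import scala.swing.event._

object PopupReactions {
  // One reaction covering the full popup lifecycle for a given menu.
  // Usage (illustrative): frame.reactions += PopupReactions.popupLifecycle(popup)
  def popupLifecycle(menu: PopupMenu): Reactions.Reaction = {
    case PopupMenuWillBecomeVisible(`menu`)   => println("about to show")
    case PopupMenuWillBecomeInvisible(`menu`) => println("about to hide")
    case PopupMenuCanceled(`menu`)            => println("dismissed without a selection")
  }
}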
diff --git a/test/attic/files/cli/test1/Main.check.j9vm5 b/test/attic/files/cli/test1/Main.check.j9vm5
deleted file mode 100644
index de454ef478..0000000000
--- a/test/attic/files/cli/test1/Main.check.j9vm5
+++ /dev/null
@@ -1,4 +0,0 @@
-env: -cpp: No such file or directory
-env: test1.Main: No such file or directory
-env: -cp: No such file or directory
-1: test 3 passed
diff --git a/test/attic/files/cli/test1/Main.check.java b/test/attic/files/cli/test1/Main.check.java
deleted file mode 100644
index 64410de98f..0000000000
--- a/test/attic/files/cli/test1/Main.check.java
+++ /dev/null
@@ -1,6 +0,0 @@
-Unrecognized option: -cpp
-Could not create the Java virtual machine.
-1: test 1 passed (1)
-1: test 2 passed (1)
-1: test 3 passed (1)
-1: test 4 passed (2)
diff --git a/test/attic/files/cli/test1/Main.check.java5 b/test/attic/files/cli/test1/Main.check.java5
deleted file mode 100644
index 64410de98f..0000000000
--- a/test/attic/files/cli/test1/Main.check.java5
+++ /dev/null
@@ -1,6 +0,0 @@
-Unrecognized option: -cpp
-Could not create the Java virtual machine.
-1: test 1 passed (1)
-1: test 2 passed (1)
-1: test 3 passed (1)
-1: test 4 passed (2)
diff --git a/test/attic/files/cli/test1/Main.check.java5_api b/test/attic/files/cli/test1/Main.check.java5_api
deleted file mode 100644
index 8693a5d92f..0000000000
--- a/test/attic/files/cli/test1/Main.check.java5_api
+++ /dev/null
@@ -1,19 +0,0 @@
-|-- allclasses-frame.html
-|-- allclasses-noframe.html
-|-- constant-values.html
-|-- deprecated-list.html
-|-- help-doc.html
-|-- index-all.html
-|-- index.html
-|-- overview-tree.html
-|-- package-list
-|-- resources
-| `-- inherit.gif
-|-- stylesheet.css
-`-- test1
- |-- Main.html
- |-- package-frame.html
- |-- package-summary.html
- `-- package-tree.html
-
-2 directories, 15 files
diff --git a/test/attic/files/cli/test1/Main.check.java5_j9 b/test/attic/files/cli/test1/Main.check.java5_j9
deleted file mode 100644
index de454ef478..0000000000
--- a/test/attic/files/cli/test1/Main.check.java5_j9
+++ /dev/null
@@ -1,4 +0,0 @@
-env: -cpp: No such file or directory
-env: test1.Main: No such file or directory
-env: -cp: No such file or directory
-1: test 3 passed
diff --git a/test/attic/files/cli/test1/Main.check.javac b/test/attic/files/cli/test1/Main.check.javac
deleted file mode 100644
index ba25d9b6ca..0000000000
--- a/test/attic/files/cli/test1/Main.check.javac
+++ /dev/null
@@ -1,19 +0,0 @@
-javac: invalid flag: -dd
-Usage: javac <options> <source files>
-where possible options include:
- -g Generate all debugging info
- -g:none Generate no debugging info
- -g:{lines,vars,source} Generate only some debugging info
- -nowarn Generate no warnings
- -verbose Output messages about what the compiler is doing
- -deprecation Output source locations where deprecated APIs are used
- -classpath <path> Specify where to find user class files
- -sourcepath <path> Specify where to find input source files
- -bootclasspath <path> Override location of bootstrap class files
- -extdirs <dirs> Override location of installed extensions
- -d <directory> Specify where to place generated class files
- -encoding <encoding> Specify character encoding used by source files
- -source <release> Provide source compatibility with specified release
- -target <release> Generate class files for specific VM version
- -help Print a synopsis of standard options
-
diff --git a/test/attic/files/cli/test1/Main.check.javac5 b/test/attic/files/cli/test1/Main.check.javac5
deleted file mode 100644
index 0cb29d31ff..0000000000
--- a/test/attic/files/cli/test1/Main.check.javac5
+++ /dev/null
@@ -1,24 +0,0 @@
-javac: invalid flag: -dd
-Usage: javac <options> <source files>
-where possible options include:
- -g Generate all debugging info
- -g:none Generate no debugging info
- -g:{lines,vars,source} Generate only some debugging info
- -nowarn Generate no warnings
- -verbose Output messages about what the compiler is doing
- -deprecation Output source locations where deprecated APIs are used
- -classpath <path> Specify where to find user class files
- -cp <path> Specify where to find user class files
- -sourcepath <path> Specify where to find input source files
- -bootclasspath <path> Override location of bootstrap class files
- -extdirs <dirs> Override location of installed extensions
- -endorseddirs <dirs> Override location of endorsed standards path
- -d <directory> Specify where to place generated class files
- -encoding <encoding> Specify character encoding used by source files
- -source <release> Provide source compatibility with specified release
- -target <release> Generate class files for specific VM version
- -version Version information
- -help Print a synopsis of standard options
- -X Print a synopsis of nonstandard options
- -J<flag> Pass <flag> directly to the runtime system
-
diff --git a/test/attic/files/cli/test1/Main.check.javac6 b/test/attic/files/cli/test1/Main.check.javac6
deleted file mode 100644
index 8f37a05bcb..0000000000
--- a/test/attic/files/cli/test1/Main.check.javac6
+++ /dev/null
@@ -1,29 +0,0 @@
-javac: invalid flag: -dd
-Usage: javac <options> <source files>
-where possible options include:
- -g Generate all debugging info
- -g:none Generate no debugging info
- -g:{lines,vars,source} Generate only some debugging info
- -nowarn Generate no warnings
- -verbose Output messages about what the compiler is doing
- -deprecation Output source locations where deprecated APIs are used
- -classpath <path> Specify where to find user class files and annotation processors
- -cp <path> Specify where to find user class files and annotation processors
- -sourcepath <path> Specify where to find input source files
- -bootclasspath <path> Override location of bootstrap class files
- -extdirs <dirs> Override location of installed extensions
- -endorseddirs <dirs> Override location of endorsed standards path
- -proc:{none, only} Control whether annotation processing and/or compilation is done.
- -processor <class> Name of the annotation processor to run; bypasses default discovery process
- -processorpath <path> Specify where to find annotation processors
- -d <directory> Specify where to place generated class files
- -s <directory> Specify where to place generated source files
- -encoding <encoding> Specify character encoding used by source files
- -source <release> Provide source compatibility with specified release
- -target <release> Generate class files for specific VM version
- -version Version information
- -help Print a synopsis of standard options
- -A[key[=value]] Options to pass to annotation processors
- -X Print a synopsis of nonstandard options
- -J<flag> Pass <flag> directly to the runtime system
-
diff --git a/test/attic/files/cli/test1/Main.check.jikes b/test/attic/files/cli/test1/Main.check.jikes
deleted file mode 100644
index cd891689db..0000000000
--- a/test/attic/files/cli/test1/Main.check.jikes
+++ /dev/null
@@ -1,3 +0,0 @@
-Error: "-dd" is an invalid option.
-use: jikes [options] [@files] file.java...
-For more help, try -help or -version.
diff --git a/test/attic/files/cli/test1/Main.check.jikes5 b/test/attic/files/cli/test1/Main.check.jikes5
deleted file mode 100644
index cd891689db..0000000000
--- a/test/attic/files/cli/test1/Main.check.jikes5
+++ /dev/null
@@ -1,3 +0,0 @@
-Error: "-dd" is an invalid option.
-use: jikes [options] [@files] file.java...
-For more help, try -help or -version.
diff --git a/test/attic/files/cli/test1/Main.check.scala b/test/attic/files/cli/test1/Main.check.scala
deleted file mode 100644
index 43b200ae02..0000000000
--- a/test/attic/files/cli/test1/Main.check.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-unknown option: '-cpp'
-scala [ <option> ]... [<torun> <arguments>]
-
-All options to scalac are allowed. See scalac -help.
-
-<torun>, if present, is an object or script file to run.
-If no <torun> is present, run an interactive interpreter.
-
-Option -howtorun allows explicitly specifying how to run <torun>:
- script: it is a script file
- object: it is an object name
- guess: (the default) try to guess
-
-Option -savecompiled requests that the compiled script be saved
-for future use.
-
-Option -nocompdaemon requests that the fsc offline compiler not be used.
-
-Option -Dproperty=value sets a Java system property.
-
-1: test 1 passed (1)
-1: test 2 passed (1)
-1: test 3 passed (1)
-1: test 4 passed (2)
diff --git a/test/attic/files/cli/test1/Main.check.scala_api b/test/attic/files/cli/test1/Main.check.scala_api
deleted file mode 100644
index 6fac39d3f0..0000000000
--- a/test/attic/files/cli/test1/Main.check.scala_api
+++ /dev/null
@@ -1,33 +0,0 @@
-|-- all-classes.html
-|-- index.html
-|-- modules.html
-|-- nav-classes.html
-|-- root-content.html
-|-- scala
-| |-- Any.html
-| |-- AnyRef.html
-| |-- AnyVal.html
-| |-- Boolean.html
-| |-- Byte.html
-| |-- Char.html
-| |-- Double.html
-| |-- Float.html
-| |-- Int.html
-| |-- Long.html
-| |-- Nothing.html
-| |-- Null.html
-| |-- Short.html
-| |-- Unit.html
-| `-- runtime
-| |-- BoxedFloat.html
-| |-- BoxedInt.html
-| |-- BoxedLong.html
-| `-- BoxedNumber.html
-|-- script.js
-|-- style.css
-|-- test1
-| `-- Main$object.html
-|-- test1$content.html
-`-- test1$package.html
-
-3 directories, 28 files
diff --git a/test/attic/files/cli/test1/Main.check.scala_j9 b/test/attic/files/cli/test1/Main.check.scala_j9
deleted file mode 100644
index 65d5ddaac4..0000000000
--- a/test/attic/files/cli/test1/Main.check.scala_j9
+++ /dev/null
@@ -1,15 +0,0 @@
-unknown option: '-cpp'
-scala [ <compiler-option> | -howtorun:how ]... [<torun> <arguments>]
-
-<compiler-option>'s are as for scalac; see scalac -help.
-<torun>, if present, is an object or script file to run.
-If no <torun> is present, run an interactive interpreter.
--howtorun allows explicitly specifying how to run <torun>:
- script: it is a script file
- object: it is an object name
- guess: (the default) try to guess
-
-1: test 1 passed (1)
-1: test 2 passed (1)
-1: test 3 passed (1)
-1: test 4 passed (2)
diff --git a/test/attic/files/cli/test1/Main.check.scalac b/test/attic/files/cli/test1/Main.check.scalac
deleted file mode 100644
index 8465810d0b..0000000000
--- a/test/attic/files/cli/test1/Main.check.scalac
+++ /dev/null
@@ -1,63 +0,0 @@
-scalac error: bad option: '-dd'
- scalac -help gives more information
-Usage: scalac <options | source files>
-where possible options include:
- -doc Generate documentation
- -g:<g> Generate debugging info (none,source,line,vars,notc)
- -nowarn Generate no warnings
- -noassert Generate no assertions and assumptions
- -verbose Output messages about what the compiler is doing
- -classpath <path> Specify where to find user class files
- -sourcepath <path> Specify where to find input source files
- -bootclasspath <path> Override location of bootstrap class files
- -extdirs <dirs> Override location of installed extensions
- -d <directory> Specify where to place generated class files
- -encoding <encoding> Specify character encoding used by source files
- -windowtitle <windowtitle> Specify window title of generated HTML documentation
- -documenttitle <documenttitle> Specify document title of generated HTML documentation
- -target:<target> Specify which backend to use (jvm-1.5,msil)
- -migrate Assist in migrating from Scala version 1.0
- -o <file> Name of the output assembly (only relevant with -target:msil)
- -r <path> List of assemblies referenced by the program (only relevant with -target:msil)
- -debug Output debugging messages
- -deprecation enable detailed deprecation warnings
- -unchecked enable detailed unchecked warnings
- -statistics Print compiler statistics
- -explaintypes Explain type errors in more detail
- -resident Compiler stays resident, files to compile are read from standard input
- -uniqid Print identifiers with unique names (debugging option)
- -printtypes Print tree types (debugging option)
- -prompt Display a prompt after each error (debugging option)
- -noimports Compile without any implicit imports
- -nopredefs Compile without any implicit predefined values
- -skip:<phase> Skip <phase>
- -check:<phase> Check the tree at start of <phase>
- -print:<phase> Print out program after <phase>
- -printer:<printer> Printer to use (text,html)
- -printfile <file> Specify file in which to print trees
- -graph:<phase> Graph the program after <phase>
- -browse:<phase> Browse the abstract syntax tree after <phase>
- -stop:<phase> Stop after phase <phase>
- -log:<phase> Log operations in <phase>
- -logall Log all operations
- -version Print product version and exit
- -help Print a synopsis of standard options
- -nouescape disables handling of \u unicode escapes
- -Xinline Perform inlining when possible
- -XO Optimize. implies -Xinline, -Xcloselim and -Xdce
- -Xcloselim Perform closure elimination
- -Xdce Perform dead code elimination
- -Xwarndeadcode Emit warnings for dead code
- -XbytecodeRead Enable bytecode reader.
- -Xdetach Perform detaching of remote closures
- -Xshowcls <class> Show class info
- -Xshowobj <object> Show object info
- -Xlinearizer:<Xlinearizer> Linearizer to use (normal,dfs,rpo,dump)
- -Xgenerics Use generic Java types
- -Xprintpos Print tree positions (as offsets)
- -Xscript compile script file
- -Xexperimental enable experimental extensions
- -Xplugtypes parse but ignore annotations in more locations
- -Xkilloption optimizes option types
-
-one error found
diff --git a/test/attic/files/cli/test1/Main.check.scalaint b/test/attic/files/cli/test1/Main.check.scalaint
deleted file mode 100644
index 88345d1874..0000000000
--- a/test/attic/files/cli/test1/Main.check.scalaint
+++ /dev/null
@@ -1,45 +0,0 @@
-unknown option: '-cpp'
-scala [ <option> ]... [<torun> <arguments>]
-
-All options to scalac are allowed. See scalac -help.
-
-<torun>, if present, is an object or script file to run.
-If no <torun> is present, run an interactive interpreter.
-
-Option -howtorun allows explicitly specifying how to run <torun>:
- script: it is a script file
- object: it is an object name
- guess: (the default) try to guess
-
-Option -savecompiled requests that the compiled script be saved
-for future use.
-
-Option -nocompdaemon requests that the fsc offline compiler not be used.
-
-Option -Dproperty=value sets a Java system property.
-
-
-This is an interpreter for Scala.
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> 1: test 1 passed (1)
-unnamed0: scala.Unit = ()
-
-scala>
-This is an interpreter for Scala.
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> 1: test 2 passed (1)
-unnamed0: scala.Unit = ()
-
-scala>
-This is an interpreter for Scala.
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> 1: test 3 passed (1)
-unnamed0: scala.Unit = ()
-
-scala>
diff --git a/test/attic/files/cli/test1/Main.java b/test/attic/files/cli/test1/Main.java
deleted file mode 100644
index 8850b87517..0000000000
--- a/test/attic/files/cli/test1/Main.java
+++ /dev/null
@@ -1,8 +0,0 @@
-// @info no dependency
-package test1;
-public class Main {
- public static void main(String args[]) {
- String arg = (args.length > 0) ? args[0] : "?";
- System.out.println("1: test " + arg + " passed (" + args.length + ")");
- }
-}
diff --git a/test/attic/files/cli/test1/Main.scala b/test/attic/files/cli/test1/Main.scala
deleted file mode 100644
index f7dd8a0a36..0000000000
--- a/test/attic/files/cli/test1/Main.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-// @info no dependency
-package test1
-object Main {
- def main(args: Array[String]) = {
- val arg = if (args != null && args.length > 0) args(0) else "?"
- Console.println("1: test " + arg + " passed (" + args.length + ")")
- }
-}
diff --git a/test/attic/files/cli/test2/Main.check.j9vm5 b/test/attic/files/cli/test2/Main.check.j9vm5
deleted file mode 100644
index 8f4fdf8aa1..0000000000
--- a/test/attic/files/cli/test2/Main.check.j9vm5
+++ /dev/null
@@ -1,4 +0,0 @@
-env: -cpp: No such file or directory
-env: test2.Main: No such file or directory
-env: -cp: No such file or directory
-2: 1: test 3 passed
diff --git a/test/attic/files/cli/test2/Main.check.java b/test/attic/files/cli/test2/Main.check.java
deleted file mode 100644
index aca383de3e..0000000000
--- a/test/attic/files/cli/test2/Main.check.java
+++ /dev/null
@@ -1,6 +0,0 @@
-Unrecognized option: -cpp
-Could not create the Java virtual machine.
-2: 1: test 1 passed (1)
-2: 1: test 2 passed (1)
-2: 1: test 3 passed (1)
-2: 1: test 4 passed (2)
diff --git a/test/attic/files/cli/test2/Main.check.java5 b/test/attic/files/cli/test2/Main.check.java5
deleted file mode 100644
index aca383de3e..0000000000
--- a/test/attic/files/cli/test2/Main.check.java5
+++ /dev/null
@@ -1,6 +0,0 @@
-Unrecognized option: -cpp
-Could not create the Java virtual machine.
-2: 1: test 1 passed (1)
-2: 1: test 2 passed (1)
-2: 1: test 3 passed (1)
-2: 1: test 4 passed (2)
diff --git a/test/attic/files/cli/test2/Main.check.java5_api b/test/attic/files/cli/test2/Main.check.java5_api
deleted file mode 100644
index 4ff775c3da..0000000000
--- a/test/attic/files/cli/test2/Main.check.java5_api
+++ /dev/null
@@ -1,24 +0,0 @@
-|-- allclasses-frame.html
-|-- allclasses-noframe.html
-|-- constant-values.html
-|-- deprecated-list.html
-|-- help-doc.html
-|-- index-all.html
-|-- index.html
-|-- overview-tree.html
-|-- package-list
-|-- resources
-| `-- inherit.gif
-|-- stylesheet.css
-|-- test1
-| |-- Main.html
-| |-- package-frame.html
-| |-- package-summary.html
-| `-- package-tree.html
-`-- test2
- |-- Main.html
- |-- package-frame.html
- |-- package-summary.html
- `-- package-tree.html
-
-3 directories, 19 files
diff --git a/test/attic/files/cli/test2/Main.check.java5_j9 b/test/attic/files/cli/test2/Main.check.java5_j9
deleted file mode 100644
index 2dcb6e892a..0000000000
--- a/test/attic/files/cli/test2/Main.check.java5_j9
+++ /dev/null
@@ -1,36 +0,0 @@
-JVMJ9VM007E Command-line option unrecognised: -cpp
-Could not create the Java virtual machine.
-
-Usage: java [-options] class [args...]
- (to execute a class)
- or java [-jar] [-options] jarfile [args...]
- (to execute a jar file)
-
-where options include:
- -cp -classpath <directories and zip/jar files separated by :>
- set search path for application classes and resources
- -D<name>=<value>
- set a system property
- -verbose[:class|gc|jni]
- enable verbose output
- -version print product version
- -version:<value>
- require the specified version to run
- -showversion print product version and continue
- -jre-restrict-search | -no-jre-restrict-search
- include/exclude user private JREs in the version search
- -agentlib:<libname>[=<options>]
- load native agent library <libname>, e.g. -agentlib:hprof
- see also, -agentlib:jdwp=help and -agentlib:hprof=help
- -agentpath:<pathname>[=<options>]
- load native agent library by full pathname
- -javaagent:<jarpath>[=<options>]
- load Java programming language agent, see java.lang.instrument
- -? -help print this help message
- -X print help on non-standard options
- -assert print help on assert options
-
-The java class is not found: test2.Main
-The java class is not found: test2.Main
-The java class is not found: test2.Main
-The java class is not found: test2.Main
diff --git a/test/attic/files/cli/test2/Main.check.javac b/test/attic/files/cli/test2/Main.check.javac
deleted file mode 100644
index c40c0a7a89..0000000000
--- a/test/attic/files/cli/test2/Main.check.javac
+++ /dev/null
@@ -1,27 +0,0 @@
-javac: invalid flag: -dd
-Usage: javac <options> <source files>
-where possible options include:
- -g Generate all debugging info
- -g:none Generate no debugging info
- -g:{lines,vars,source} Generate only some debugging info
- -nowarn Generate no warnings
- -verbose Output messages about what the compiler is doing
- -deprecation Output source locations where deprecated APIs are used
- -classpath <path> Specify where to find user class files
- -sourcepath <path> Specify where to find input source files
- -bootclasspath <path> Override location of bootstrap class files
- -extdirs <dirs> Override location of installed extensions
- -d <directory> Specify where to place generated class files
- -encoding <encoding> Specify character encoding used by source files
- -source <release> Provide source compatibility with specified release
- -target <release> Generate class files for specific VM version
- -help Print a synopsis of standard options
-
-files/cli/test2/Main.java:6: package test1 does not exist
- test1.Main.main(args);
- ^
-1 error
-files/cli/test2/Main.java:6: package test1 does not exist
- test1.Main.main(args);
- ^
-1 error
diff --git a/test/attic/files/cli/test2/Main.check.javac5 b/test/attic/files/cli/test2/Main.check.javac5
deleted file mode 100644
index 0ac32b056e..0000000000
--- a/test/attic/files/cli/test2/Main.check.javac5
+++ /dev/null
@@ -1,28 +0,0 @@
-javac: invalid flag: -dd
-Usage: javac <options> <source files>
-where possible options include:
- -g Generate all debugging info
- -g:none Generate no debugging info
- -g:{lines,vars,source} Generate only some debugging info
- -nowarn Generate no warnings
- -verbose Output messages about what the compiler is doing
- -deprecation Output source locations where deprecated APIs are used
- -classpath <path> Specify where to find user class files
- -cp <path> Specify where to find user class files
- -sourcepath <path> Specify where to find input source files
- -bootclasspath <path> Override location of bootstrap class files
- -extdirs <dirs> Override location of installed extensions
- -endorseddirs <dirs> Override location of endorsed standards path
- -d <directory> Specify where to place generated class files
- -encoding <encoding> Specify character encoding used by source files
- -source <release> Provide source compatibility with specified release
- -target <release> Generate class files for specific VM version
- -version Version information
- -help Print a synopsis of standard options
- -X Print a synopsis of nonstandard options
- -J<flag> Pass <flag> directly to the runtime system
-
-files/cli/test2/Main.java:6: package test1 does not exist
- test1.Main.main(args);
- ^
-1 error
diff --git a/test/attic/files/cli/test2/Main.check.javac6 b/test/attic/files/cli/test2/Main.check.javac6
deleted file mode 100644
index 350d3253bc..0000000000
--- a/test/attic/files/cli/test2/Main.check.javac6
+++ /dev/null
@@ -1,33 +0,0 @@
-javac: invalid flag: -dd
-Usage: javac <options> <source files>
-where possible options include:
- -g Generate all debugging info
- -g:none Generate no debugging info
- -g:{lines,vars,source} Generate only some debugging info
- -nowarn Generate no warnings
- -verbose Output messages about what the compiler is doing
- -deprecation Output source locations where deprecated APIs are used
- -classpath <path> Specify where to find user class files and annotation processors
- -cp <path> Specify where to find user class files and annotation processors
- -sourcepath <path> Specify where to find input source files
- -bootclasspath <path> Override location of bootstrap class files
- -extdirs <dirs> Override location of installed extensions
- -endorseddirs <dirs> Override location of endorsed standards path
- -proc:{none, only} Control whether annotation processing and/or compilation is done.
- -processor <class> Name of the annotation processor to run; bypasses default discovery process
- -processorpath <path> Specify where to find annotation processors
- -d <directory> Specify where to place generated class files
- -s <directory> Specify where to place generated source files
- -encoding <encoding> Specify character encoding used by source files
- -source <release> Provide source compatibility with specified release
- -target <release> Generate class files for specific VM version
- -version Version information
- -help Print a synopsis of standard options
- -A[key[=value]] Options to pass to annotation processors
- -X Print a synopsis of nonstandard options
- -J<flag> Pass <flag> directly to the runtime system
-
-files/cli/test2/Main.java:5: package test1 does not exist
- test1.Main.main(args);
- ^
-1 error
diff --git a/test/attic/files/cli/test2/Main.check.jikes b/test/attic/files/cli/test2/Main.check.jikes
deleted file mode 100644
index 97943e8347..0000000000
--- a/test/attic/files/cli/test2/Main.check.jikes
+++ /dev/null
@@ -1,9 +0,0 @@
-Error: "-dd" is an invalid option.
-use: jikes [options] [@files] file.java...
-For more help, try -help or -version.
-
-Found 1 semantic error compiling "files/cli/test2/Main.java":
-
- 6. test1.Main.main(args);
- ^---^
-*** Semantic Error: No accessible field named "test1" was found in type "test2.Main".
diff --git a/test/attic/files/cli/test2/Main.check.jikes5 b/test/attic/files/cli/test2/Main.check.jikes5
deleted file mode 100644
index 97943e8347..0000000000
--- a/test/attic/files/cli/test2/Main.check.jikes5
+++ /dev/null
@@ -1,9 +0,0 @@
-Error: "-dd" is an invalid option.
-use: jikes [options] [@files] file.java...
-For more help, try -help or -version.
-
-Found 1 semantic error compiling "files/cli/test2/Main.java":
-
- 6. test1.Main.main(args);
- ^---^
-*** Semantic Error: No accessible field named "test1" was found in type "test2.Main".
diff --git a/test/attic/files/cli/test2/Main.check.scala b/test/attic/files/cli/test2/Main.check.scala
deleted file mode 100644
index 7e5f17625b..0000000000
--- a/test/attic/files/cli/test2/Main.check.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-unknown option: '-cpp'
-scala [ <option> ]... [<torun> <arguments>]
-
-All options to scalac are allowed. See scalac -help.
-
-<torun>, if present, is an object or script file to run.
-If no <torun> is present, run an interactive interpreter.
-
-Option -howtorun allows explicitly specifying how to run <torun>:
- script: it is a script file
- object: it is an object name
- guess: (the default) try to guess
-
-Option -savecompiled requests that the compiled script be saved
-for future use.
-
-Option -nocompdaemon requests that the fsc offline compiler not be used.
-
-Option -Dproperty=value sets a Java system property.
-
-2: 1: test 1 passed (1)
-2: 1: test 2 passed (1)
-2: 1: test 3 passed (1)
-2: 1: test 4 passed (2)
diff --git a/test/attic/files/cli/test2/Main.check.scala_api b/test/attic/files/cli/test2/Main.check.scala_api
deleted file mode 100644
index bcb0f0c7fb..0000000000
--- a/test/attic/files/cli/test2/Main.check.scala_api
+++ /dev/null
@@ -1,37 +0,0 @@
-|-- all-classes.html
-|-- index.html
-|-- modules.html
-|-- nav-classes.html
-|-- root-content.html
-|-- scala
-| |-- Any.html
-| |-- AnyRef.html
-| |-- AnyVal.html
-| |-- Boolean.html
-| |-- Byte.html
-| |-- Char.html
-| |-- Double.html
-| |-- Float.html
-| |-- Int.html
-| |-- Long.html
-| |-- Nothing.html
-| |-- Null.html
-| |-- Short.html
-| |-- Unit.html
-| `-- runtime
-| |-- BoxedFloat.html
-| |-- BoxedInt.html
-| |-- BoxedLong.html
-| `-- BoxedNumber.html
-|-- script.js
-|-- style.css
-|-- test1
-| `-- Main$object.html
-|-- test1$content.html
-|-- test1$package.html
-|-- test2
-| `-- Main$object.html
-|-- test2$content.html
-`-- test2$package.html
-
-4 directories, 31 files
diff --git a/test/attic/files/cli/test2/Main.check.scala_j9 b/test/attic/files/cli/test2/Main.check.scala_j9
deleted file mode 100644
index 80cbb50fa9..0000000000
--- a/test/attic/files/cli/test2/Main.check.scala_j9
+++ /dev/null
@@ -1,15 +0,0 @@
-unknown option: '-cpp'
-scala [ <compiler-option> | -howtorun:how ]... [<torun> <arguments>]
-
-<compiler-option>'s are as for scalac; see scalac -help.
-<torun>, if present, is an object or script file to run.
-If no <torun> is present, run an interactive interpreter.
--howtorun allows explicitly specifying how to run <torun>:
- script: it is a script file
- object: it is an object name
- guess: (the default) try to guess
-
-2: 1: test 1 passed (1)
-2: 1: test 2 passed (1)
-2: 1: test 3 passed (1)
-2: 1: test 4 passed (2)
diff --git a/test/attic/files/cli/test2/Main.check.scalac b/test/attic/files/cli/test2/Main.check.scalac
deleted file mode 100644
index 8465810d0b..0000000000
--- a/test/attic/files/cli/test2/Main.check.scalac
+++ /dev/null
@@ -1,63 +0,0 @@
-scalac error: bad option: '-dd'
- scalac -help gives more information
-Usage: scalac <options | source files>
-where possible options include:
- -doc Generate documentation
- -g:<g> Generate debugging info (none,source,line,vars,notc)
- -nowarn Generate no warnings
- -noassert Generate no assertions and assumptions
- -verbose Output messages about what the compiler is doing
- -classpath <path> Specify where to find user class files
- -sourcepath <path> Specify where to find input source files
- -bootclasspath <path> Override location of bootstrap class files
- -extdirs <dirs> Override location of installed extensions
- -d <directory> Specify where to place generated class files
- -encoding <encoding> Specify character encoding used by source files
- -windowtitle <windowtitle> Specify window title of generated HTML documentation
- -documenttitle <documenttitle> Specify document title of generated HTML documentation
- -target:<target> Specify which backend to use (jvm-1.5,msil)
- -migrate Assist in migrating from Scala version 1.0
- -o <file> Name of the output assembly (only relevant with -target:msil)
- -r <path> List of assemblies referenced by the program (only relevant with -target:msil)
- -debug Output debugging messages
- -deprecation enable detailed deprecation warnings
- -unchecked enable detailed unchecked warnings
- -statistics Print compiler statistics
- -explaintypes Explain type errors in more detail
- -resident Compiler stays resident, files to compile are read from standard input
- -uniqid Print identifiers with unique names (debugging option)
- -printtypes Print tree types (debugging option)
- -prompt Display a prompt after each error (debugging option)
- -noimports Compile without any implicit imports
- -nopredefs Compile without any implicit predefined values
- -skip:<phase> Skip <phase>
- -check:<phase> Check the tree at start of <phase>
- -print:<phase> Print out program after <phase>
- -printer:<printer> Printer to use (text,html)
- -printfile <file> Specify file in which to print trees
- -graph:<phase> Graph the program after <phase>
- -browse:<phase> Browse the abstract syntax tree after <phase>
- -stop:<phase> Stop after phase <phase>
- -log:<phase> Log operations in <phase>
- -logall Log all operations
- -version Print product version and exit
- -help Print a synopsis of standard options
- -nouescape disables handling of \u unicode escapes
- -Xinline Perform inlining when possible
- -XO Optimize. implies -Xinline, -Xcloselim and -Xdce
- -Xcloselim Perform closure elimination
- -Xdce Perform dead code elimination
- -Xwarndeadcode Emit warnings for dead code
- -XbytecodeRead Enable bytecode reader.
- -Xdetach Perform detaching of remote closures
- -Xshowcls <class> Show class info
- -Xshowobj <object> Show object info
- -Xlinearizer:<Xlinearizer> Linearizer to use (normal,dfs,rpo,dump)
- -Xgenerics Use generic Java types
- -Xprintpos Print tree positions (as offsets)
- -Xscript compile script file
- -Xexperimental enable experimental extensions
- -Xplugtypes parse but ignore annotations in more locations
- -Xkilloption optimizes option types
-
-one error found
diff --git a/test/attic/files/cli/test2/Main.check.scalaint b/test/attic/files/cli/test2/Main.check.scalaint
deleted file mode 100644
index 89b6766bb5..0000000000
--- a/test/attic/files/cli/test2/Main.check.scalaint
+++ /dev/null
@@ -1,45 +0,0 @@
-unknown option: '-cpp'
-scala [ <option> ]... [<torun> <arguments>]
-
-All options to scalac are allowed. See scalac -help.
-
-<torun>, if present, is an object or script file to run.
-If no <torun> is present, run an interactive interpreter.
-
-Option -howtorun allows explicitly specifying how to run <torun>:
- script: it is a script file
- object: it is an object name
- guess: (the default) try to guess
-
-Option -savecompiled requests that the compiled script be saved
-for future use.
-
-Option -nocompdaemon requests that the fsc offline compiler not be used.
-
-Option -Dproperty=value sets a Java system property.
-
-
-This is an interpreter for Scala.
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> 2: 1: test 1 passed (1)
-unnamed0: scala.Unit = ()
-
-scala>
-This is an interpreter for Scala.
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> 2: 1: test 2 passed (1)
-unnamed0: scala.Unit = ()
-
-scala>
-This is an interpreter for Scala.
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> 2: 1: test 3 passed (1)
-unnamed0: scala.Unit = ()
-
-scala>
diff --git a/test/attic/files/cli/test2/Main.java b/test/attic/files/cli/test2/Main.java
deleted file mode 100644
index f6797632bf..0000000000
--- a/test/attic/files/cli/test2/Main.java
+++ /dev/null
@@ -1,8 +0,0 @@
-// @info 1 dependency
-package test2;
-public class Main {
- public static void main(String args[]) {
- System.out.print("2: ");
- test1.Main.main(args);
- }
-}
diff --git a/test/attic/files/cli/test2/Main.scala b/test/attic/files/cli/test2/Main.scala
deleted file mode 100644
index 11c878b9c0..0000000000
--- a/test/attic/files/cli/test2/Main.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-// @info 1 dependency
-package test2
-object Main {
- def main(args: Array[String]) = {
- Console.print("2: ")
- test1.Main.main(args)
- }
-}
diff --git a/test/attic/files/cli/test3/Main.check.j9vm5 b/test/attic/files/cli/test3/Main.check.j9vm5
deleted file mode 100644
index a094dc8daf..0000000000
--- a/test/attic/files/cli/test3/Main.check.j9vm5
+++ /dev/null
@@ -1,5 +0,0 @@
-env: -cpp: No such file or directory
-env: test3.Main: No such file or directory
-env: -cp: No such file or directory
-3: 1: test 3 passed
-3: 2: 1: test 3 passed
diff --git a/test/attic/files/cli/test3/Main.check.java b/test/attic/files/cli/test3/Main.check.java
deleted file mode 100644
index de3eb7b136..0000000000
--- a/test/attic/files/cli/test3/Main.check.java
+++ /dev/null
@@ -1,10 +0,0 @@
-Unrecognized option: -cpp
-Could not create the Java virtual machine.
-3: 1: test 1 passed (1)
-3: 2: 1: test 1 passed (1)
-3: 1: test 2 passed (1)
-3: 2: 1: test 2 passed (1)
-3: 1: test 3 passed (1)
-3: 2: 1: test 3 passed (1)
-3: 1: test 4 passed (2)
-3: 2: 1: test 4 passed (2)
diff --git a/test/attic/files/cli/test3/Main.check.java5 b/test/attic/files/cli/test3/Main.check.java5
deleted file mode 100644
index de3eb7b136..0000000000
--- a/test/attic/files/cli/test3/Main.check.java5
+++ /dev/null
@@ -1,10 +0,0 @@
-Unrecognized option: -cpp
-Could not create the Java virtual machine.
-3: 1: test 1 passed (1)
-3: 2: 1: test 1 passed (1)
-3: 1: test 2 passed (1)
-3: 2: 1: test 2 passed (1)
-3: 1: test 3 passed (1)
-3: 2: 1: test 3 passed (1)
-3: 1: test 4 passed (2)
-3: 2: 1: test 4 passed (2)
diff --git a/test/attic/files/cli/test3/Main.check.java5_api b/test/attic/files/cli/test3/Main.check.java5_api
deleted file mode 100644
index f6112211f0..0000000000
--- a/test/attic/files/cli/test3/Main.check.java5_api
+++ /dev/null
@@ -1,29 +0,0 @@
-|-- allclasses-frame.html
-|-- allclasses-noframe.html
-|-- constant-values.html
-|-- deprecated-list.html
-|-- help-doc.html
-|-- index-all.html
-|-- index.html
-|-- overview-tree.html
-|-- package-list
-|-- resources
-| `-- inherit.gif
-|-- stylesheet.css
-|-- test1
-| |-- Main.html
-| |-- package-frame.html
-| |-- package-summary.html
-| `-- package-tree.html
-|-- test2
-| |-- Main.html
-| |-- package-frame.html
-| |-- package-summary.html
-| `-- package-tree.html
-`-- test3
- |-- Main.html
- |-- package-frame.html
- |-- package-summary.html
- `-- package-tree.html
-
-4 directories, 23 files
diff --git a/test/attic/files/cli/test3/Main.check.java5_j9 b/test/attic/files/cli/test3/Main.check.java5_j9
deleted file mode 100644
index 9e228d7649..0000000000
--- a/test/attic/files/cli/test3/Main.check.java5_j9
+++ /dev/null
@@ -1,36 +0,0 @@
-JVMJ9VM007E Command-line option unrecognised: -cpp
-Could not create the Java virtual machine.
-
-Usage: java [-options] class [args...]
- (to execute a class)
- or java [-jar] [-options] jarfile [args...]
- (to execute a jar file)
-
-where options include:
- -cp -classpath <directories and zip/jar files separated by :>
- set search path for application classes and resources
- -D<name>=<value>
- set a system property
- -verbose[:class|gc|jni]
- enable verbose output
- -version print product version
- -version:<value>
- require the specified version to run
- -showversion print product version and continue
- -jre-restrict-search | -no-jre-restrict-search
- include/exclude user private JREs in the version search
- -agentlib:<libname>[=<options>]
- load native agent library <libname>, e.g. -agentlib:hprof
- see also, -agentlib:jdwp=help and -agentlib:hprof=help
- -agentpath:<pathname>[=<options>]
- load native agent library by full pathname
- -javaagent:<jarpath>[=<options>]
- load Java programming language agent, see java.lang.instrument
- -? -help print this help message
- -X print help on non-standard options
- -assert print help on assert options
-
-The java class is not found: test3.Main
-The java class is not found: test3.Main
-The java class is not found: test3.Main
-The java class is not found: test3.Main
diff --git a/test/attic/files/cli/test3/Main.check.javac b/test/attic/files/cli/test3/Main.check.javac
deleted file mode 100644
index 8d235b647b..0000000000
--- a/test/attic/files/cli/test3/Main.check.javac
+++ /dev/null
@@ -1,33 +0,0 @@
-javac: invalid flag: -dd
-Usage: javac <options> <source files>
-where possible options include:
- -g Generate all debugging info
- -g:none Generate no debugging info
- -g:{lines,vars,source} Generate only some debugging info
- -nowarn Generate no warnings
- -verbose Output messages about what the compiler is doing
- -deprecation Output source locations where deprecated APIs are used
- -classpath <path> Specify where to find user class files
- -sourcepath <path> Specify where to find input source files
- -bootclasspath <path> Override location of bootstrap class files
- -extdirs <dirs> Override location of installed extensions
- -d <directory> Specify where to place generated class files
- -encoding <encoding> Specify character encoding used by source files
- -source <release> Provide source compatibility with specified release
- -target <release> Generate class files for specific VM version
- -help Print a synopsis of standard options
-
-files/cli/test3/Main.java:6: package test1 does not exist
- test1.Main.main(args);
- ^
-files/cli/test3/Main.java:8: package test2 does not exist
- test2.Main.main(args);
- ^
-2 errors
-files/cli/test3/Main.java:6: package test1 does not exist
- test1.Main.main(args);
- ^
-files/cli/test3/Main.java:8: package test2 does not exist
- test2.Main.main(args);
- ^
-2 errors
diff --git a/test/attic/files/cli/test3/Main.check.javac5 b/test/attic/files/cli/test3/Main.check.javac5
deleted file mode 100644
index 3a48fa000e..0000000000
--- a/test/attic/files/cli/test3/Main.check.javac5
+++ /dev/null
@@ -1,31 +0,0 @@
-javac: invalid flag: -dd
-Usage: javac <options> <source files>
-where possible options include:
- -g Generate all debugging info
- -g:none Generate no debugging info
- -g:{lines,vars,source} Generate only some debugging info
- -nowarn Generate no warnings
- -verbose Output messages about what the compiler is doing
- -deprecation Output source locations where deprecated APIs are used
- -classpath <path> Specify where to find user class files
- -cp <path> Specify where to find user class files
- -sourcepath <path> Specify where to find input source files
- -bootclasspath <path> Override location of bootstrap class files
- -extdirs <dirs> Override location of installed extensions
- -endorseddirs <dirs> Override location of endorsed standards path
- -d <directory> Specify where to place generated class files
- -encoding <encoding> Specify character encoding used by source files
- -source <release> Provide source compatibility with specified release
- -target <release> Generate class files for specific VM version
- -version Version information
- -help Print a synopsis of standard options
- -X Print a synopsis of nonstandard options
- -J<flag> Pass <flag> directly to the runtime system
-
-files/cli/test3/Main.java:6: package test1 does not exist
- test1.Main.main(args);
- ^
-files/cli/test3/Main.java:8: package test2 does not exist
- test2.Main.main(args);
- ^
-2 errors
diff --git a/test/attic/files/cli/test3/Main.check.javac6 b/test/attic/files/cli/test3/Main.check.javac6
deleted file mode 100644
index 677b950aed..0000000000
--- a/test/attic/files/cli/test3/Main.check.javac6
+++ /dev/null
@@ -1,36 +0,0 @@
-javac: invalid flag: -dd
-Usage: javac <options> <source files>
-where possible options include:
- -g Generate all debugging info
- -g:none Generate no debugging info
- -g:{lines,vars,source} Generate only some debugging info
- -nowarn Generate no warnings
- -verbose Output messages about what the compiler is doing
- -deprecation Output source locations where deprecated APIs are used
- -classpath <path> Specify where to find user class files and annotation processors
- -cp <path> Specify where to find user class files and annotation processors
- -sourcepath <path> Specify where to find input source files
- -bootclasspath <path> Override location of bootstrap class files
- -extdirs <dirs> Override location of installed extensions
- -endorseddirs <dirs> Override location of endorsed standards path
- -proc:{none, only} Control whether annotation processing and/or compilation is done.
- -processor <class> Name of the annotation processor to run; bypasses default discovery process
- -processorpath <path> Specify where to find annotation processors
- -d <directory> Specify where to place generated class files
- -s <directory> Specify where to place generated source files
- -encoding <encoding> Specify character encoding used by source files
- -source <release> Provide source compatibility with specified release
- -target <release> Generate class files for specific VM version
- -version Version information
- -help Print a synopsis of standard options
- -A[key[=value]] Options to pass to annotation processors
- -X Print a synopsis of nonstandard options
- -J<flag> Pass <flag> directly to the runtime system
-
-files/cli/test3/Main.java:5: package test1 does not exist
- test1.Main.main(args);
- ^
-files/cli/test3/Main.java:7: package test2 does not exist
- test2.Main.main(args);
- ^
-2 errors
diff --git a/test/attic/files/cli/test3/Main.check.jikes b/test/attic/files/cli/test3/Main.check.jikes
deleted file mode 100644
index 604333e81a..0000000000
--- a/test/attic/files/cli/test3/Main.check.jikes
+++ /dev/null
@@ -1,14 +0,0 @@
-Error: "-dd" is an invalid option.
-use: jikes [options] [@files] file.java...
-For more help, try -help or -version.
-
-Found 2 semantic errors compiling "files/cli/test3/Main.java":
-
- 6. test1.Main.main(args);
- ^---^
-*** Semantic Error: No accessible field named "test1" was found in type "test3.Main".
-
-
- 8. test2.Main.main(args);
- ^---^
-*** Semantic Error: No accessible field named "test2" was found in type "test3.Main".
diff --git a/test/attic/files/cli/test3/Main.check.jikes5 b/test/attic/files/cli/test3/Main.check.jikes5
deleted file mode 100644
index 604333e81a..0000000000
--- a/test/attic/files/cli/test3/Main.check.jikes5
+++ /dev/null
@@ -1,14 +0,0 @@
-Error: "-dd" is an invalid option.
-use: jikes [options] [@files] file.java...
-For more help, try -help or -version.
-
-Found 2 semantic errors compiling "files/cli/test3/Main.java":
-
- 6. test1.Main.main(args);
- ^---^
-*** Semantic Error: No accessible field named "test1" was found in type "test3.Main".
-
-
- 8. test2.Main.main(args);
- ^---^
-*** Semantic Error: No accessible field named "test2" was found in type "test3.Main".
diff --git a/test/attic/files/cli/test3/Main.check.scala b/test/attic/files/cli/test3/Main.check.scala
deleted file mode 100644
index f78729b9a2..0000000000
--- a/test/attic/files/cli/test3/Main.check.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-unknown option: '-cpp'
-scala [ <option> ]... [<torun> <arguments>]
-
-All options to scalac are allowed. See scalac -help.
-
-<torun>, if present, is an object or script file to run.
-If no <torun> is present, run an interactive interpreter.
-
-Option -howtorun allows explicitly specifying how to run <torun>:
- script: it is a script file
- object: it is an object name
- guess: (the default) try to guess
-
-Option -savecompiled requests that the compiled script be saved
-for future use.
-
-Option -nocompdaemon requests that the fsc offline compiler not be used.
-
-Option -Dproperty=value sets a Java system property.
-
-3: 1: test 1 passed (1)
-3: 2: 1: test 1 passed (1)
-3: 1: test 2 passed (1)
-3: 2: 1: test 2 passed (1)
-3: 1: test 3 passed (1)
-3: 2: 1: test 3 passed (1)
-3: 1: test 4 passed (2)
-3: 2: 1: test 4 passed (2)
diff --git a/test/attic/files/cli/test3/Main.check.scala_api b/test/attic/files/cli/test3/Main.check.scala_api
deleted file mode 100644
index 4552819b5b..0000000000
--- a/test/attic/files/cli/test3/Main.check.scala_api
+++ /dev/null
@@ -1,41 +0,0 @@
-|-- all-classes.html
-|-- index.html
-|-- modules.html
-|-- nav-classes.html
-|-- root-content.html
-|-- scala
-| |-- Any.html
-| |-- AnyRef.html
-| |-- AnyVal.html
-| |-- Boolean.html
-| |-- Byte.html
-| |-- Char.html
-| |-- Double.html
-| |-- Float.html
-| |-- Int.html
-| |-- Long.html
-| |-- Nothing.html
-| |-- Null.html
-| |-- Short.html
-| |-- Unit.html
-| `-- runtime
-| |-- BoxedFloat.html
-| |-- BoxedInt.html
-| |-- BoxedLong.html
-| `-- BoxedNumber.html
-|-- script.js
-|-- style.css
-|-- test1
-| `-- Main$object.html
-|-- test1$content.html
-|-- test1$package.html
-|-- test2
-| `-- Main$object.html
-|-- test2$content.html
-|-- test2$package.html
-|-- test3
-| `-- Main$object.html
-|-- test3$content.html
-`-- test3$package.html
-
-5 directories, 34 files
diff --git a/test/attic/files/cli/test3/Main.check.scala_j9 b/test/attic/files/cli/test3/Main.check.scala_j9
deleted file mode 100644
index 3804c17636..0000000000
--- a/test/attic/files/cli/test3/Main.check.scala_j9
+++ /dev/null
@@ -1,19 +0,0 @@
-unknown option: '-cpp'
-scala [ <compiler-option> | -howtorun:how ]... [<torun> <arguments>]
-
-<compiler-option>'s are as for scalac; see scalac -help.
-<torun>, if present, is an object or script file to run.
-If no <torun> is present, run an interactive interpreter.
--howtorun allows explicitly specifying how to run <torun>:
- script: it is a script file
- object: it is an object name
- guess: (the default) try to guess
-
-3: 1: test 1 passed (1)
-3: 2: 1: test 1 passed (1)
-3: 1: test 2 passed (1)
-3: 2: 1: test 2 passed (1)
-3: 1: test 3 passed (1)
-3: 2: 1: test 3 passed (1)
-3: 1: test 4 passed (2)
-3: 2: 1: test 4 passed (2)
diff --git a/test/attic/files/cli/test3/Main.check.scalac b/test/attic/files/cli/test3/Main.check.scalac
deleted file mode 100644
index 8465810d0b..0000000000
--- a/test/attic/files/cli/test3/Main.check.scalac
+++ /dev/null
@@ -1,63 +0,0 @@
-scalac error: bad option: '-dd'
- scalac -help gives more information
-Usage: scalac <options | source files>
-where possible options include:
- -doc Generate documentation
- -g:<g> Generate debugging info (none,source,line,vars,notc)
- -nowarn Generate no warnings
- -noassert Generate no assertions and assumptions
- -verbose Output messages about what the compiler is doing
- -classpath <path> Specify where to find user class files
- -sourcepath <path> Specify where to find input source files
- -bootclasspath <path> Override location of bootstrap class files
- -extdirs <dirs> Override location of installed extensions
- -d <directory> Specify where to place generated class files
- -encoding <encoding> Specify character encoding used by source files
- -windowtitle <windowtitle> Specify window title of generated HTML documentation
- -documenttitle <documenttitle> Specify document title of generated HTML documentation
- -target:<target> Specify which backend to use (jvm-1.5,msil)
- -migrate Assist in migrating from Scala version 1.0
- -o <file> Name of the output assembly (only relevant with -target:msil)
- -r <path> List of assemblies referenced by the program (only relevant with -target:msil)
- -debug Output debugging messages
- -deprecation enable detailed deprecation warnings
- -unchecked enable detailed unchecked warnings
- -statistics Print compiler statistics
- -explaintypes Explain type errors in more detail
- -resident Compiler stays resident, files to compile are read from standard input
- -uniqid Print identifiers with unique names (debugging option)
- -printtypes Print tree types (debugging option)
- -prompt Display a prompt after each error (debugging option)
- -noimports Compile without any implicit imports
- -nopredefs Compile without any implicit predefined values
- -skip:<phase> Skip <phase>
- -check:<phase> Check the tree at start of <phase>
- -print:<phase> Print out program after <phase>
- -printer:<printer> Printer to use (text,html)
- -printfile <file> Specify file in which to print trees
- -graph:<phase> Graph the program after <phase>
- -browse:<phase> Browse the abstract syntax tree after <phase>
- -stop:<phase> Stop after phase <phase>
- -log:<phase> Log operations in <phase>
- -logall Log all operations
- -version Print product version and exit
- -help Print a synopsis of standard options
- -nouescape disables handling of \u unicode escapes
- -Xinline Perform inlining when possible
- -XO Optimize. implies -Xinline, -Xcloselim and -Xdce
- -Xcloselim Perform closure elimination
- -Xdce Perform dead code elimination
- -Xwarndeadcode Emit warnings for dead code
- -XbytecodeRead Enable bytecode reader.
- -Xdetach Perform detaching of remote closures
- -Xshowcls <class> Show class info
- -Xshowobj <object> Show object info
- -Xlinearizer:<Xlinearizer> Linearizer to use (normal,dfs,rpo,dump)
- -Xgenerics Use generic Java types
- -Xprintpos Print tree positions (as offsets)
- -Xscript compile script file
- -Xexperimental enable experimental extensions
- -Xplugtypes parse but ignore annotations in more locations
- -Xkilloption optimizes option types
-
-one error found
diff --git a/test/attic/files/cli/test3/Main.check.scalaint b/test/attic/files/cli/test3/Main.check.scalaint
deleted file mode 100644
index cffa02c5b6..0000000000
--- a/test/attic/files/cli/test3/Main.check.scalaint
+++ /dev/null
@@ -1,48 +0,0 @@
-unknown option: '-cpp'
-scala [ <option> ]... [<torun> <arguments>]
-
-All options to scalac are allowed. See scalac -help.
-
-<torun>, if present, is an object or script file to run.
-If no <torun> is present, run an interactive interpreter.
-
-Option -howtorun allows explicitly specifying how to run <torun>:
- script: it is a script file
- object: it is an object name
- guess: (the default) try to guess
-
-Option -savecompiled requests that the compiled script be saved
-for future use.
-
-Option -nocompdaemon requests that the fsc offline compiler not be used.
-
-Option -Dproperty=value sets a Java system property.
-
-
-This is an interpreter for Scala.
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> 3: 1: test 1 passed (1)
-3: 2: 1: test 1 passed (1)
-unnamed0: scala.Unit = ()
-
-scala>
-This is an interpreter for Scala.
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> 3: 1: test 2 passed (1)
-3: 2: 1: test 2 passed (1)
-unnamed0: scala.Unit = ()
-
-scala>
-This is an interpreter for Scala.
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> 3: 1: test 3 passed (1)
-3: 2: 1: test 3 passed (1)
-unnamed0: scala.Unit = ()
-
-scala>
diff --git a/test/attic/files/cli/test3/Main.java b/test/attic/files/cli/test3/Main.java
deleted file mode 100644
index 208863d012..0000000000
--- a/test/attic/files/cli/test3/Main.java
+++ /dev/null
@@ -1,10 +0,0 @@
-// @info 2 dependency
-package test3;
-public class Main {
- public static void main(String args[]) {
- System.out.print("3: ");
- test1.Main.main(args);
- System.out.print("3: ");
- test2.Main.main(args);
- }
-}
diff --git a/test/attic/files/cli/test3/Main.scala b/test/attic/files/cli/test3/Main.scala
deleted file mode 100644
index 63fc11b771..0000000000
--- a/test/attic/files/cli/test3/Main.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-// @info 2 dependencies
-package test3
-object Main {
- def main(args: Array[String]) = {
- Console.print("3: ")
- test1.Main.main(args)
- Console.print("3: ")
- test2.Main.main(args)
- }
-}
diff --git a/test/files/disabled/A.scala b/test/disabled/buildmanager/overloaded_1/A.scala
index c070faf978..c070faf978 100644
--- a/test/files/disabled/A.scala
+++ b/test/disabled/buildmanager/overloaded_1/A.scala
diff --git a/test/files/disabled/overloaded_1.check b/test/disabled/buildmanager/overloaded_1/overloaded_1.check
index 4d643ce6b4..4d643ce6b4 100644
--- a/test/files/disabled/overloaded_1.check
+++ b/test/disabled/buildmanager/overloaded_1/overloaded_1.check
diff --git a/test/files/disabled/overloaded_1.test b/test/disabled/buildmanager/overloaded_1/overloaded_1.test
index 392e0d365f..392e0d365f 100644
--- a/test/files/disabled/overloaded_1.test
+++ b/test/disabled/buildmanager/overloaded_1/overloaded_1.test
diff --git a/test/files/disabled/t4245/A.scala b/test/disabled/buildmanager/t4245/A.scala
index 7c4efe1b4b..7c4efe1b4b 100644
--- a/test/files/disabled/t4245/A.scala
+++ b/test/disabled/buildmanager/t4245/A.scala
diff --git a/test/files/disabled/t4245/t4245.check b/test/disabled/buildmanager/t4245/t4245.check
index 3d3898c671..3d3898c671 100644
--- a/test/files/disabled/t4245/t4245.check
+++ b/test/disabled/buildmanager/t4245/t4245.check
diff --git a/test/files/disabled/t4245/t4245.test b/test/disabled/buildmanager/t4245/t4245.test
index 392e0d365f..392e0d365f 100644
--- a/test/files/disabled/t4245/t4245.test
+++ b/test/disabled/buildmanager/t4245/t4245.test
diff --git a/test/disabled/pos/spec-List.scala b/test/disabled/pos/spec-List.scala
index 81e55f46cb..b31e035c1b 100644
--- a/test/disabled/pos/spec-List.scala
+++ b/test/disabled/pos/spec-List.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/test/disabled/presentation/akka.flags b/test/disabled/presentation/akka.flags
index 56d026a62d..9bf2878f62 100644
--- a/test/disabled/presentation/akka.flags
+++ b/test/disabled/presentation/akka.flags
@@ -12,7 +12,7 @@
# running partest from. Run it from the root scala checkout for these files to resolve correctly
# (by default when running 'ant test', or 'test/partest'). Paths use Unix separators, the test
# framework translates them to the platform dependent representation.
-# -bootclasspath lib/scala-compiler.jar:lib/scala-library.jar:lib/fjbg.jar
+# -bootclasspath lib/scala-compiler.jar:lib/scala-library.jar
# the following line would test using the quick compiler
-# -bootclasspath build/quick/classes/compiler:build/quick/classes/library:lib/fjbg.jar
+# -bootclasspath build/quick/classes/compiler:build/quick/classes/library
diff --git a/test/disabled/presentation/akka/src/akka/dispatch/Dispatchers.scala b/test/disabled/presentation/akka/src/akka/dispatch/Dispatchers.scala
index 7dd1bf6218..a567d0bcb0 100644
--- a/test/disabled/presentation/akka/src/akka/dispatch/Dispatchers.scala
+++ b/test/disabled/presentation/akka/src/akka/dispatch/Dispatchers.scala
@@ -89,7 +89,7 @@ object Dispatchers {
new ThreadBasedDispatcher(actor, mailboxCapacity, pushTimeOut)
/**
- * Creates a executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool.
+ * Creates an executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool.
* <p/>
* Has a fluent builder interface for configuring its semantics.
*/
@@ -97,7 +97,7 @@ object Dispatchers {
ThreadPoolConfigDispatcherBuilder(config => new ExecutorBasedEventDrivenDispatcher(name, config), ThreadPoolConfig())
/**
- * Creates a executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool.
+ * Creates an executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool.
* <p/>
* Has a fluent builder interface for configuring its semantics.
*/
@@ -106,7 +106,7 @@ object Dispatchers {
new ExecutorBasedEventDrivenDispatcher(name, throughput, THROUGHPUT_DEADLINE_TIME_MILLIS, mailboxType, config), ThreadPoolConfig())
/**
- * Creates a executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool.
+ * Creates an executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool.
* <p/>
* Has a fluent builder interface for configuring its semantics.
*/
@@ -115,7 +115,7 @@ object Dispatchers {
new ExecutorBasedEventDrivenDispatcher(name, throughput, throughputDeadlineMs, mailboxType, config), ThreadPoolConfig())
/**
- * Creates a executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
+ * Creates an executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
* <p/>
* Has a fluent builder interface for configuring its semantics.
*/
@@ -123,7 +123,7 @@ object Dispatchers {
ThreadPoolConfigDispatcherBuilder(config => new ExecutorBasedEventDrivenWorkStealingDispatcher(name, config), ThreadPoolConfig())
/**
- * Creates a executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
+ * Creates an executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
* <p/>
* Has a fluent builder interface for configuring its semantics.
*/
@@ -132,7 +132,7 @@ object Dispatchers {
new ExecutorBasedEventDrivenWorkStealingDispatcher(name, throughput, THROUGHPUT_DEADLINE_TIME_MILLIS, MAILBOX_TYPE, config), ThreadPoolConfig())
/**
- * Creates a executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
+ * Creates an executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
* <p/>
* Has a fluent builder interface for configuring its semantics.
*/
@@ -141,7 +141,7 @@ object Dispatchers {
new ExecutorBasedEventDrivenWorkStealingDispatcher(name, throughput, THROUGHPUT_DEADLINE_TIME_MILLIS, mailboxType, config), ThreadPoolConfig())
/**
- * Creates a executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
+ * Creates an executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
* <p/>
* Has a fluent builder interface for configuring its semantics.
*/
@@ -224,4 +224,4 @@ class ExecutorBasedEventDrivenWorkStealingDispatcherConfigurator extends Message
mailboxType(config),
threadPoolConfig)).build
}
-} \ No newline at end of file
+}
diff --git a/test/disabled/presentation/simple-tests.check b/test/disabled/presentation/simple-tests.check
index cdb80ed987..0f72cb5ab9 100644
--- a/test/disabled/presentation/simple-tests.check
+++ b/test/disabled/presentation/simple-tests.check
@@ -187,8 +187,6 @@ TypeMember(value Xshowobj,Tester.this.settings.StringSetting,false,true,<none>)
TypeMember(value Xshowtrees,Tester.this.settings.BooleanSetting,false,true,<none>)
TypeMember(value Xwarnfatal,Tester.this.settings.BooleanSetting,false,true,<none>)
TypeMember(value Xwarninit,Tester.this.settings.BooleanSetting,false,true,<none>)
-TypeMember(value Ybuilderdebug,Tester.this.settings.ChoiceSetting,false,true,<none>)
-TypeMember(value Ybuildmanagerdebug,Tester.this.settings.BooleanSetting,false,true,<none>)
TypeMember(value Ycompacttrees,Tester.this.settings.BooleanSetting,false,true,<none>)
TypeMember(value Ycompletion,Tester.this.settings.BooleanSetting,false,true,<none>)
TypeMember(value YdepMethTpes,Tester.this.settings.BooleanSetting,false,true,<none>)
diff --git a/test/disabled/presentation/simple-tests.opts b/test/disabled/presentation/simple-tests.opts
index 8529bbf1a0..d651316984 100644
--- a/test/disabled/presentation/simple-tests.opts
+++ b/test/disabled/presentation/simple-tests.opts
@@ -12,7 +12,7 @@
# running partest from. Run it from the root scala checkout for these files to resolve correctly
# (by default when running 'ant test', or 'test/partest'). Paths use Unix separators, the test
# framework translates them to the platform dependent representation.
--bootclasspath lib/scala-compiler.jar:lib/scala-library.jar:lib/fjbg.jar
+-bootclasspath lib/scala-compiler.jar:lib/scala-library.jar
# the following line would test using the quick compiler
-# -bootclasspath build/quick/classes/compiler:build/quick/classes/library:lib/fjbg.jar
+# -bootclasspath build/quick/classes/compiler:build/quick/classes/library
diff --git a/test/files/run/t4146.scala b/test/disabled/run/t4146.scala
index 93ce22b519..a17de50ee1 100644
--- a/test/files/run/t4146.scala
+++ b/test/disabled/run/t4146.scala
@@ -1,4 +1,4 @@
-object bob extends Application {
+object bob extends App {
var name = "Bob"
}
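
The move above also swaps the deprecated Application trait for App. A minimal sketch of the practical difference, with the rationale stated only as a comment (it is general Scala knowledge, not something this diff spells out):

// With App, the object body is captured via delayedInit and executed from main(),
// whereas the old Application trait ran it during object construction, which
// interacted badly with JIT optimization and with threads touching the object early.
object bob extends App {
  var name = "Bob"
  println("name = " + name)
}
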
diff --git a/test/files/ant/imported.xml b/test/files/ant/imported.xml
index 5a4dfc319b..182c80aadf 100644
--- a/test/files/ant/imported.xml
+++ b/test/files/ant/imported.xml
@@ -56,7 +56,6 @@ INITIALISATION
<property name="scala.dir" value="${quick.dir}"/>
<property name="scala-library.lib" value="${scala.dir}/classes/library/"/>
<property name="scala-compiler.lib" value="${scala.dir}/classes/compiler/"/>
- <property name="fjbg.lib" value="${project.dir}/lib/fjbg.jar"/>
</target>
<target name="pack.init" if="pack.binary">
@@ -67,7 +66,6 @@ INITIALISATION
<property name="scala.dir" value="${pack.dir}"/>
<property name="scala-library.lib" value="${scala.dir}/lib/scala-library.jar"/>
<property name="scala-compiler.lib" value="${scala.dir}/lib/scala-compiler.jar"/>
- <property name="fjbg.lib" value=""/>
</target>
<target name="latest.init" if="latest.binary">
@@ -78,7 +76,6 @@ INITIALISATION
<property name="scala.dir" value="${latest.dir}"/>
<property name="scala-library.lib" value="${scala.dir}/lib/scala-library.jar"/>
<property name="scala-compiler.lib" value="${scala.dir}/lib/scala-compiler.jar"/>
- <property name="fjbg.lib" value=""/>
</target>
<target name="installed.init" if="installed.binary">
@@ -89,7 +86,6 @@ INITIALISATION
<property name="scala.dir" value="${installed.dir}"/>
<property name="scala-library.lib" value="${scala.dir}/lib/scala-library.jar"/>
<property name="scala-compiler.lib" value="${scala.dir}/lib/scala-compiler.jar"/>
- <property name="fjbg.lib" value=""/>
</target>
<target name="init" depends="quick.init, pack.init, latest.init, installed.init">
@@ -98,7 +94,6 @@ INITIALISATION
<path id="scala.classpath">
<pathelement location="${scala-library.lib}"/>
<pathelement location="${scala-compiler.lib}"/>
- <pathelement location="${fjbg.lib}"/> <!-- only present for 'quick' -->
</path>
<fail message="Scala library '${scala-library.lib}' or '${scala-compiler.lib}' is missing/broken">
diff --git a/test/files/bench/equality/eqeq.eqlog b/test/files/bench/equality/eqeq.eqlog
index d1e27aceed..55a5eb430a 100644
--- a/test/files/bench/equality/eqeq.eqlog
+++ b/test/files/bench/equality/eqeq.eqlog
@@ -1,42 +1,42 @@
-Banchmark results for testing equality operations:
-eq.scala: Base case, use eq equality only
-eqeq.scala: Test case, use == instead of eq.
-All tests run on Thinkpad T400, 1.6.0_12 client VM.
-Test command: java eq 5 5
- java eqeq 5 5
-eq.scala, no -optimise
-eq$ 109 78 79 63 63
-eq$ 94 63 63 78 78
-eq$ 94 62 62 62 78
-eq$ 94 78 78 78 78
-eq$ 94 78 78 78 78
-eq.scala, with -optimise
-eq$ 421 63 62 47 63
-eq$ 406 62 62 63 62
-eq$ 407 62 62 78 63
-eq$ 406 63 63 62 62
-eq$ 407 62 62 63 47
-eqeq.scala with version of BoxesRuntime as of Nov 13th, no -optimise
-eqeq$ 562 516 516 516 515
-eqeq$ 547 515 515 531 532
-eqeq$ 532 516 516 515 516
-eqeq$ 547 531 531 516 531
-eqeq$ 547 515 515 516 516
-eqeq.scala with version of BoxesRuntime as of Nov 13th, with -optimise
-eqeq$ 1031 390 391 391 391
-eqeq$ 1031 391 391 391 390
-eqeq$ 1031 390 390 391 391
-eqeq$ 1031 406 407 391 390
-eqeq$ 1031 390 390 391 391
-eqeq.scala with 1st optimized of Nov 14th, no -optimise
-eqeq$ 484 421 438 438 437
-eqeq$ 484 438 437 437 438
-eqeq$ 469 437 453 454 438
-eqeq$ 468 437 438 468 438
-eqeq$ 485 437 437 422 438
-eqeq.scala with 1st optimized of Nov 14th, with -optimise
-eqeq$ 1016 375 391 375 375
-eqeq$ 1016 375 391 390 375
-eqeq$ 1016 390 391 375 375
-eqeq$ 1015 375 391 390 375
-eqeq$ 1016 390 375 375 375
+Banchmark results for testing equality operations:
+eq.scala: Base case, use eq equality only
+eqeq.scala: Test case, use == instead of eq.
+All tests run on Thinkpad T400, 1.6.0_12 client VM.
+Test command: java eq 5 5
+ java eqeq 5 5
+eq.scala, no -optimise
+eq$ 109 78 79 63 63
+eq$ 94 63 63 78 78
+eq$ 94 62 62 62 78
+eq$ 94 78 78 78 78
+eq$ 94 78 78 78 78
+eq.scala, with -optimise
+eq$ 421 63 62 47 63
+eq$ 406 62 62 63 62
+eq$ 407 62 62 78 63
+eq$ 406 63 63 62 62
+eq$ 407 62 62 63 47
+eqeq.scala with version of BoxesRuntime as of Nov 13th, no -optimise
+eqeq$ 562 516 516 516 515
+eqeq$ 547 515 515 531 532
+eqeq$ 532 516 516 515 516
+eqeq$ 547 531 531 516 531
+eqeq$ 547 515 515 516 516
+eqeq.scala with version of BoxesRuntime as of Nov 13th, with -optimise
+eqeq$ 1031 390 391 391 391
+eqeq$ 1031 391 391 391 390
+eqeq$ 1031 390 390 391 391
+eqeq$ 1031 406 407 391 390
+eqeq$ 1031 390 390 391 391
+eqeq.scala with 1st optimized of Nov 14th, no -optimise
+eqeq$ 484 421 438 438 437
+eqeq$ 484 438 437 437 438
+eqeq$ 469 437 453 454 438
+eqeq$ 468 437 438 468 438
+eqeq$ 485 437 437 422 438
+eqeq.scala with 1st optimized of Nov 14th, with -optimise
+eqeq$ 1016 375 391 375 375
+eqeq$ 1016 375 391 390 375
+eqeq$ 1016 390 391 375 375
+eqeq$ 1015 375 391 390 375
+eqeq$ 1016 390 375 375 375
diff --git a/test/files/buildmanager/annotated/A.scala b/test/files/buildmanager/annotated/A.scala
deleted file mode 100644
index 4130cf21ec..0000000000
--- a/test/files/buildmanager/annotated/A.scala
+++ /dev/null
@@ -1 +0,0 @@
-case class A[T](x: String, y: T)
diff --git a/test/files/buildmanager/annotated/annotated.check b/test/files/buildmanager/annotated/annotated.check
deleted file mode 100644
index ce92c9a294..0000000000
--- a/test/files/buildmanager/annotated/annotated.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(), object A -> List())
diff --git a/test/files/buildmanager/annotated/annotated.test b/test/files/buildmanager/annotated/annotated.test
deleted file mode 100644
index 392e0d365f..0000000000
--- a/test/files/buildmanager/annotated/annotated.test
+++ /dev/null
@@ -1,2 +0,0 @@
->>compile A.scala
->>compile A.scala
diff --git a/test/files/buildmanager/freshnames/A.scala b/test/files/buildmanager/freshnames/A.scala
deleted file mode 100644
index e8ab26ca1e..0000000000
--- a/test/files/buildmanager/freshnames/A.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-abstract class A {
-
- var t: List[B]
-
- def foo(n: String): Option[B] = {
- t.reverse find (_.names contains n)
- }
-
- def bar(n: Int): Option[B] = {
- t.reverse find (_.names contains n)
- }
-}
-
-//class A
-case class B(names: List[String])
-
diff --git a/test/files/buildmanager/freshnames/B.scala b/test/files/buildmanager/freshnames/B.scala
deleted file mode 100644
index d700225c08..0000000000
--- a/test/files/buildmanager/freshnames/B.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-abstract class C extends A {
- def test(n: Int) = bar(n)
-}
-
diff --git a/test/files/buildmanager/freshnames/freshnames.check b/test/files/buildmanager/freshnames/freshnames.check
deleted file mode 100644
index 9f05fb8a36..0000000000
--- a/test/files/buildmanager/freshnames/freshnames.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > B.scala A.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(), class B -> List(), object B -> List())
diff --git a/test/files/buildmanager/freshnames/freshnames.test b/test/files/buildmanager/freshnames/freshnames.test
deleted file mode 100644
index 20b20298f9..0000000000
--- a/test/files/buildmanager/freshnames/freshnames.test
+++ /dev/null
@@ -1,2 +0,0 @@
->>compile B.scala A.scala
->>compile A.scala
diff --git a/test/files/buildmanager/infer/A.scala b/test/files/buildmanager/infer/A.scala
deleted file mode 100644
index 46b5391609..0000000000
--- a/test/files/buildmanager/infer/A.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-class Foo(flag: Boolean) {
- val classpath =
- if (flag)
- new AClasspath
- else
- new BClasspath
-}
-
-class AClasspath extends MergedClasspath[A]
-
-class BClasspath extends MergedClasspath[B]
-
-abstract class MergedClasspath[T]
-
-class A
-class B
diff --git a/test/files/buildmanager/infer/infer.check b/test/files/buildmanager/infer/infer.check
deleted file mode 100644
index 1f736977ff..0000000000
--- a/test/files/buildmanager/infer/infer.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(), class AClasspath -> List(), class B -> List(), class BClasspath -> List(), class Foo -> List(), class MergedClasspath -> List())
diff --git a/test/files/buildmanager/infer/infer.test b/test/files/buildmanager/infer/infer.test
deleted file mode 100644
index 392e0d365f..0000000000
--- a/test/files/buildmanager/infer/infer.test
+++ /dev/null
@@ -1,2 +0,0 @@
->>compile A.scala
->>compile A.scala
diff --git a/test/files/buildmanager/namesdefaults/defparam-use.scala b/test/files/buildmanager/namesdefaults/defparam-use.scala
deleted file mode 100644
index 5b5bbb3f4e..0000000000
--- a/test/files/buildmanager/namesdefaults/defparam-use.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-
-object Test extends App {
- val outer = new Outer
- new outer.Inner
-}
diff --git a/test/files/buildmanager/namesdefaults/defparam.scala b/test/files/buildmanager/namesdefaults/defparam.scala
deleted file mode 100644
index d817c719ab..0000000000
--- a/test/files/buildmanager/namesdefaults/defparam.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-class Outer {
-
- class Inner(val x: List[Int] = Nil)
-
-// lazy val Inner = "abc"
-}
-
diff --git a/test/files/buildmanager/namesdefaults/namesdefaults.check b/test/files/buildmanager/namesdefaults/namesdefaults.check
deleted file mode 100644
index 4a94d1fb55..0000000000
--- a/test/files/buildmanager/namesdefaults/namesdefaults.check
+++ /dev/null
@@ -1,9 +0,0 @@
-builder > defparam.scala defparam-use.scala
-compiling Set(defparam-use.scala, defparam.scala)
-Changes: Map()
-builder > defparam-use.scala
-compiling Set(defparam-use.scala)
-Changes: Map(class Test$delayedInit$body -> List(), object Test -> List())
-builder > defparam-use.scala
-compiling Set(defparam-use.scala)
-Changes: Map(class Test$delayedInit$body -> List(), object Test -> List())
diff --git a/test/files/buildmanager/namesdefaults/namesdefaults.test b/test/files/buildmanager/namesdefaults/namesdefaults.test
deleted file mode 100644
index 84ccc36bc3..0000000000
--- a/test/files/buildmanager/namesdefaults/namesdefaults.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile defparam.scala defparam-use.scala
->>compile defparam-use.scala
->>compile defparam-use.scala
diff --git a/test/files/buildmanager/simpletest/A.scala b/test/files/buildmanager/simpletest/A.scala
deleted file mode 100644
index ef704706bb..0000000000
--- a/test/files/buildmanager/simpletest/A.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class A {
- def foo = 2
-}
diff --git a/test/files/buildmanager/simpletest/B.scala b/test/files/buildmanager/simpletest/B.scala
deleted file mode 100644
index 364dc6e4cb..0000000000
--- a/test/files/buildmanager/simpletest/B.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class B extends A {
- override def foo = 2
-}
diff --git a/test/files/buildmanager/simpletest/simpletest.changes/A1.scala b/test/files/buildmanager/simpletest/simpletest.changes/A1.scala
deleted file mode 100644
index 83d15dc739..0000000000
--- a/test/files/buildmanager/simpletest/simpletest.changes/A1.scala
+++ /dev/null
@@ -1 +0,0 @@
-class A
diff --git a/test/files/buildmanager/simpletest/simpletest.check b/test/files/buildmanager/simpletest/simpletest.check
deleted file mode 100644
index 95ea2c4c0d..0000000000
--- a/test/files/buildmanager/simpletest/simpletest.check
+++ /dev/null
@@ -1,11 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(Removed(Definition(A.foo))))
-invalidate B.scala because inherited method removed [Removed(Definition(A.foo))]
-compiling Set(B.scala)
-B.scala:2: error: method foo overrides nothing
- override def foo = 2
- ^
diff --git a/test/files/buildmanager/simpletest/simpletest.test b/test/files/buildmanager/simpletest/simpletest.test
deleted file mode 100644
index 2c0be1502f..0000000000
--- a/test/files/buildmanager/simpletest/simpletest.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A1.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2280/A.scala b/test/files/buildmanager/t2280/A.scala
deleted file mode 100644
index 5febadeb06..0000000000
--- a/test/files/buildmanager/t2280/A.scala
+++ /dev/null
@@ -1 +0,0 @@
-class A extends B
diff --git a/test/files/buildmanager/t2280/B.java b/test/files/buildmanager/t2280/B.java
deleted file mode 100644
index aef8e106e9..0000000000
--- a/test/files/buildmanager/t2280/B.java
+++ /dev/null
@@ -1,2 +0,0 @@
-public class B {}
-
diff --git a/test/files/buildmanager/t2280/t2280.check b/test/files/buildmanager/t2280/t2280.check
deleted file mode 100644
index 7ea7511c63..0000000000
--- a/test/files/buildmanager/t2280/t2280.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > A.scala B.java
-compiling Set(A.scala, B.java)
-Changes: Map()
-builder > B.java
-compiling Set(B.java)
-Changes: Map(class B -> List())
diff --git a/test/files/buildmanager/t2280/t2280.test b/test/files/buildmanager/t2280/t2280.test
deleted file mode 100644
index 2eda777853..0000000000
--- a/test/files/buildmanager/t2280/t2280.test
+++ /dev/null
@@ -1,2 +0,0 @@
->>compile A.scala B.java
->>compile B.java
diff --git a/test/files/buildmanager/t2556_1/A.scala b/test/files/buildmanager/t2556_1/A.scala
deleted file mode 100644
index c6e200b217..0000000000
--- a/test/files/buildmanager/t2556_1/A.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class A {
- def x(i: Int) = i+"3"
-}
diff --git a/test/files/buildmanager/t2556_1/B.scala b/test/files/buildmanager/t2556_1/B.scala
deleted file mode 100644
index 8529587b56..0000000000
--- a/test/files/buildmanager/t2556_1/B.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class B extends A {
- def x(s: String) = s+"5"
-}
diff --git a/test/files/buildmanager/t2556_1/t2556_1.changes/A2.scala b/test/files/buildmanager/t2556_1/t2556_1.changes/A2.scala
deleted file mode 100644
index 4ac1045e13..0000000000
--- a/test/files/buildmanager/t2556_1/t2556_1.changes/A2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-class A {
- def x(i: String) = i+"3"
-}
-
diff --git a/test/files/buildmanager/t2556_1/t2556_1.check b/test/files/buildmanager/t2556_1/t2556_1.check
deleted file mode 100644
index 2e501c8f6f..0000000000
--- a/test/files/buildmanager/t2556_1/t2556_1.check
+++ /dev/null
@@ -1,12 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(Changed(Definition(A.x))[method x changed from (i: Int)String to (i: String)String flags: <method>]))
-invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)String to (i: String)String flags: <method>]]
-compiling Set(B.scala)
-B.scala:2: error: overriding method x in class A of type (i: String)String;
- method x needs `override' modifier
- def x(s: String) = s+"5"
- ^
diff --git a/test/files/buildmanager/t2556_1/t2556_1.test b/test/files/buildmanager/t2556_1/t2556_1.test
deleted file mode 100644
index 6f3bd03361..0000000000
--- a/test/files/buildmanager/t2556_1/t2556_1.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
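
A minimal sketch, reusing the names from the deleted t2556_1 sources, of the end state those files describe: once A2.scala changes the parameter type of A.x to String, B.x overrides it and needs the modifier that t2556_1.check complains about.

class A {
  def x(i: String) = i + "3"   // the signature introduced by t2556_1.changes/A2.scala
}

class B extends A {
  override def x(s: String) = s + "5"   // `override` is now mandatory, per t2556_1.check
}
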
diff --git a/test/files/buildmanager/t2556_2/A.scala b/test/files/buildmanager/t2556_2/A.scala
deleted file mode 100644
index b8da5c8fb1..0000000000
--- a/test/files/buildmanager/t2556_2/A.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-class A {
- def x(i: Int) = i+"3"
-}
-
diff --git a/test/files/buildmanager/t2556_2/B.scala b/test/files/buildmanager/t2556_2/B.scala
deleted file mode 100644
index 80ff25d0ca..0000000000
--- a/test/files/buildmanager/t2556_2/B.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-class B extends A
-
diff --git a/test/files/buildmanager/t2556_2/C.scala b/test/files/buildmanager/t2556_2/C.scala
deleted file mode 100644
index 0ab13e3757..0000000000
--- a/test/files/buildmanager/t2556_2/C.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-class C extends B {
- def x(s: String) = s+"5"
-}
-
diff --git a/test/files/buildmanager/t2556_2/t2556_2.changes/A2.scala b/test/files/buildmanager/t2556_2/t2556_2.changes/A2.scala
deleted file mode 100644
index 4ac1045e13..0000000000
--- a/test/files/buildmanager/t2556_2/t2556_2.changes/A2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-class A {
- def x(i: String) = i+"3"
-}
-
diff --git a/test/files/buildmanager/t2556_2/t2556_2.check b/test/files/buildmanager/t2556_2/t2556_2.check
deleted file mode 100644
index cae4f72212..0000000000
--- a/test/files/buildmanager/t2556_2/t2556_2.check
+++ /dev/null
@@ -1,13 +0,0 @@
-builder > A.scala B.scala C.scala
-compiling Set(A.scala, B.scala, C.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(Changed(Definition(A.x))[method x changed from (i: Int)String to (i: String)String flags: <method>]))
-invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)String to (i: String)String flags: <method>]]
-invalidate C.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)String to (i: String)String flags: <method>]]
-compiling Set(B.scala, C.scala)
-C.scala:2: error: overriding method x in class A of type (i: String)String;
- method x needs `override' modifier
- def x(s: String) = s+"5"
- ^
diff --git a/test/files/buildmanager/t2556_2/t2556_2.test b/test/files/buildmanager/t2556_2/t2556_2.test
deleted file mode 100644
index 9f31bb6409..0000000000
--- a/test/files/buildmanager/t2556_2/t2556_2.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala C.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2556_3/A.scala b/test/files/buildmanager/t2556_3/A.scala
deleted file mode 100644
index 089a05f493..0000000000
--- a/test/files/buildmanager/t2556_3/A.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-class A {
- def x = 3
-}
-class B extends A
-
diff --git a/test/files/buildmanager/t2556_3/B.scala b/test/files/buildmanager/t2556_3/B.scala
deleted file mode 100644
index 0ec5ae4b55..0000000000
--- a/test/files/buildmanager/t2556_3/B.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-object E {
- def main(args: Array[String]) =
- println( (new C).x )
-}
-
diff --git a/test/files/buildmanager/t2556_3/C.scala b/test/files/buildmanager/t2556_3/C.scala
deleted file mode 100644
index 403df8455e..0000000000
--- a/test/files/buildmanager/t2556_3/C.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-class C extends B
-
diff --git a/test/files/buildmanager/t2556_3/t2556_3.changes/A2.scala b/test/files/buildmanager/t2556_3/t2556_3.changes/A2.scala
deleted file mode 100644
index 21cb2779f9..0000000000
--- a/test/files/buildmanager/t2556_3/t2556_3.changes/A2.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-class A {
- def x = 3
-}
-class B
-
diff --git a/test/files/buildmanager/t2556_3/t2556_3.check b/test/files/buildmanager/t2556_3/t2556_3.check
deleted file mode 100644
index 34f90f7f9b..0000000000
--- a/test/files/buildmanager/t2556_3/t2556_3.check
+++ /dev/null
@@ -1,18 +0,0 @@
-builder > A.scala B.scala C.scala
-compiling Set(A.scala, B.scala, C.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(), class B -> List(Changed(Class(B))[List((A,Object))]))
-invalidate C.scala because parents have changed [Changed(Class(B))[List((A,Object))]]
-invalidate B.scala because it references invalid (no longer inherited) definition [ParentChanged(Class(C))]
-compiling Set(B.scala, C.scala)
-B.scala:3: error: type mismatch;
- found : C
- required: ?{def x: ?}
-Note that implicit conversions are not applicable because they are ambiguous:
- both method any2Ensuring in object Predef of type [A](x: A)Ensuring[A]
- and method any2ArrowAssoc in object Predef of type [A](x: A)ArrowAssoc[A]
- are possible conversion functions from C to ?{def x: ?}
- println( (new C).x )
- ^
diff --git a/test/files/buildmanager/t2556_3/t2556_3.test b/test/files/buildmanager/t2556_3/t2556_3.test
deleted file mode 100644
index 9f31bb6409..0000000000
--- a/test/files/buildmanager/t2556_3/t2556_3.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala C.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2557/A.scala b/test/files/buildmanager/t2557/A.scala
deleted file mode 100644
index 3be55f19a6..0000000000
--- a/test/files/buildmanager/t2557/A.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-trait A {
- def x = 3
-}
-
diff --git a/test/files/buildmanager/t2557/B.scala b/test/files/buildmanager/t2557/B.scala
deleted file mode 100644
index ea86a90079..0000000000
--- a/test/files/buildmanager/t2557/B.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-trait B extends A {
- override def x = super.x * 2
-}
-
diff --git a/test/files/buildmanager/t2557/C.scala b/test/files/buildmanager/t2557/C.scala
deleted file mode 100644
index dd575ac38d..0000000000
--- a/test/files/buildmanager/t2557/C.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-trait C extends A {
- override def x = super.x + 5
-}
diff --git a/test/files/buildmanager/t2557/D.scala b/test/files/buildmanager/t2557/D.scala
deleted file mode 100644
index 4e662a80ce..0000000000
--- a/test/files/buildmanager/t2557/D.scala
+++ /dev/null
@@ -1 +0,0 @@
-trait D extends C with B
diff --git a/test/files/buildmanager/t2557/E.scala b/test/files/buildmanager/t2557/E.scala
deleted file mode 100644
index 2aee552675..0000000000
--- a/test/files/buildmanager/t2557/E.scala
+++ /dev/null
@@ -1 +0,0 @@
-trait E extends D
diff --git a/test/files/buildmanager/t2557/F.scala b/test/files/buildmanager/t2557/F.scala
deleted file mode 100644
index e1996704e7..0000000000
--- a/test/files/buildmanager/t2557/F.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object F extends E {
- def main(args: Array[String]) =
- println(x)
-}
diff --git a/test/files/buildmanager/t2557/t2557.changes/D2.scala b/test/files/buildmanager/t2557/t2557.changes/D2.scala
deleted file mode 100644
index 67295f8e6d..0000000000
--- a/test/files/buildmanager/t2557/t2557.changes/D2.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-trait D extends B with C
-
diff --git a/test/files/buildmanager/t2557/t2557.check b/test/files/buildmanager/t2557/t2557.check
deleted file mode 100644
index 736ef3645e..0000000000
--- a/test/files/buildmanager/t2557/t2557.check
+++ /dev/null
@@ -1,10 +0,0 @@
-builder > A.scala B.scala C.scala D.scala E.scala F.scala
-compiling Set(A.scala, B.scala, C.scala, D.scala, E.scala, F.scala)
-Changes: Map()
-builder > D.scala
-compiling Set(D.scala)
-Changes: Map(trait D -> List(Changed(Class(D))[List((Object,Object), (C,B), (B,C))]))
-invalidate E.scala because parents have changed [Changed(Class(D))[List((Object,Object), (C,B), (B,C))]]
-invalidate F.scala because parents have changed [Changed(Class(D))[List((Object,Object), (C,B), (B,C))]]
-compiling Set(E.scala, F.scala)
-Changes: Map(object F -> List(), trait E -> List())
diff --git a/test/files/buildmanager/t2557/t2557.test b/test/files/buildmanager/t2557/t2557.test
deleted file mode 100644
index 6b0103092f..0000000000
--- a/test/files/buildmanager/t2557/t2557.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala C.scala D.scala E.scala F.scala
->>update D.scala=>D2.scala
->>compile D.scala
diff --git a/test/files/buildmanager/t2559/A.scala b/test/files/buildmanager/t2559/A.scala
deleted file mode 100644
index fb4f6e3545..0000000000
--- a/test/files/buildmanager/t2559/A.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-sealed trait A
-class B extends A
-class C extends A
-//class E extends A
-
diff --git a/test/files/buildmanager/t2559/D.scala b/test/files/buildmanager/t2559/D.scala
deleted file mode 100644
index 62dc5427f9..0000000000
--- a/test/files/buildmanager/t2559/D.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object D {
- def x(a: A) = if (a.isInstanceOf[B] || a.isInstanceOf[C]) ()
-}
-
diff --git a/test/files/buildmanager/t2559/t2559.changes/A2.scala b/test/files/buildmanager/t2559/t2559.changes/A2.scala
deleted file mode 100644
index 8e90594e2c..0000000000
--- a/test/files/buildmanager/t2559/t2559.changes/A2.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-sealed trait A
-class B extends A
-class C extends A
-class E extends A
-
diff --git a/test/files/buildmanager/t2559/t2559.check b/test/files/buildmanager/t2559/t2559.check
deleted file mode 100644
index 4d43838cf5..0000000000
--- a/test/files/buildmanager/t2559/t2559.check
+++ /dev/null
@@ -1,9 +0,0 @@
-builder > A.scala D.scala
-compiling Set(A.scala, D.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class B -> List(), class C -> List(), class E -> List(Changed(Class(A))[class E extends a sealed trait A]), trait A -> List())
-invalidate D.scala because it references changed class [Changed(Class(A))[class E extends a sealed trait A]]
-compiling Set(D.scala)
-Changes: Map(object D -> List())
diff --git a/test/files/buildmanager/t2559/t2559.test b/test/files/buildmanager/t2559/t2559.test
deleted file mode 100644
index b787c5b39f..0000000000
--- a/test/files/buildmanager/t2559/t2559.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala D.scala
->>update A.scala=>A2.scala
->>compile A.scala
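
A minimal sketch of the point behind the deleted t2559 test: adding a subclass to a sealed trait is a change that every client of the trait may have to react to. The pattern match below is an illustration only; the deleted D.scala used isInstanceOf checks instead.

sealed trait A
class B extends A
class C extends A
class E extends A   // the subclass added by t2559.changes/A2.scala

object D {
  // With a sealed parent, the compiler can check these cases for exhaustiveness,
  // which is why adding E invalidates sources that inspect values of type A.
  def x(a: A): Unit = a match {
    case _: B => ()
    case _: C => ()
    case _: E => ()
  }
}
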
diff --git a/test/files/buildmanager/t2562/A.scala b/test/files/buildmanager/t2562/A.scala
deleted file mode 100644
index 740cd1e868..0000000000
--- a/test/files/buildmanager/t2562/A.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-object A
-{
- def x0 = B.x0
- def x1 = B.x1
- def x2 = B.x2
- def x3 = 3
-}
diff --git a/test/files/buildmanager/t2562/B.scala b/test/files/buildmanager/t2562/B.scala
deleted file mode 100644
index a524e5cc84..0000000000
--- a/test/files/buildmanager/t2562/B.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object B
-{
- def x0 = A.x1
- def x1 = A.x2
- def x2 = A.x3
-}
-
-
diff --git a/test/files/buildmanager/t2562/t2562.changes/A2.scala b/test/files/buildmanager/t2562/t2562.changes/A2.scala
deleted file mode 100644
index c560e1e816..0000000000
--- a/test/files/buildmanager/t2562/t2562.changes/A2.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object A
-{
- def x0 = B.x0
- def x1 = B.x1
- def x2 = B.x2
- def x3 = "3"
-}
-
diff --git a/test/files/buildmanager/t2562/t2562.check b/test/files/buildmanager/t2562/t2562.check
deleted file mode 100644
index 74575f28ea..0000000000
--- a/test/files/buildmanager/t2562/t2562.check
+++ /dev/null
@@ -1,12 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(object A -> List(Changed(Definition(A.x3))[method x3 changed from ()Int to ()String flags: <method> <triedcooking>]))
-invalidate B.scala because it references changed definition [Changed(Definition(A.x3))[method x3 changed from ()Int to ()String flags: <method> <triedcooking>]]
-compiling Set(B.scala)
-Changes: Map(object B -> List(Changed(Definition(B.x2))[method x2 changed from ()Int to ()String flags: <method> <triedcooking>]))
-invalidate A.scala because it references changed definition [Changed(Definition(B.x2))[method x2 changed from ()Int to ()String flags: <method> <triedcooking>]]
-compiling Set(A.scala, B.scala)
-Changes: Map(object A -> List(Changed(Definition(A.x0))[method x0 changed from ()Int to ()String flags: <method>], Changed(Definition(A.x1))[method x1 changed from ()Int to ()String flags: <method> <triedcooking>], Changed(Definition(A.x2))[method x2 changed from ()Int to ()String flags: <method> <triedcooking>]), object B -> List(Changed(Definition(B.x0))[method x0 changed from ()Int to ()String flags: <method>], Changed(Definition(B.x1))[method x1 changed from ()Int to ()String flags: <method>]))
diff --git a/test/files/buildmanager/t2562/t2562.test b/test/files/buildmanager/t2562/t2562.test
deleted file mode 100644
index 6f3bd03361..0000000000
--- a/test/files/buildmanager/t2562/t2562.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
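
A minimal sketch, combining the deleted t2562 sources with the A2.scala change, of why t2562.check records two rounds of invalidation: the two objects call into each other, so the Int-to-String change in A.x3 first ripples into B and then back into A before everything settles.

object A {
  def x0 = B.x0
  def x1 = B.x1
  def x2 = B.x2
  def x3 = "3"   // the Int -> String change from t2562.changes/A2.scala
}

object B {
  def x0 = A.x1   // each result type is inferred from A, so B's signatures change too
  def x1 = A.x2
  def x2 = A.x3
}
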
diff --git a/test/files/buildmanager/t2649/A.scala b/test/files/buildmanager/t2649/A.scala
deleted file mode 100644
index 86cc3f2c15..0000000000
--- a/test/files/buildmanager/t2649/A.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object A {
- def x(zz: Int, yy: Int) = yy - zz
-}
diff --git a/test/files/buildmanager/t2649/B.scala b/test/files/buildmanager/t2649/B.scala
deleted file mode 100644
index 26c89518cb..0000000000
--- a/test/files/buildmanager/t2649/B.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object B {
- def main(args: Array[String]): Unit =
- println( A.x(zz = 3, yy = 4) )
-}
diff --git a/test/files/buildmanager/t2649/t2649.changes/A2.scala b/test/files/buildmanager/t2649/t2649.changes/A2.scala
deleted file mode 100644
index 9a6309fca3..0000000000
--- a/test/files/buildmanager/t2649/t2649.changes/A2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object A {
- def x(yy: Int, zz: Int) = yy - zz
-}
-
diff --git a/test/files/buildmanager/t2649/t2649.check b/test/files/buildmanager/t2649/t2649.check
deleted file mode 100644
index d0f41f32ec..0000000000
--- a/test/files/buildmanager/t2649/t2649.check
+++ /dev/null
@@ -1,9 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(object A -> List(Changed(Definition(A.x))[method x changed from (zz: Int, yy: Int)Int to (yy: Int, zz: Int)Int flags: <method> <triedcooking>]))
-invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from (zz: Int, yy: Int)Int to (yy: Int, zz: Int)Int flags: <method> <triedcooking>]]
-compiling Set(B.scala)
-Changes: Map(object B -> List())
diff --git a/test/files/buildmanager/t2649/t2649.test b/test/files/buildmanager/t2649/t2649.test
deleted file mode 100644
index 6f3bd03361..0000000000
--- a/test/files/buildmanager/t2649/t2649.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
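
A minimal sketch of the point exercised by the deleted t2649 test: named arguments bind by parameter name, so swapping the names in A.x is a source-visible change that forces B.scala to be recompiled, even though the call still type-checks and still yields 1.

object A {
  def x(yy: Int, zz: Int) = yy - zz   // the renamed signature from t2649.changes/A2.scala
}

object B {
  def main(args: Array[String]): Unit =
    println(A.x(zz = 3, yy = 4))      // arguments are matched by name, not by position
}
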
diff --git a/test/files/buildmanager/t2650_1/A.scala b/test/files/buildmanager/t2650_1/A.scala
deleted file mode 100644
index 74714a3c47..0000000000
--- a/test/files/buildmanager/t2650_1/A.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-trait A {
- type S[_]
-}
-
diff --git a/test/files/buildmanager/t2650_1/B.scala b/test/files/buildmanager/t2650_1/B.scala
deleted file mode 100644
index 80f0e30259..0000000000
--- a/test/files/buildmanager/t2650_1/B.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-trait B extends A {
- type F = S[Int]
-}
diff --git a/test/files/buildmanager/t2650_1/t2650_1.changes/A2.scala b/test/files/buildmanager/t2650_1/t2650_1.changes/A2.scala
deleted file mode 100644
index 2b8ead4ff1..0000000000
--- a/test/files/buildmanager/t2650_1/t2650_1.changes/A2.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-trait A {
- type S
-}
diff --git a/test/files/buildmanager/t2650_1/t2650_1.check b/test/files/buildmanager/t2650_1/t2650_1.check
deleted file mode 100644
index 2f9dd124af..0000000000
--- a/test/files/buildmanager/t2650_1/t2650_1.check
+++ /dev/null
@@ -1,12 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-warning: there were 1 feature warning(s); re-run with -feature for details
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(trait A -> List(Changed(Definition(A.S))[type S changed from A.this.S[_] to A.this.S flags: <deferred>]))
-invalidate B.scala because inherited method changed [Changed(Definition(A.S))[type S changed from A.this.S[_] to A.this.S flags: <deferred>]]
-compiling Set(B.scala)
-B.scala:2: error: B.this.S does not take type parameters
- type F = S[Int]
- ^
diff --git a/test/files/buildmanager/t2650_1/t2650_1.test b/test/files/buildmanager/t2650_1/t2650_1.test
deleted file mode 100644
index 6f3bd03361..0000000000
--- a/test/files/buildmanager/t2650_1/t2650_1.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
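
The single feature warning recorded in the deleted t2650_1.check is consistent with the higher-kinded abstract type member `type S[_]`; a minimal sketch of the original pair with that language feature enabled explicitly (the import is an addition, not part of the deleted sources):

import scala.language.higherKinds

trait A {
  type S[_]         // a type-constructor member; this is what needs the feature import
}

trait B extends A {
  type F = S[Int]   // breaks once A2.scala turns S into a plain, unparameterized type
}
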
diff --git a/test/files/buildmanager/t2650_2/A.scala b/test/files/buildmanager/t2650_2/A.scala
deleted file mode 100644
index bcea634485..0000000000
--- a/test/files/buildmanager/t2650_2/A.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-trait A {
- type S = Int
-}
diff --git a/test/files/buildmanager/t2650_2/B.scala b/test/files/buildmanager/t2650_2/B.scala
deleted file mode 100644
index 22a3a9a48e..0000000000
--- a/test/files/buildmanager/t2650_2/B.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-trait B extends A {
- def x: S
- def y: Int = x
-}
diff --git a/test/files/buildmanager/t2650_2/t2650_2.changes/A2.scala b/test/files/buildmanager/t2650_2/t2650_2.changes/A2.scala
deleted file mode 100644
index 8274c1b62d..0000000000
--- a/test/files/buildmanager/t2650_2/t2650_2.changes/A2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-trait A {
- type S = Long
-}
-
diff --git a/test/files/buildmanager/t2650_2/t2650_2.check b/test/files/buildmanager/t2650_2/t2650_2.check
deleted file mode 100644
index 53a0287dfc..0000000000
--- a/test/files/buildmanager/t2650_2/t2650_2.check
+++ /dev/null
@@ -1,14 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(trait A -> List(Changed(Definition(A.S))[type S changed from A.this.S to A.this.S flags: ]))
-invalidate B.scala because inherited method changed [Changed(Definition(A.S))[type S changed from A.this.S to A.this.S flags: ]]
-compiling Set(B.scala)
-B.scala:3: error: type mismatch;
- found : B.this.S
- (which expands to) Long
- required: Int
- def y: Int = x
- ^
diff --git a/test/files/buildmanager/t2650_2/t2650_2.test b/test/files/buildmanager/t2650_2/t2650_2.test
deleted file mode 100644
index 6f3bd03361..0000000000
--- a/test/files/buildmanager/t2650_2/t2650_2.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2650_3/A.scala b/test/files/buildmanager/t2650_3/A.scala
deleted file mode 100644
index cd13843eb9..0000000000
--- a/test/files/buildmanager/t2650_3/A.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-trait A {
- type T = Int
- def x: T
-}
diff --git a/test/files/buildmanager/t2650_3/B.scala b/test/files/buildmanager/t2650_3/B.scala
deleted file mode 100644
index 46a8cf270a..0000000000
--- a/test/files/buildmanager/t2650_3/B.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object B {
- def x(a: A): Int = a.x
-}
diff --git a/test/files/buildmanager/t2650_3/t2650_3.changes/A2.scala b/test/files/buildmanager/t2650_3/t2650_3.changes/A2.scala
deleted file mode 100644
index e5667b2539..0000000000
--- a/test/files/buildmanager/t2650_3/t2650_3.changes/A2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-trait A {
- type T = Long
- def x: T
-}
diff --git a/test/files/buildmanager/t2650_3/t2650_3.check b/test/files/buildmanager/t2650_3/t2650_3.check
deleted file mode 100644
index 5c6326d59f..0000000000
--- a/test/files/buildmanager/t2650_3/t2650_3.check
+++ /dev/null
@@ -1,14 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(trait A -> List(Changed(Definition(A.T))[type T changed from A.this.T to A.this.T flags: ]))
-invalidate B.scala because it references changed definition [Changed(Definition(A.T))[type T changed from A.this.T to A.this.T flags: ]]
-compiling Set(B.scala)
-B.scala:2: error: type mismatch;
- found : a.T
- (which expands to) Long
- required: Int
- def x(a: A): Int = a.x
- ^
diff --git a/test/files/buildmanager/t2650_3/t2650_3.test b/test/files/buildmanager/t2650_3/t2650_3.test
deleted file mode 100644
index 6f3bd03361..0000000000
--- a/test/files/buildmanager/t2650_3/t2650_3.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2650_4/A.scala b/test/files/buildmanager/t2650_4/A.scala
deleted file mode 100644
index b9a519eb48..0000000000
--- a/test/files/buildmanager/t2650_4/A.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-trait A {
- type T = Int
- type T2 = T
- def x: T2
-}
diff --git a/test/files/buildmanager/t2650_4/B.scala b/test/files/buildmanager/t2650_4/B.scala
deleted file mode 100644
index 46a8cf270a..0000000000
--- a/test/files/buildmanager/t2650_4/B.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object B {
- def x(a: A): Int = a.x
-}
diff --git a/test/files/buildmanager/t2650_4/t2650_4.changes/A2.scala b/test/files/buildmanager/t2650_4/t2650_4.changes/A2.scala
deleted file mode 100644
index 0220e7b7bc..0000000000
--- a/test/files/buildmanager/t2650_4/t2650_4.changes/A2.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-trait A {
- type T = Long
- type T2 = T
- def x: T2
-}
diff --git a/test/files/buildmanager/t2650_4/t2650_4.check b/test/files/buildmanager/t2650_4/t2650_4.check
deleted file mode 100644
index a4aeaddfbb..0000000000
--- a/test/files/buildmanager/t2650_4/t2650_4.check
+++ /dev/null
@@ -1,14 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(trait A -> List(Changed(Definition(A.T))[type T changed from A.this.T to A.this.T flags: ]))
-invalidate B.scala because it references changed definition [Changed(Definition(A.T))[type T changed from A.this.T to A.this.T flags: ]]
-compiling Set(B.scala)
-B.scala:2: error: type mismatch;
- found : a.T2
- (which expands to) Long
- required: Int
- def x(a: A): Int = a.x
- ^
diff --git a/test/files/buildmanager/t2650_4/t2650_4.test b/test/files/buildmanager/t2650_4/t2650_4.test
deleted file mode 100644
index 6f3bd03361..0000000000
--- a/test/files/buildmanager/t2650_4/t2650_4.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2651_2/A.scala b/test/files/buildmanager/t2651_2/A.scala
deleted file mode 100644
index d712f6febe..0000000000
--- a/test/files/buildmanager/t2651_2/A.scala
+++ /dev/null
@@ -1 +0,0 @@
-trait A[T]
diff --git a/test/files/buildmanager/t2651_2/t2651_2.changes/A2.scala b/test/files/buildmanager/t2651_2/t2651_2.changes/A2.scala
deleted file mode 100644
index 7fb573e077..0000000000
--- a/test/files/buildmanager/t2651_2/t2651_2.changes/A2.scala
+++ /dev/null
@@ -1 +0,0 @@
-trait A[S]
diff --git a/test/files/buildmanager/t2651_2/t2651_2.check b/test/files/buildmanager/t2651_2/t2651_2.check
deleted file mode 100644
index dd789b7565..0000000000
--- a/test/files/buildmanager/t2651_2/t2651_2.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(trait A -> List())
diff --git a/test/files/buildmanager/t2651_2/t2651_2.test b/test/files/buildmanager/t2651_2/t2651_2.test
deleted file mode 100644
index d0614473ce..0000000000
--- a/test/files/buildmanager/t2651_2/t2651_2.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2651_3/A.scala b/test/files/buildmanager/t2651_3/A.scala
deleted file mode 100644
index 14f9e4662f..0000000000
--- a/test/files/buildmanager/t2651_3/A.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-trait A[T, S] {
- def x: T
-}
diff --git a/test/files/buildmanager/t2651_3/t2651_3.changes/A2.scala b/test/files/buildmanager/t2651_3/t2651_3.changes/A2.scala
deleted file mode 100644
index 51bf27d1fa..0000000000
--- a/test/files/buildmanager/t2651_3/t2651_3.changes/A2.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-trait A[T, S] {
- def x: S
-}
diff --git a/test/files/buildmanager/t2651_3/t2651_3.check b/test/files/buildmanager/t2651_3/t2651_3.check
deleted file mode 100644
index 2a60e3d806..0000000000
--- a/test/files/buildmanager/t2651_3/t2651_3.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(trait A -> List(Changed(Definition(A.x))[method x changed from ()T to ()S flags: <method> <deferred>]))
diff --git a/test/files/buildmanager/t2651_3/t2651_3.test b/test/files/buildmanager/t2651_3/t2651_3.test
deleted file mode 100644
index d0614473ce..0000000000
--- a/test/files/buildmanager/t2651_3/t2651_3.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2651_4/A.scala b/test/files/buildmanager/t2651_4/A.scala
deleted file mode 100644
index 63f2a1643e..0000000000
--- a/test/files/buildmanager/t2651_4/A.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-trait A[T, S] {
- def x: T
- def y(a: T)
- def z[B <: T]
-}
diff --git a/test/files/buildmanager/t2651_4/B.scala b/test/files/buildmanager/t2651_4/B.scala
deleted file mode 100644
index b33dbde676..0000000000
--- a/test/files/buildmanager/t2651_4/B.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-trait B extends A[Int, String] {
- def x = 3
-}
diff --git a/test/files/buildmanager/t2651_4/t2651_4.changes/A2.scala b/test/files/buildmanager/t2651_4/t2651_4.changes/A2.scala
deleted file mode 100644
index f155129d13..0000000000
--- a/test/files/buildmanager/t2651_4/t2651_4.changes/A2.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-trait A[S, T] {
- def x: T
- def y(a: T)
- def z[B <: T]
-}
diff --git a/test/files/buildmanager/t2651_4/t2651_4.check b/test/files/buildmanager/t2651_4/t2651_4.check
deleted file mode 100644
index 74e5d8f99b..0000000000
--- a/test/files/buildmanager/t2651_4/t2651_4.check
+++ /dev/null
@@ -1,13 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(trait A -> List(Changed(Definition(A.x))[method x changed from ()T to ()T flags: <method> <deferred> <triedcooking>], Changed(Definition(A.y))[method y changed from (a: T)Unit to (a: T)Unit flags: <method> <deferred>], Changed(Definition(A.z))[method z changed from [B <: T]()Unit to [B <: T]()Unit flags: <method> <deferred>]))
-invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from ()T to ()T flags: <method> <deferred> <triedcooking>]]
-compiling Set(B.scala)
-B.scala:2: error: type mismatch;
- found : Int(3)
- required: String
- def x = 3
- ^
diff --git a/test/files/buildmanager/t2651_4/t2651_4.test b/test/files/buildmanager/t2651_4/t2651_4.test
deleted file mode 100644
index 6f3bd03361..0000000000
--- a/test/files/buildmanager/t2651_4/t2651_4.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2653/A.scala b/test/files/buildmanager/t2653/A.scala
deleted file mode 100644
index fb17a158c7..0000000000
--- a/test/files/buildmanager/t2653/A.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-class A[+T]
-
diff --git a/test/files/buildmanager/t2653/B.scala b/test/files/buildmanager/t2653/B.scala
deleted file mode 100644
index 8f55a88e05..0000000000
--- a/test/files/buildmanager/t2653/B.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object B {
- val a: A[Any] = new A[Int]
-}
diff --git a/test/files/buildmanager/t2653/t2653.changes/A2.scala b/test/files/buildmanager/t2653/t2653.changes/A2.scala
deleted file mode 100644
index 51d13cce6e..0000000000
--- a/test/files/buildmanager/t2653/t2653.changes/A2.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-class A[T]
-
diff --git a/test/files/buildmanager/t2653/t2653.check b/test/files/buildmanager/t2653/t2653.check
deleted file mode 100644
index 36781522af..0000000000
--- a/test/files/buildmanager/t2653/t2653.check
+++ /dev/null
@@ -1,15 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(Changed(Class(A))[ tparams: List((type T,type T))], Changed(Definition(A.<init>))[constructor A changed from ()A[T] to ()A[T] flags: <method> <triedcooking>]))
-invalidate B.scala because it references changed class [Changed(Class(A))[ tparams: List((type T,type T))]]
-compiling Set(B.scala)
-B.scala:2: error: type mismatch;
- found : A[Int]
- required: A[Any]
-Note: Int <: Any, but class A is invariant in type T.
-You may wish to define T as +T instead. (SLS 4.5)
- val a: A[Any] = new A[Int]
- ^
diff --git a/test/files/buildmanager/t2653/t2653.test b/test/files/buildmanager/t2653/t2653.test
deleted file mode 100644
index 6f3bd03361..0000000000
--- a/test/files/buildmanager/t2653/t2653.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
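
A minimal sketch of the variance point in the deleted t2653 test: with a covariant parameter an A[Int] is an A[Any], and dropping the `+` produces exactly the mismatch quoted in t2653.check.

class A[+T]   // the original declaration; A2.scala removes the `+`

object B {
  val a: A[Any] = new A[Int]   // compiles only while T is declared covariant
}
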
diff --git a/test/files/buildmanager/t2654/A.scala b/test/files/buildmanager/t2654/A.scala
deleted file mode 100644
index 75f396d039..0000000000
--- a/test/files/buildmanager/t2654/A.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-class A
-
diff --git a/test/files/buildmanager/t2654/B.scala b/test/files/buildmanager/t2654/B.scala
deleted file mode 100644
index a18aec3dbe..0000000000
--- a/test/files/buildmanager/t2654/B.scala
+++ /dev/null
@@ -1 +0,0 @@
-class B extends A
diff --git a/test/files/buildmanager/t2654/t2654.changes/A2.scala b/test/files/buildmanager/t2654/t2654.changes/A2.scala
deleted file mode 100644
index c302edbd85..0000000000
--- a/test/files/buildmanager/t2654/t2654.changes/A2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-class A {
- private def x = 5
-}
-
diff --git a/test/files/buildmanager/t2654/t2654.check b/test/files/buildmanager/t2654/t2654.check
deleted file mode 100644
index 68f6e8efc0..0000000000
--- a/test/files/buildmanager/t2654/t2654.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List())
diff --git a/test/files/buildmanager/t2654/t2654.test b/test/files/buildmanager/t2654/t2654.test
deleted file mode 100644
index 6f3bd03361..0000000000
--- a/test/files/buildmanager/t2654/t2654.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2655/A.scala b/test/files/buildmanager/t2655/A.scala
deleted file mode 100644
index b2c54ac47d..0000000000
--- a/test/files/buildmanager/t2655/A.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object A {
- def x(i: => String) = ()
-}
-
diff --git a/test/files/buildmanager/t2655/B.scala b/test/files/buildmanager/t2655/B.scala
deleted file mode 100644
index 6c1918c0fb..0000000000
--- a/test/files/buildmanager/t2655/B.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object B {
- val x = A.x("3")
-}
diff --git a/test/files/buildmanager/t2655/t2655.changes/A2.scala b/test/files/buildmanager/t2655/t2655.changes/A2.scala
deleted file mode 100644
index 0d6a7c69bb..0000000000
--- a/test/files/buildmanager/t2655/t2655.changes/A2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object A {
- def x(i: Function0[String]) = ()
-}
-
diff --git a/test/files/buildmanager/t2655/t2655.check b/test/files/buildmanager/t2655/t2655.check
deleted file mode 100644
index 41ce65a2f5..0000000000
--- a/test/files/buildmanager/t2655/t2655.check
+++ /dev/null
@@ -1,13 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(object A -> List(Changed(Definition(A.x))[method x changed from (i: Function0)Unit to (i: Function0)Unit flags: <method> <triedcooking>]))
-invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from (i: Function0)Unit to (i: Function0)Unit flags: <method> <triedcooking>]]
-compiling Set(B.scala)
-B.scala:2: error: type mismatch;
- found : String("3")
- required: () => String
- val x = A.x("3")
- ^
diff --git a/test/files/buildmanager/t2655/t2655.test b/test/files/buildmanager/t2655/t2655.test
deleted file mode 100644
index 6f3bd03361..0000000000
--- a/test/files/buildmanager/t2655/t2655.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
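
A minimal sketch of the distinction behind the deleted t2655 test: a by-name parameter accepts a bare expression at the call site, while an explicit Function0 parameter requires a thunk, which is the mismatch t2655.check reports. The object names below are illustrative, not taken from the deleted sources.

object ByName  { def x(i: => String) = () }          // the original A.scala shape
object Thunked { def x(i: Function0[String]) = () }  // the shape after A2.scala

object Caller {
  ByName.x("3")          // fine: "3" is passed by name
  Thunked.x(() => "3")   // required form; Thunked.x("3") is the reported type mismatch
}
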
diff --git a/test/files/buildmanager/t2657/A.scala b/test/files/buildmanager/t2657/A.scala
deleted file mode 100644
index 2a6c62d29c..0000000000
--- a/test/files/buildmanager/t2657/A.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class A {
- implicit def y(i: Int): String = i.toString
-}
diff --git a/test/files/buildmanager/t2657/B.scala b/test/files/buildmanager/t2657/B.scala
deleted file mode 100644
index 77869890db..0000000000
--- a/test/files/buildmanager/t2657/B.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object B extends A {
- val x: String = 3
-}
-
diff --git a/test/files/buildmanager/t2657/t2657.changes/A2.scala b/test/files/buildmanager/t2657/t2657.changes/A2.scala
deleted file mode 100644
index 7dc99d425e..0000000000
--- a/test/files/buildmanager/t2657/t2657.changes/A2.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class A {
- def y(i: Int): String = i.toString
-}
diff --git a/test/files/buildmanager/t2657/t2657.check b/test/files/buildmanager/t2657/t2657.check
deleted file mode 100644
index 7bff078f56..0000000000
--- a/test/files/buildmanager/t2657/t2657.check
+++ /dev/null
@@ -1,14 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-warning: there were 1 feature warning(s); re-run with -feature for details
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(Changed(Definition(A.y))[method y changed from (i: Int)String to (i: Int)String flags: <method> implicit <triedcooking>]))
-invalidate B.scala because inherited method changed [Changed(Definition(A.y))[method y changed from (i: Int)String to (i: Int)String flags: <method> implicit <triedcooking>]]
-compiling Set(B.scala)
-B.scala:2: error: type mismatch;
- found : Int(3)
- required: String
- val x: String = 3
- ^
diff --git a/test/files/buildmanager/t2657/t2657.test b/test/files/buildmanager/t2657/t2657.test
deleted file mode 100644
index 6f3bd03361..0000000000
--- a/test/files/buildmanager/t2657/t2657.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
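
The feature warning in the deleted t2657.check is the standard one for implicit conversions; a minimal sketch of the original pair with the feature enabled explicitly (the import is an addition, not part of the deleted sources):

import scala.language.implicitConversions

class A {
  implicit def y(i: Int): String = i.toString
}

object B extends A {
  val x: String = 3   // compiles only while the inherited conversion is implicit
}
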
diff --git a/test/files/buildmanager/t2789/A.scala b/test/files/buildmanager/t2789/A.scala
deleted file mode 100644
index 08d5bc840c..0000000000
--- a/test/files/buildmanager/t2789/A.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-class A {
- implicit def e: E = new E
- def x(i: Int)(implicit y: E): String = ""
-}
-class E
diff --git a/test/files/buildmanager/t2789/B.scala b/test/files/buildmanager/t2789/B.scala
deleted file mode 100644
index dcefbeec1b..0000000000
--- a/test/files/buildmanager/t2789/B.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object B extends A {
- val y = x(3)
-}
diff --git a/test/files/buildmanager/t2789/t2789.changes/A2.scala b/test/files/buildmanager/t2789/t2789.changes/A2.scala
deleted file mode 100644
index 4ba3814e71..0000000000
--- a/test/files/buildmanager/t2789/t2789.changes/A2.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-class A {
- def e: E = new E
- def x(i: Int)(implicit y: E): String = ""
-}
-class E
diff --git a/test/files/buildmanager/t2789/t2789.check b/test/files/buildmanager/t2789/t2789.check
deleted file mode 100644
index 066561ac44..0000000000
--- a/test/files/buildmanager/t2789/t2789.check
+++ /dev/null
@@ -1,11 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(Changed(Definition(A.e))[method e changed from ()E to ()E flags: <method> implicit <triedcooking>]), class E -> List())
-invalidate B.scala because inherited method changed [Changed(Definition(A.e))[method e changed from ()E to ()E flags: <method> implicit <triedcooking>]]
-compiling Set(B.scala)
-B.scala:2: error: could not find implicit value for parameter y: E
- val y = x(3)
- ^
diff --git a/test/files/buildmanager/t2789/t2789.test b/test/files/buildmanager/t2789/t2789.test
deleted file mode 100644
index 6f3bd03361..0000000000
--- a/test/files/buildmanager/t2789/t2789.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2790/A.scala b/test/files/buildmanager/t2790/A.scala
deleted file mode 100644
index 6e9c1a90db..0000000000
--- a/test/files/buildmanager/t2790/A.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-object A {
- def x(f: String, g: Int): Int = g
- def x(f: Int, g: Int = 3): Int = g
-}
-
diff --git a/test/files/buildmanager/t2790/B.scala b/test/files/buildmanager/t2790/B.scala
deleted file mode 100644
index 441055ca12..0000000000
--- a/test/files/buildmanager/t2790/B.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object B {
- val y = A.x(5)
-}
-
diff --git a/test/files/buildmanager/t2790/t2790.changes/A2.scala b/test/files/buildmanager/t2790/t2790.changes/A2.scala
deleted file mode 100644
index 704ef4e96e..0000000000
--- a/test/files/buildmanager/t2790/t2790.changes/A2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object A {
- def x(f: String, g: Int = 3): Int = g
- def x(f: Int, g: Int): Int = g
-}
diff --git a/test/files/buildmanager/t2790/t2790.check b/test/files/buildmanager/t2790/t2790.check
deleted file mode 100644
index 13d61dac42..0000000000
--- a/test/files/buildmanager/t2790/t2790.check
+++ /dev/null
@@ -1,13 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(object A -> List(Added(Definition(A.x)), Changed(Definition(A.x))[value x changed from (f: String, g: Int)Int to (f: String, g: Int)Int <and> (f: Int, g: Int)Int flags: <method>]))
-invalidate B.scala because it references changed definition [Changed(Definition(A.x))[value x changed from (f: String, g: Int)Int to (f: String, g: Int)Int <and> (f: Int, g: Int)Int flags: <method>]]
-compiling Set(B.scala)
-B.scala:2: error: type mismatch;
- found : Int(5)
- required: String
- val y = A.x(5)
- ^
diff --git a/test/files/buildmanager/t2790/t2790.test b/test/files/buildmanager/t2790/t2790.test
deleted file mode 100644
index 6f3bd03361..0000000000
--- a/test/files/buildmanager/t2790/t2790.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A.scala B.scala
->>update A.scala=>A2.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t2792/A1.scala b/test/files/buildmanager/t2792/A1.scala
deleted file mode 100644
index 96dc0ef933..0000000000
--- a/test/files/buildmanager/t2792/A1.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object A {
- val x = new C
-}
diff --git a/test/files/buildmanager/t2792/A2.scala b/test/files/buildmanager/t2792/A2.scala
deleted file mode 100644
index e55e681c76..0000000000
--- a/test/files/buildmanager/t2792/A2.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object B {
- import A.x.y
- val z = y
-}
diff --git a/test/files/buildmanager/t2792/A3.scala b/test/files/buildmanager/t2792/A3.scala
deleted file mode 100644
index cd083cdb34..0000000000
--- a/test/files/buildmanager/t2792/A3.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class C {
- val y = 4
-}
diff --git a/test/files/buildmanager/t2792/t2792.changes/A1_1.scala b/test/files/buildmanager/t2792/t2792.changes/A1_1.scala
deleted file mode 100644
index 00ee05f273..0000000000
--- a/test/files/buildmanager/t2792/t2792.changes/A1_1.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object A {
- var x = new C
-}
diff --git a/test/files/buildmanager/t2792/t2792.check b/test/files/buildmanager/t2792/t2792.check
deleted file mode 100644
index 00a2b83469..0000000000
--- a/test/files/buildmanager/t2792/t2792.check
+++ /dev/null
@@ -1,14 +0,0 @@
-builder > A1.scala A2.scala A3.scala
-compiling Set(A1.scala, A2.scala, A3.scala)
-Changes: Map()
-builder > A1.scala
-compiling Set(A1.scala)
-Changes: Map(object A -> List(Added(Definition(A.x_$eq)), Changed(Definition(A.x))[value x changed to variable x]))
-invalidate A2.scala because it references changed definition [Changed(Definition(A.x))[value x changed to variable x]]
-compiling Set(A2.scala)
-A2.scala:2: error: stable identifier required, but A.x found.
- import A.x.y
- ^
-A2.scala:3: error: not found: value y
- val z = y
- ^
diff --git a/test/files/buildmanager/t2792/t2792.test b/test/files/buildmanager/t2792/t2792.test
deleted file mode 100644
index f199950bba..0000000000
--- a/test/files/buildmanager/t2792/t2792.test
+++ /dev/null
@@ -1,3 +0,0 @@
->>compile A1.scala A2.scala A3.scala
->>update A1.scala=>A1_1.scala
->>compile A1.scala
diff --git a/test/files/buildmanager/t3045/A.java b/test/files/buildmanager/t3045/A.java
deleted file mode 100644
index d1acb00cd6..0000000000
--- a/test/files/buildmanager/t3045/A.java
+++ /dev/null
@@ -1,7 +0,0 @@
-public interface A {
- public class C implements A {}
-}
-
-class B {
- static class C {}
-}
diff --git a/test/files/buildmanager/t3045/t3045.check b/test/files/buildmanager/t3045/t3045.check
deleted file mode 100644
index 5e4e71e045..0000000000
--- a/test/files/buildmanager/t3045/t3045.check
+++ /dev/null
@@ -1,3 +0,0 @@
-builder > A.java
-compiling Set(A.java)
-Changes: Map()
diff --git a/test/files/buildmanager/t3045/t3045.test b/test/files/buildmanager/t3045/t3045.test
deleted file mode 100644
index 6cf7e35543..0000000000
--- a/test/files/buildmanager/t3045/t3045.test
+++ /dev/null
@@ -1 +0,0 @@
->>compile A.java
diff --git a/test/files/buildmanager/t3054/bar/Bar.java b/test/files/buildmanager/t3054/bar/Bar.java
deleted file mode 100644
index e1b056d4e5..0000000000
--- a/test/files/buildmanager/t3054/bar/Bar.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package bar;
-import foo.Foo$;
-
-
-public class Bar {
- void bar() { Foo$.MODULE$.foo(); }
-}
diff --git a/test/files/buildmanager/t3054/foo/Foo.scala b/test/files/buildmanager/t3054/foo/Foo.scala
deleted file mode 100644
index c0fcd97390..0000000000
--- a/test/files/buildmanager/t3054/foo/Foo.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-package foo
-
-class Foo {
- def foo() = println("foo")
-}
diff --git a/test/files/buildmanager/t3054/t3054.check b/test/files/buildmanager/t3054/t3054.check
deleted file mode 100644
index 97cca8862e..0000000000
--- a/test/files/buildmanager/t3054/t3054.check
+++ /dev/null
@@ -1,3 +0,0 @@
-builder > bar/Bar.java foo/Foo.scala
-compiling Set(bar/Bar.java, foo/Foo.scala)
-Changes: Map()
diff --git a/test/files/buildmanager/t3054/t3054.test b/test/files/buildmanager/t3054/t3054.test
deleted file mode 100644
index 903df24b13..0000000000
--- a/test/files/buildmanager/t3054/t3054.test
+++ /dev/null
@@ -1 +0,0 @@
->>compile bar/Bar.java foo/Foo.scala
diff --git a/test/files/buildmanager/t3059/A.scala b/test/files/buildmanager/t3059/A.scala
deleted file mode 100644
index 0dd25f6647..0000000000
--- a/test/files/buildmanager/t3059/A.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-class A extends B {
- private def getBar = List(1,2,3)
- lazy val bar: List[Int] = getBar
-}
diff --git a/test/files/buildmanager/t3059/B.scala b/test/files/buildmanager/t3059/B.scala
deleted file mode 100644
index 46596870ac..0000000000
--- a/test/files/buildmanager/t3059/B.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-abstract class B {
- private def getFoo = 12
- lazy val foo: Int = getFoo
-}
diff --git a/test/files/buildmanager/t3059/t3059.check b/test/files/buildmanager/t3059/t3059.check
deleted file mode 100644
index 4a8076aae1..0000000000
--- a/test/files/buildmanager/t3059/t3059.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List()) \ No newline at end of file
diff --git a/test/files/buildmanager/t3059/t3059.test b/test/files/buildmanager/t3059/t3059.test
deleted file mode 100644
index 6f3749dc4b..0000000000
--- a/test/files/buildmanager/t3059/t3059.test
+++ /dev/null
@@ -1,2 +0,0 @@
->>compile A.scala B.scala
->>compile A.scala \ No newline at end of file
diff --git a/test/files/buildmanager/t3133/A.java b/test/files/buildmanager/t3133/A.java
deleted file mode 100644
index c4e7f3af0e..0000000000
--- a/test/files/buildmanager/t3133/A.java
+++ /dev/null
@@ -1,7 +0,0 @@
-public class A {
- class Foo {}
-
- public A(Foo a) {}
-
- private void bar(Foo z) {}
-}
diff --git a/test/files/buildmanager/t3133/t3133.check b/test/files/buildmanager/t3133/t3133.check
deleted file mode 100644
index 5e4e71e045..0000000000
--- a/test/files/buildmanager/t3133/t3133.check
+++ /dev/null
@@ -1,3 +0,0 @@
-builder > A.java
-compiling Set(A.java)
-Changes: Map()
diff --git a/test/files/buildmanager/t3133/t3133.test b/test/files/buildmanager/t3133/t3133.test
deleted file mode 100644
index 6cf7e35543..0000000000
--- a/test/files/buildmanager/t3133/t3133.test
+++ /dev/null
@@ -1 +0,0 @@
->>compile A.java
diff --git a/test/files/buildmanager/t3140/A.scala b/test/files/buildmanager/t3140/A.scala
deleted file mode 100644
index f7768044d1..0000000000
--- a/test/files/buildmanager/t3140/A.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-class As {
- trait A {
- def foo(parents: String): A = {
- (() => parents)
- null
- }
- }
-}
diff --git a/test/files/buildmanager/t3140/t3140.check b/test/files/buildmanager/t3140/t3140.check
deleted file mode 100644
index 008d5a9618..0000000000
--- a/test/files/buildmanager/t3140/t3140.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class As -> List(), object As$A$class -> List(), trait As$A -> List())
diff --git a/test/files/buildmanager/t3140/t3140.test b/test/files/buildmanager/t3140/t3140.test
deleted file mode 100644
index 392e0d365f..0000000000
--- a/test/files/buildmanager/t3140/t3140.test
+++ /dev/null
@@ -1,2 +0,0 @@
->>compile A.scala
->>compile A.scala
diff --git a/test/files/buildmanager/t4215/A.scala b/test/files/buildmanager/t4215/A.scala
deleted file mode 100644
index 9db40b0fee..0000000000
--- a/test/files/buildmanager/t4215/A.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-class A {
- def B() {
- object C
- }
-}
diff --git a/test/files/buildmanager/t4215/t4215.check b/test/files/buildmanager/t4215/t4215.check
deleted file mode 100644
index d9ec9a743a..0000000000
--- a/test/files/buildmanager/t4215/t4215.check
+++ /dev/null
@@ -1,6 +0,0 @@
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(), object A$C$2 -> List())
diff --git a/test/files/buildmanager/t4215/t4215.test b/test/files/buildmanager/t4215/t4215.test
deleted file mode 100644
index 392e0d365f..0000000000
--- a/test/files/buildmanager/t4215/t4215.test
+++ /dev/null
@@ -1,2 +0,0 @@
->>compile A.scala
->>compile A.scala
diff --git a/test/files/continuations-run/implicit-infer-annotations.check b/test/files/continuations-run/implicit-infer-annotations.check
new file mode 100644
index 0000000000..e8206c4319
--- /dev/null
+++ b/test/files/continuations-run/implicit-infer-annotations.check
@@ -0,0 +1,5 @@
+Range(5, 6, 7, 8, 9, 10)
+Range(5, 6, 7, 8, 9, 10)
+15
+List(10, 1, 2, 3)
+Range(5, 6, 7, 8, 9, 10)
diff --git a/test/files/continuations-run/implicit-infer-annotations.scala b/test/files/continuations-run/implicit-infer-annotations.scala
new file mode 100644
index 0000000000..3f0e959f60
--- /dev/null
+++ b/test/files/continuations-run/implicit-infer-annotations.scala
@@ -0,0 +1,59 @@
+import annotation._
+
+object A {
+ class foo[-B,+C] extends StaticAnnotation with TypeConstraint
+
+ def shift[A, B, C](fun: (A => B) => C): A @foo[B, C] = ???
+ def reset[A, C](ctx: => (A @foo[A, C])): C = ???
+
+ def m1 = reset { shift { f: (Int => Range) => f(5) }.to(10) }
+}
+
+object B {
+ import scala.util.continuations._
+
+ def m1 = reset { shift { f: (Int => Range) => f(5) }.to(10) }
+ def m2 = reset { val a = shift { f: (Int => Range) => f(5) } ; a.to(10) }
+
+ val x1 = reset{
+ shift{ cont: (Int => Range) =>
+ cont(5)
+ }.to(10)
+ }
+
+ val x2 = reset{
+ val a = shift{ cont: (Int => Range) =>
+ cont(5)
+ }
+ a.to(10)
+ } // x is now Range(5, 6, 7, 8, 9, 10)
+
+ val x3 = reset{
+ shift{ cont: (Int => Int) =>
+ cont(5)
+ } + 10
+ } // x is now 15
+
+ val x4 = reset{
+ 10 :: shift{ cont: (List[Int] => List[Int]) =>
+ cont(List(1, 2, 3))
+ }
+ } // x is List(10, 1, 2, 3)
+
+ val x5 = reset{
+ new scala.runtime.RichInt(shift{ cont: (Int => Range) =>
+ cont(5)
+ }) to 10
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ import B._
+ println(x1)
+ println(x2)
+ println(x3)
+ println(x4)
+ println(x5)
+ }
+}
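
For readers unfamiliar with the CPS plugin exercised by the new test above: `reset` delimits a computation and `shift` captures the rest of the `reset` block as a function. A minimal sketch of that control flow (illustration only, not part of the test; the object name is made up, and compiling it assumes the continuations plugin is enabled via -P:continuations:enable):

import scala.util.continuations._

object ShiftResetSketch {
  def main(args: Array[String]): Unit = {
    // Inside reset, shift captures the rest of the reset block (here `_ + 10`)
    // as the continuation k; k(5) therefore evaluates 5 + 10, and reset returns 15.
    val r: Int = reset {
      shift { (k: Int => Int) => k(5) } + 10
    }
    println(r) // 15, the same value as the x3 case checked above
  }
}
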
diff --git a/test/files/detach-neg/det_bar.check b/test/files/detach-neg/det_bar.check
deleted file mode 100644
index 70b47581a5..0000000000
--- a/test/files/detach-neg/det_bar.check
+++ /dev/null
@@ -1,4 +0,0 @@
-det_bar.scala:7: error: detach inapplicable for method bar
- detach(bar)
- ^
-one error found
diff --git a/test/files/detach-neg/det_bar.scala b/test/files/detach-neg/det_bar.scala
deleted file mode 100644
index 862afb1d6e..0000000000
--- a/test/files/detach-neg/det_bar.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-import scala.remoting._
-class A(y: Int) {
- var z = 2
- var bar = (x: Int) => x + y + z
- def foo(x: Int): Int = x + y + z
- bar = (x: Int) => x * y
- detach(bar)
-}
-
-object test extends App {
- val a = new A(1)
- println(a.bar(2))
-}
diff --git a/test/files/detach-run/actor-run.check b/test/files/detach-run/actor-run.check
deleted file mode 100644
index 9448ddd5fe..0000000000
--- a/test/files/detach-run/actor-run.check
+++ /dev/null
@@ -1,5 +0,0 @@
-Server.main 8889
-Client.main 127.0.0.1 8889
-yInstVal = 10
-zLocVal = 1000
-result received: 11111
diff --git a/test/files/detach-run/actor/Client.scala b/test/files/detach-run/actor/Client.scala
deleted file mode 100644
index 12573e24d3..0000000000
--- a/test/files/detach-run/actor/Client.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * @author Stephane Micheloud
- */
-
-import scala.actors.Actor._, ClientHelper._
-import scala.actors.remote._, RemoteActor._
-import scala.remoting._, Debug._
-
-object Foo {
- def trace(msg: String) { info("[Foo.trace] "+msg)}
-}
-object Client {
- val yInstVal: Int = 10
- var yInstVar: Int = 99
- object Bar {
- def trace(msg: String) { info("[Bar.trace] "+msg) }
- }
- def main(args: Array[String]) {
- init(args)
- actor {
- val server = select(Node(host, port), 'Server)
- val zLocVal: Int = 1000
- var zLocVar: Int = 9998
- server ! detach(
- (x: Int) => {
- println("yInstVal = "+yInstVal)
- this.trace("yInstVar = "+yInstVar)
- Bar.trace("zLocVal = "+zLocVal)
- Foo.trace("zLocVar = "+zLocVar)
- zLocVar += 2
- System.out.println("zLocVal = "+zLocVal)
- Debug.info("zLocVar = "+zLocVar)
- x + yInstVal + yInstVar + zLocVal + zLocVar
- })
- react {
- case result: Int =>
- println("result received: " + result)
- Predef.exit(0)
- }
- }
- }
- private def trace(msg: String) { info("[Client.trace] "+msg) }
-}
-
-object ClientHelper {
- private var _host = "127.0.0.1"
- private var _port = 8888
- def host = _host
- def port = _port
- def init(args: Array[String]) {
- try { _host = args(0) } catch { case _ => }
- try { _port = args(1).toInt } catch { case _ => }
- }
-}
diff --git a/test/files/detach-run/actor/Server.scala b/test/files/detach-run/actor/Server.scala
deleted file mode 100644
index b56d22f744..0000000000
--- a/test/files/detach-run/actor/Server.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * @author Stephane Micheloud
- */
-
-import scala.actors.Actor._
-import scala.actors.remote.RemoteActor._
-
-object Server extends ServerConsole {
- private def computation(f: Int => Int): Int = {
- //some time-consuming task
- f(2)
- }
- def main(args: Array[String]) {
- actor {
- classLoader = serverClassLoader
- alive(args(0).toInt)
- register('Server, self)
- loopWhile(isRunning) {
- react {
- case f: (Int => Int) =>
- val result = computation(f)
- sender ! result
- }
- }
- }
- }
-}
diff --git a/test/files/detach-run/actor/ServerConsole.scala b/test/files/detach-run/actor/ServerConsole.scala
deleted file mode 100644
index 8ebd9d4c2e..0000000000
--- a/test/files/detach-run/actor/ServerConsole.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * @author Stephane Micheloud
- */
-
-import java.io.{BufferedReader, InputStreamReader}
-
-import scala.compat.Platform.currentTime
-import scala.remoting.Debug, Debug._
-
-trait ServerConsole extends Thread {
- private val startTime = currentTime
- actors.Debug.level = // e.g. 3 // info+warning+error
- try { System.getProperty("scala.actors.logLevel", "0").toInt }
- catch { case e => 0 }
-
- start()
-
- val serverClassLoader = {
- import java.rmi.server.RMIClassLoader
- val codebase = System.getProperty("java.rmi.server.codebase")
- info("[ServerConsole] codebase="+codebase)
- RMIClassLoader getClassLoader codebase
- }
-
- private var isTerminated = false
-
- def terminate() { isTerminated = false }
-
- def isRunning = !isTerminated
-
- override def run() {
- val in = new BufferedReader(new InputStreamReader(System.in))
- var quit = false
- while (!quit) {
- val args = getArgs(in)
- if (args contains "quit")
- quit = true
- if (args contains "cls") {
- println(ERASE_SCREEN)
- println(CURSOR_HOME)
- }
- if (args contains "warning")
- Debug.level = Level.WARNING
- if (args contains "info")
- Debug.level = Level.INFO
- if (args contains "silent")
- Debug.level = Level.SILENT
- }
- terminate()
- println("Server exited ("+mkTimeString(currentTime - startTime)+")")
- sys.exit(0)
- }
-
- protected def trace(msg: String) {
- Debug.info("[ServerConsole.trace] "+msg)
- }
-
- private def getArgs(in: BufferedReader): List[String] = {
- val input = try { in.readLine() } catch { case _ => null }
- if (input != null) (input.trim split "\\s+").toList else Nil
- }
-
- private def mkTimeString(time: Long): String = {
- def twoDigits(i: Long) = (if (i < 10) "0" else "")+i
- val sec = time / 1000
- val min = sec / 60
- val h = min / 60
- twoDigits(h) +":"+
- twoDigits(min - h * 60)+":"+
- twoDigits(sec - min * 60)
- }
-
- private val ERASE_SCREEN = "\033[2J"
- private val CURSOR_HOME = "\033[H"
-}
diff --git a/test/files/detach-run/actor/actor.flags b/test/files/detach-run/actor/actor.flags
deleted file mode 100644
index 55eed8bbcd..0000000000
--- a/test/files/detach-run/actor/actor.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xpluginsdir ../../../../build/pack/misc/scala-devel/plugins -Xplugin-require:detach -P:detach:enable
diff --git a/test/files/detach-run/actor/actor.scala b/test/files/detach-run/actor/actor.scala
deleted file mode 100644
index 23a10d6982..0000000000
--- a/test/files/detach-run/actor/actor.scala
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * @author Stephane Micheloud
- */
-
-object Test {
-
- val name = "actor"
- val host = "127.0.0.1"
- val port = 8889
-
- def main(args: Array[String]) {
- setenv()
- println("Server.main "+port)
- Server.main(Array(port.toString))
- println("Client.main "+host+" "+port)
- Client.main(Array(host, port.toString))
- Server.terminate()
- }
-
- private def setenv() {
- import Env._
-
- // Java properties for server & client
- System.setProperty("scala.actors.logLevel", actors_logLevel)
- System.setProperty("scala.remoting.logLevel", logLevel)
- System.setProperty("java.security.manager", "")
- System.setProperty("java.security.policy", policyFile)
- // Java properties for server only
- System.setProperty("java.rmi.server.codebase", deployUrl)
- System.setProperty("java.rmi.server.hostname", host)
- System.setProperty("java.rmi.server.useCodebaseOnly", "true")
-
- // application-specific classes to be deployed and accessed via URL
- // (i.e. detached closure, proxy interfaces and proxy stubs)
- val classNames = List(
- "$anonfun$main$1$proxy",
- "$anonfun$main$1$proxyImpl_Stub",
- "Bar$proxy",
- "Bar$proxyImpl_Stub",
- "Client$$anonfun$main$1$$anonfun$apply$1$detach",
- "Client$proxy",
- "Client$proxyImpl_Stub",
- "Foo$proxy",
- "Foo$proxyImpl_Stub")
-
- val proxyImplNames =
- for (n <- classNames; i = n lastIndexOf "_Stub"; if i > 0)
- yield n.substring(0, i)
-
- generatePolicyFile()
- generateRmiStubs(proxyImplNames)
- generateJarFile(classNames)
- }
-}
-
-object Env {
- import java.io._, java.util.jar._
-
- val actors_logLevel = "0"
- // = "3" // info+warning+error
- val logLevel = "silent"
- // = "info" // debug user code only
- // = "info,lib" // debug user & library code
-
- // we assume an Apache server is running locally for deployment
- private val sep = File.separator
- val docPath = System.getProperty("user.home")+sep+"public_html"
- val docRoot = "http://127.0.0.1/~"+System.getProperty("user.name")
-
- private val policyTmpl =
- System.getProperty("partest.cwd")+sep+Test.name+sep+"java.policy"
- val outPath = System.getProperty("partest.output")
- val libPath = System.getProperty("partest.lib")
- val policyFile = outPath+sep+"java.policy"
- val codebaseDir = outPath+sep+"-"
-
- assert((new File(docPath)).isDirectory,
- "Root directory \""+docPath+"\" not found")
- val deployJar = docPath+sep+Test.name+"_deploy.jar"
- val deployUrl = docRoot+"/"+Test.name+"_deploy.jar"
-
- def generatePolicyFile() {
- val in = new BufferedReader(new FileReader(policyTmpl))
- val out = new PrintWriter(new BufferedWriter(new FileWriter(policyFile)))
- var line = in.readLine()
- while (line != null) {
- val line1 = line.replaceAll("@PROJECT_LIB_BASE@", codebaseDir)
- out.println(line1)
- line = in.readLine()
- }
- in.close()
- out.close()
- }
-
- def generateRmiStubs(classNames: List[String]) {
- val options = List(
- "-v1.2",
- "-classpath "+libPath+File.pathSeparator+outPath,
- "-d "+outPath)
- rmic(options, classNames)
- //ls(outPath)
- }
-
- def generateJarFile(classNames: List[String]) {
- val out = new JarOutputStream(new FileOutputStream(deployJar))
- classNames foreach (name => try {
- val classFile = name+".class"
- val in = new FileInputStream(outPath+sep+classFile)
- out putNextEntry new JarEntry(classFile)
- val buf = new Array[Byte](512)
- var len = in read buf
- while (len != -1) {
- out.write(buf, 0, len)
- len = in read buf
- }
- in.close()
- } catch {
- case e: FileNotFoundException => println(e)
- })
- out.close()
- }
-
- private def ls(path: String) { exec("ls -al "+path) }
-
- private def rmic(options: List[String], classNames: List[String]) {
- val javaHome = scala.util.Properties.javaHome
- val jdkHome =
- if (javaHome endsWith "jre") javaHome.substring(0, javaHome.length-4)
- else javaHome
- val rmicExt = if (scala.util.Properties.isWin) ".exe" else ""
- val rmicCmd = jdkHome+sep+"bin"+sep+"rmic"+rmicExt
- val cmdLine = rmicCmd+options.mkString(" ", " ", "")+
- classNames.mkString(" "," ","")
- // println(cmdLine)
- exec(cmdLine)
- }
-
- private def exec(command: String) {
- val proc = Runtime.getRuntime exec command
- proc.waitFor()
- val out = new BufferedReader(new InputStreamReader(proc.getInputStream))
- var line = out.readLine()
- while (line != null) {
- println(line)
- line = out.readLine()
- }
- out.close()
- val err = new BufferedReader(new InputStreamReader(proc.getErrorStream))
- line = err.readLine()
- while (line != null) {
- println(line)
- line = err.readLine()
- }
- err.close()
- }
-}
-
diff --git a/test/files/detach-run/actor/java.policy b/test/files/detach-run/actor/java.policy
deleted file mode 100644
index 4beb2ca26b..0000000000
--- a/test/files/detach-run/actor/java.policy
+++ /dev/null
@@ -1,25 +0,0 @@
-// See http://java.sun.com/javase/6/docs/technotes/guides/security/permissions.html
-// See http://mindprod.com/jgloss/policyfile.html
-// The policy expands ${/} to the correct path or folder delimiter on your host platform.
-
-// Actions available with SocketPermission: accept, connect, listen, resolve
-// 1) The "resolve" action is implied when any of the other actions are present.
-// 2) The "listen" action is only meaningful when used with "localhost".
-
-grant {
- permission java.net.SocketPermission "*:80", "connect,accept,listen";
- permission java.net.SocketPermission "*:1024-", "connect,accept,listen";
- permission java.util.PropertyPermission "scala.remoting.logLevel", "read";
- permission java.util.PropertyPermission "scala.remoting.port", "read";
-};
-
-grant codeBase "@PROJECT_LIB_BASE@" {
- permission java.lang.RuntimePermission "getClassLoader";
- permission java.util.PropertyPermission "java.rmi.server.codebase", "read";
- permission java.util.PropertyPermission "java.rmi.server.hostname", "read";
- permission java.util.PropertyPermission "sun.rmi.dgc.server.gcInterval", "read,write";
-};
-
-//grant {
-// permission java.security.AllPermission;
-//};
diff --git a/test/files/detach-run/basic-run.check b/test/files/detach-run/basic-run.check
deleted file mode 100644
index 6463d97497..0000000000
--- a/test/files/detach-run/basic-run.check
+++ /dev/null
@@ -1,5 +0,0 @@
-Server.main 8889
-> Client.main 127.0.0.1 8889
-yInstVal = 10
-zLocVal = 1000
-result received: 11111
diff --git a/test/files/detach-run/basic/Client.scala b/test/files/detach-run/basic/Client.scala
deleted file mode 100644
index f8eddb041d..0000000000
--- a/test/files/detach-run/basic/Client.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * @author Stephane Micheloud
- */
-
-import java.net._, Thread._, ClientHelper._
-import scala.remoting._, Debug._
-
-object Foo {
- def trace(s: String) { info("[Foo.trace] "+s)}
-}
-object Client {
- val yInstVal: Int = 10
- var yInstVar: Int = 99
- object Bar {
- def trace(s: String) { info("[Bar.trace] "+s) }
- }
- def main(args: Array[String]) {
- init(args)
- val server = new Channel(host, port)
- val zLocVal: Int = 1000
- var zLocVar: Int = 9998
- server ! detach(
- (x: Int) => {
- println("yInstVal = "+yInstVal)
- this.trace("yInstVar = "+yInstVar)
- Bar.trace("zLocVal = "+zLocVal)
- Foo.trace("zLocVar = "+zLocVar)
- zLocVar += 2
- System.out.println("zLocVal = "+zLocVal)
- Debug.info("zLocVar = "+zLocVar)
- x + yInstVal + yInstVar + zLocVal + zLocVar
- })
- val result = server.receiveInt
- println("result received: " + result)
- }
- private def trace(s: String) { info("[Client.trace] "+s) }
-}
-
-object ClientHelper {
- private var _host = "127.0.0.1"
- private var _port = 8888
- def host = _host
- def port = _port
- def init(args: Array[String]) {
- try { _host = args(0) } catch { case _ => }
- try { _port = args(1).toInt } catch { case _ => }
- }
-}
diff --git a/test/files/detach-run/basic/Server.scala b/test/files/detach-run/basic/Server.scala
deleted file mode 100644
index f8aa02a4ba..0000000000
--- a/test/files/detach-run/basic/Server.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * @author Stephane Micheloud
- */
-
-import scala.remoting.ServerChannel
-
-object Server extends ServerConsole {
- private def computation(f: Int => Int): Int = {
- //some time-consuming task
- f(2)
- }
- def main(args: Array[String]) {
- val server = new ServerChannel(args(0).toInt)
- loop {
- val client = server.accept
- val f = client.receive[Int => Int]
- val result = computation(f)
- client ! result
- }
- server.close()
- }
-}
diff --git a/test/files/detach-run/basic/ServerConsole.scala b/test/files/detach-run/basic/ServerConsole.scala
deleted file mode 100644
index 65b81c0ca1..0000000000
--- a/test/files/detach-run/basic/ServerConsole.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * @author Stephane Micheloud
- */
-
-import java.io._
-
-import scala.compat.Platform.currentTime
-import scala.remoting.Debug, Debug._
-
-trait ServerConsole extends Thread {
- private val startTime = currentTime
-
- start()
-
- private var isTerminated = false
-
- def terminate() { isTerminated = true }
-
- protected def loop(block: => Unit) {
- while (!isTerminated) {
- try {
- block
- }
- catch {
- case e: ObjectStreamException =>
- trace("Object stream error ("+e.getMessage+")")
- case e: EOFException =>
- trace("Connection lost")
- case e: ClassNotFoundException =>
- trace("Class not found")
- case e =>
- trace("Server error: "+e)
- }
- }
- }
-
- override def run() {
- val in = new BufferedReader(new InputStreamReader(System.in))
- var quit = false
- while (!quit) {
- val args = getArgs(in)
- if (args contains "quit")
- quit = true
- if (args contains "cls") {
- println(ERASE_SCREEN)
- println(CURSOR_HOME)
- }
- if (args contains "warning")
- Debug.level = Level.WARNING
- if (args contains "info")
- Debug.level = Level.INFO
- if (args contains "silent")
- Debug.level = Level.SILENT
- }
- terminate()
- println("Server exited ("+mkTimeString(currentTime - startTime)+")")
- exit(0)
-
- }
-
- protected def trace(msg: String) {
- Debug.info("[ServerConsole.trace] "+msg)
- }
-
- private def getArgs(in: BufferedReader): List[String] = {
- print("> ")
- val input = try { in.readLine() } catch { case _ => null }
- if (input != null) (input.trim split "\\s+").toList else Nil
- }
-
- private def mkTimeString(time: Long): String = {
- def twoDigits(i: Long) = (if (i < 10) "0" else "")+i
- val sec = time / 1000
- val min = sec / 60
- val h = min / 60
- twoDigits(h) +":"+
- twoDigits(min - h * 60)+":"+
- twoDigits(sec - min * 60)
- }
-
- private val ERASE_SCREEN = "\033[2J"
- private val CURSOR_HOME = "\033[H"
-}
diff --git a/test/files/detach-run/basic/basic.flags b/test/files/detach-run/basic/basic.flags
deleted file mode 100644
index 55eed8bbcd..0000000000
--- a/test/files/detach-run/basic/basic.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xpluginsdir ../../../../build/pack/misc/scala-devel/plugins -Xplugin-require:detach -P:detach:enable
diff --git a/test/files/detach-run/basic/basic.scala b/test/files/detach-run/basic/basic.scala
deleted file mode 100644
index 4d0fc2d933..0000000000
--- a/test/files/detach-run/basic/basic.scala
+++ /dev/null
@@ -1,169 +0,0 @@
-/*
- * @author Stephane Micheloud
- */
-
-object Test {
-
- val name = "basic"
- val host = "127.0.0.1"
- val port = 8889
-
- def main(args: Array[String]) {
- setenv()
- println("Server.main "+port)
- server.start()
- println("Client.main "+host+" "+port)
- client.start()
- server.terminate()
- }
-
- private var server = new ServerThread(port)
- private var client = new ClientThread(host, port)
-
- private class ServerThread(port: Int) extends Runnable {
- private var th = new Thread(this)
- def start() { th.start(); Thread.sleep(1000) }
- def run() { Server.main(Array(port.toString)) }
- def terminate() { Server.terminate(); sys.exit(0) }
- }
-
- private class ClientThread(host: String, port: Int) extends Runnable {
- private var th = new Thread(this)
- def start() { th.start(); th.join() }
- def run() { Client.main(Array(host, port.toString)) }
- }
-
- private def setenv() {
- import Env._
-
- // Java properties for server & client
- System.setProperty("scala.remoting.logLevel", logLevel)
- System.setProperty("java.security.manager", "")
- System.setProperty("java.security.policy", policyFile)
- // Java properties for server only
- System.setProperty("java.rmi.server.codebase", deployUrl)
- System.setProperty("java.rmi.server.hostname", host)
- System.setProperty("java.rmi.server.useCodebaseOnly", "true")
-
- // application-secific classes to be deployed and accessed via URL
- // (i.e. detached closure, proxy interfaces and proxy stubs)
- val classNames = List(
- "Bar$proxy",
- "Bar$proxyImpl_Stub",
- "Client$$anonfun$main$1$detach",
- "Client$proxy",
- "Client$proxyImpl_Stub",
- "Foo$proxy",
- "Foo$proxyImpl_Stub")
-
- val proxyImplNames =
- for (n <- classNames; i = n lastIndexOf "_Stub"; if i > 0)
- yield n.substring(0, i)
-
- generatePolicyFile()
- generateRmiStubs(proxyImplNames)
- generateJarFile(classNames)
- }
-}
-
-object Env {
- import java.io._, java.util.jar._
-
- val actors_logLevel = "0"
- // = "3" // info+warning+error
- val logLevel = "silent"
- // = "info" // debug user code only
- // = "info,lib" // debug user & library code
-
- // we assume an Apache server is running locally for deployment
- private val sep = File.separator
- val docPath = System.getProperty("user.home")+sep+"public_html"
- val docRoot = "http://127.0.0.1/~"+System.getProperty("user.name")
-
- private val policyTmpl =
- System.getProperty("partest.cwd")+sep+Test.name+sep+"java.policy"
- val outPath = System.getProperty("partest.output")
- val libPath = System.getProperty("partest.lib")
- val policyFile = outPath+sep+"java.policy"
- val codebaseDir = outPath+sep+"-"
-
- assert((new File(docPath)).isDirectory,
- "Root directory \""+docPath+"\" not found")
- val deployJar = docPath+sep+Test.name+"_deploy.jar"
- val deployUrl = docRoot+"/"+Test.name+"_deploy.jar"
-
- def generatePolicyFile() {
- val in = new BufferedReader(new FileReader(policyTmpl))
- val out = new PrintWriter(new BufferedWriter(new FileWriter(policyFile)))
- var line = in.readLine()
- while (line != null) {
- val line1 = line.replaceAll("@PROJECT_LIB_BASE@", codebaseDir)
- out.println(line1)
- line = in.readLine()
- }
- in.close()
- out.close()
- }
-
- def generateRmiStubs(classNames: List[String]) {
- val options = List(
- "-v1.2",
- "-classpath "+libPath+File.pathSeparator+outPath,
- "-d "+outPath)
- rmic(options, classNames)
- //ls(outPath)
- }
-
- def generateJarFile(classNames: List[String]) {
- val out = new JarOutputStream(new FileOutputStream(deployJar))
- classNames foreach (name => try {
- val classFile = name+".class"
- val in = new FileInputStream(outPath+sep+classFile)
- out putNextEntry new JarEntry(classFile)
- val buf = new Array[Byte](512)
- var len = in read buf
- while (len != -1) {
- out.write(buf, 0, len)
- len = in read buf
- }
- in.close()
- } catch {
- case e: FileNotFoundException => println(e)
- })
- out.close()
- }
-
- private def ls(path: String) { exec("ls -al "+path) }
-
- private def rmic(options: List[String], classNames: List[String]) {
- val javaHome = scala.util.Properties.javaHome
- val jdkHome =
- if (javaHome endsWith "jre") javaHome.substring(0, javaHome.length-4)
- else javaHome
- val rmicExt = if (scala.util.Properties.isWin) ".exe" else ""
- val rmicCmd = jdkHome+sep+"bin"+sep+"rmic"+rmicExt
- val cmdLine = rmicCmd+options.mkString(" ", " ", "")+
- classNames.mkString(" "," ","")
- // println(cmdLine)
- exec(cmdLine)
- }
-
- private def exec(command: String) {
- val proc = Runtime.getRuntime exec command
- proc.waitFor()
- val out = new BufferedReader(new InputStreamReader(proc.getInputStream))
- var line = out.readLine()
- while (line != null) {
- println(line)
- line = out.readLine()
- }
- out.close()
- val err = new BufferedReader(new InputStreamReader(proc.getErrorStream))
- line = err.readLine()
- while (line != null) {
- println(line)
- line = err.readLine()
- }
- err.close()
- }
-}
diff --git a/test/files/detach-run/basic/java.policy b/test/files/detach-run/basic/java.policy
deleted file mode 100644
index 92c1045c3d..0000000000
--- a/test/files/detach-run/basic/java.policy
+++ /dev/null
@@ -1,26 +0,0 @@
-// See http://java.sun.com/javase/6/docs/technotes/guides/security/permissions.html
-// See http://mindprod.com/jgloss/policyfile.html
-// The policy expands ${/} to the correct path or folder delimiter on your host platform.
-
-// Actions available with SocketPermission: accept, connect, listen, resolve
-// 1) The "resolve" action is implied when any of the other actions are present.
-// 2) The "listen" action is only meaningful when used with "localhost".
-
-grant {
- permission java.net.SocketPermission "*:80", "connect,accept,listen";
- permission java.net.SocketPermission "*:1024-", "connect,accept,listen";
- permission java.util.PropertyPermission "scala.remoting.logLevel", "read";
- permission java.util.PropertyPermission "scala.remoting.port", "read";
-};
-
-grant codeBase "@PROJECT_LIB_BASE@" {
- permission java.lang.RuntimePermission "getClassLoader";
- permission java.lang.RuntimePermission "createClassLoader";
- permission java.util.PropertyPermission "java.rmi.server.codebase", "read";
- permission java.util.PropertyPermission "java.rmi.server.hostname", "read";
- permission java.util.PropertyPermission "sun.rmi.dgc.server.gcInterval", "read,write";
-};
-
-//grant {
-// permission java.security.AllPermission;
-//};
diff --git a/test/files/instrumented/InstrumentationTest.check b/test/files/instrumented/InstrumentationTest.check
index f0f447560a..0c570fa12c 100644
--- a/test/files/instrumented/InstrumentationTest.check
+++ b/test/files/instrumented/InstrumentationTest.check
@@ -4,5 +4,7 @@ Method call statistics:
1 Foo1.someMethod()I
1 instrumented/Foo2.<init>()V
1 instrumented/Foo2.someMethod()I
+ 1 scala/DeprecatedConsole.<init>()V
1 scala/Predef$.println(Ljava/lang/Object;)V
+ 1 scala/io/AnsiColor$class.$init$(Lscala/io/AnsiColor;)V
1 scala/runtime/BoxesRunTime.boxToBoolean(Z)Ljava/lang/Boolean;
diff --git a/test/files/jvm/duration-tck.scala b/test/files/jvm/duration-tck.scala
index d0f13816a6..b2573448c7 100644
--- a/test/files/jvm/duration-tck.scala
+++ b/test/files/jvm/duration-tck.scala
@@ -176,8 +176,9 @@ object Test extends App {
Thread.sleep(1.second.toMillis)
- { val l = dead.timeLeft; assert(l <= 1.second, s"$l > 1.second") }
- { val l = dead2.timeLeft; assert(l <= 1.second, s"$l > 1.second") }
+ // unfortunately it can happen that sleep() returns early without throwing
+ { val l = dead.timeLeft; assert(l <= 1100.millis, s"$l > 1100.millis") }
+ { val l = dead2.timeLeft; assert(l <= 1100.millis, s"$l > 1100.millis") }
// test integer mul/div
diff --git a/test/files/jvm/future-spec/FutureTests.scala b/test/files/jvm/future-spec/FutureTests.scala
index 0efa83fbd9..01c9cf82ba 100644
--- a/test/files/jvm/future-spec/FutureTests.scala
+++ b/test/files/jvm/future-spec/FutureTests.scala
@@ -77,7 +77,7 @@ object FutureTests extends MinimalScalaTest {
val logThrowable: Throwable => Unit = p.trySuccess(_)
val ec: ExecutionContext = ExecutionContext.fromExecutor(null, logThrowable)
- val t = new NotImplementedError("foo")
+ val t = new InterruptedException()
val f = Future(throw t)(ec)
Await.result(p.future, 2.seconds) mustBe t
}
diff --git a/test/files/jvm/interpreter.check b/test/files/jvm/interpreter.check
index 96b57c7742..44930c2932 100644
--- a/test/files/jvm/interpreter.check
+++ b/test/files/jvm/interpreter.check
@@ -357,10 +357,8 @@ defined class Term
scala> def f(e: Exp) = e match { // non-exhaustive warning here
case _:Fact => 3
}
-<console>:18: warning: match is not exhaustive!
-missing combination Exp
-missing combination Term
-
+<console>:18: warning: match may not be exhaustive.
+It would fail on the following inputs: Exp(), Term()
def f(e: Exp) = e match { // non-exhaustive warning here
^
f: (e: Exp)Int
diff --git a/test/files/jvm/interpreter.scala b/test/files/jvm/interpreter.scala
index f45eb034a9..bd1851053f 100644
--- a/test/files/jvm/interpreter.scala
+++ b/test/files/jvm/interpreter.scala
@@ -2,7 +2,7 @@ import scala.tools.nsc._
import scala.tools.partest.ReplTest
object Test extends ReplTest {
- override def extraSettings = "-deprecation -Xoldpatmat"
+ override def extraSettings = "-deprecation"
def code = <code>
// basics
3+4
diff --git a/test/files/jvm/manifests-new.scala b/test/files/jvm/manifests-new.scala
index f730be67bb..3937fdec69 100644
--- a/test/files/jvm/manifests-new.scala
+++ b/test/files/jvm/manifests-new.scala
@@ -56,7 +56,7 @@ object Test1 extends TestUtil {
}
object Test2 {
- import scala.util.Marshal._
+ import Marshal._
println("()="+load[Unit](dump(())))
println("true="+load[Boolean](dump(true)))
println("a="+load[Char](dump('a')))
@@ -88,6 +88,38 @@ object Test2 {
println()
}
+object Marshal {
+ import java.io._
+ import scala.reflect.ClassTag
+
+ def dump[A](o: A)(implicit t: ClassTag[A]): Array[Byte] = {
+ val ba = new ByteArrayOutputStream(512)
+ val out = new ObjectOutputStream(ba)
+ out.writeObject(t)
+ out.writeObject(o)
+ out.close()
+ ba.toByteArray()
+ }
+
+ @throws(classOf[IOException])
+ @throws(classOf[ClassCastException])
+ @throws(classOf[ClassNotFoundException])
+ def load[A](buffer: Array[Byte])(implicit expected: ClassTag[A]): A = {
+ val in = new ObjectInputStream(new ByteArrayInputStream(buffer))
+ val found = in.readObject.asInstanceOf[ClassTag[_]]
+ try {
+ found.runtimeClass.asSubclass(expected.runtimeClass)
+ in.readObject.asInstanceOf[A]
+ } catch {
+ case _: ClassCastException =>
+ in.close()
+ throw new ClassCastException("type mismatch;"+
+ "\n found : "+found+
+ "\n required: "+expected)
+ }
+ }
+}
+
trait TestUtil {
import java.io._
def write[A](o: A): Array[Byte] = {
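
The Marshal object added above stands in for the removed scala.util.Marshal: `dump` serializes the value's ClassTag ahead of the value, and `load` checks the stored tag against the expected one before deserializing. A small usage sketch (a hypothetical driver, not part of the test sources; it assumes the Marshal object from the hunk above is on the classpath):

object MarshalSketch {
  def main(args: Array[String]): Unit = {
    import Marshal._                    // the helper object defined in the hunk above
    val bytes = dump(List(1, 2, 3))     // writes ClassTag[List[Int]] followed by the list
    val xs    = load[List[Int]](bytes)  // stored tag is compatible with the expected one
    println(xs)                         // List(1, 2, 3)
    // load[Vector[Int]](bytes) would be rejected with ClassCastException("type mismatch; ...")
  }
}
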
diff --git a/test/files/jvm/manifests-old.scala b/test/files/jvm/manifests-old.scala
index 241966fd9d..bb1928f094 100644
--- a/test/files/jvm/manifests-old.scala
+++ b/test/files/jvm/manifests-old.scala
@@ -55,7 +55,7 @@ object Test1 extends TestUtil {
}
object Test2 {
- import scala.util.Marshal._
+ import Marshal._
println("()="+load[Unit](dump(())))
println("true="+load[Boolean](dump(true)))
println("a="+load[Char](dump('a')))
@@ -87,6 +87,38 @@ object Test2 {
println()
}
+object Marshal {
+ import java.io._
+ import scala.reflect.ClassTag
+
+ def dump[A](o: A)(implicit t: ClassTag[A]): Array[Byte] = {
+ val ba = new ByteArrayOutputStream(512)
+ val out = new ObjectOutputStream(ba)
+ out.writeObject(t)
+ out.writeObject(o)
+ out.close()
+ ba.toByteArray()
+ }
+
+ @throws(classOf[IOException])
+ @throws(classOf[ClassCastException])
+ @throws(classOf[ClassNotFoundException])
+ def load[A](buffer: Array[Byte])(implicit expected: ClassTag[A]): A = {
+ val in = new ObjectInputStream(new ByteArrayInputStream(buffer))
+ val found = in.readObject.asInstanceOf[ClassTag[_]]
+ try {
+ found.runtimeClass.asSubclass(expected.runtimeClass)
+ in.readObject.asInstanceOf[A]
+ } catch {
+ case _: ClassCastException =>
+ in.close()
+ throw new ClassCastException("type mismatch;"+
+ "\n found : "+found+
+ "\n required: "+expected)
+ }
+ }
+}
+
trait TestUtil {
import java.io._
def write[A](o: A): Array[Byte] = {
diff --git a/test/files/jvm/named-args-in-order.check b/test/files/jvm/named-args-in-order.check
new file mode 100644
index 0000000000..29a3ba55d3
--- /dev/null
+++ b/test/files/jvm/named-args-in-order.check
@@ -0,0 +1,3 @@
+bytecode identical
+bytecode identical
+bytecode identical
diff --git a/test/files/jvm/named-args-in-order/SameBytecode.scala b/test/files/jvm/named-args-in-order/SameBytecode.scala
new file mode 100644
index 0000000000..c00641777e
--- /dev/null
+++ b/test/files/jvm/named-args-in-order/SameBytecode.scala
@@ -0,0 +1,9 @@
+class SameBytecode {
+ def foo(a: Int, b: String) = 0
+ def foo(a: Int, b: Any) = 0
+
+ def a = foo(0, "")
+ def b = foo(a = 0, "")
+ def c = foo(0, b = "")
+ def d = foo(a = 0, b = "")
+} \ No newline at end of file
diff --git a/test/files/jvm/named-args-in-order/Test.scala b/test/files/jvm/named-args-in-order/Test.scala
new file mode 100644
index 0000000000..36b9cbc1d1
--- /dev/null
+++ b/test/files/jvm/named-args-in-order/Test.scala
@@ -0,0 +1,10 @@
+import scala.tools.partest.BytecodeTest
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("SameBytecode")
+ def sameAsA(meth: String) =
+ sameBytecode(getMethod(classNode, "a"), getMethod(classNode, meth))
+ Seq("b", "c", "d").foreach(sameAsA)
+ }
+}
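
The test above uses partest's BytecodeTest.sameBytecode to assert that in-order named arguments compile to the same method body as positional arguments. As a rough sketch of the idea, under the assumption that the shaded scala.tools.asm tree API is on the classpath (and coarser than partest's check, since it compares opcodes only, not operands):

import scala.collection.JavaConverters._
import scala.tools.asm.tree.MethodNode

object SameOpcodesSketch {
  // Opcode sequence of a method, skipping pseudo-instructions (labels, line
  // numbers, frames), whose getOpcode is -1.
  def opcodes(m: MethodNode): List[Int] =
    m.instructions.iterator.asScala.map(_.getOpcode).filter(_ >= 0).toList

  def sameOpcodes(a: MethodNode, b: MethodNode): Boolean =
    opcodes(a) == opcodes(b)
}
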
diff --git a/test/files/jvm/non-fatal-tests.scala b/test/files/jvm/non-fatal-tests.scala
index 471a9d227a..22c7cba51f 100644
--- a/test/files/jvm/non-fatal-tests.scala
+++ b/test/files/jvm/non-fatal-tests.scala
@@ -7,7 +7,8 @@ trait NonFatalTests {
Seq(new StackOverflowError,
new RuntimeException,
new Exception,
- new Throwable)
+ new Throwable,
+ new NotImplementedError)
//Fatals
val fatals: Seq[Throwable] =
@@ -15,8 +16,7 @@ trait NonFatalTests {
new OutOfMemoryError,
new LinkageError,
new VirtualMachineError {},
- new Throwable with scala.util.control.ControlThrowable,
- new NotImplementedError)
+ new Throwable with scala.util.control.ControlThrowable)
def testFatalsUsingApply(): Unit = {
fatals foreach { t => assert(NonFatal(t) == false) }
diff --git a/test/files/jvm/nooptimise/Foo_1.flags b/test/files/jvm/nooptimise/Foo_1.flags
new file mode 100644
index 0000000000..9686c20775
--- /dev/null
+++ b/test/files/jvm/nooptimise/Foo_1.flags
@@ -0,0 +1 @@
+-optimise -Ynooptimise \ No newline at end of file
diff --git a/test/files/jvm/nooptimise/Foo_1.scala b/test/files/jvm/nooptimise/Foo_1.scala
new file mode 100644
index 0000000000..c6f1b06c8e
--- /dev/null
+++ b/test/files/jvm/nooptimise/Foo_1.scala
@@ -0,0 +1,8 @@
+class Foo_1 {
+ def foo() {
+ // optimization would remove this magic 3 from the emitted bytecode,
+ // so -Ynooptimise should prevent that
+ val x = 3
+
+ }
+}
diff --git a/test/files/jvm/nooptimise/Test.scala b/test/files/jvm/nooptimise/Test.scala
new file mode 100644
index 0000000000..ec8daa6e16
--- /dev/null
+++ b/test/files/jvm/nooptimise/Test.scala
@@ -0,0 +1,23 @@
+import scala.tools.partest.BytecodeTest
+import scala.tools.asm
+import asm.tree.InsnList
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("Foo_1")
+ val methodNode = getMethod(classNode, "foo")
+ // if optimization didn't run then
+ // there should be some useless instructions
+ // with the magic constant 3
+ val expected = 1
+ val got = countMagicThrees(methodNode.instructions)
+ assert(got == expected, s"expected $expected but got $got magic threes")
+ }
+
+ def countMagicThrees(insnList: InsnList): Int = {
+ def isMagicThree(node: asm.tree.AbstractInsnNode): Boolean =
+ (node.getOpcode == asm.Opcodes.ICONST_3)
+ insnList.iterator.asScala.count(isMagicThree)
+ }
+}
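
The test above counts ICONST_3 instructions through partest's BytecodeTest helpers (loadClassNode, getMethod). The same count can be reproduced outside partest by reading the class file with the shaded ASM bundled in the compiler; a minimal sketch under that assumption (object name and argument handling are made up):

import java.io.FileInputStream
import scala.collection.JavaConverters._
import scala.tools.asm.{ClassReader, Opcodes}
import scala.tools.asm.tree.ClassNode

object CountMagicThrees {
  def main(args: Array[String]): Unit = {
    val in = new FileInputStream(args(0)) // path to the compiled Foo_1.class
    val cn = new ClassNode()
    try new ClassReader(in).accept(cn, 0) finally in.close()
    val foo = cn.methods.asScala.find(_.name == "foo").get
    val got = foo.instructions.iterator.asScala.count(_.getOpcode == Opcodes.ICONST_3)
    println(s"ICONST_3 occurrences in foo: $got") // expect 1 when -Ynooptimise is in effect
  }
}
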
diff --git a/test/files/jvm/serialization.scala b/test/files/jvm/serialization.scala
index 34b64938b4..f2c47aad77 100644
--- a/test/files/jvm/serialization.scala
+++ b/test/files/jvm/serialization.scala
@@ -604,6 +604,7 @@ object Test {
Test7
Test8
Test9_parallel
+ Test10_util
}
}
@@ -669,3 +670,17 @@ object Test9_parallel {
throw e
}
}
+
+//############################################################################
+// Test classes in package scala.util
+
+object Test10_util {
+ import scala.util.Random
+ def rep[A](n: Int)(f: => A) { if (n > 0) { f; rep(n-1)(f) } }
+
+ try {
+ val random = new Random(345)
+ val random2: Random = read(write(random))
+ rep(5) { assert(random.nextInt == random2.nextInt) }
+ }
+}
diff --git a/test/files/jvm/t1143-2/t1143-2.scala b/test/files/jvm/t1143-2/t1143-2.scala
index 44b1febd8b..13ab13b48c 100644
--- a/test/files/jvm/t1143-2/t1143-2.scala
+++ b/test/files/jvm/t1143-2/t1143-2.scala
@@ -16,43 +16,39 @@ object Serialize {
}
}
-@serializable
@SerialVersionUID(1L)
-class VarModel[T]( getter: => T, setter: T => Unit )
-{
+class VarModel[T](getter: => T, setter: T => Unit) extends Serializable {
Serialize.write(getter)
Serialize.write(setter)
- def this( getter: => T ) = this( getter, null )
+ def this(getter: => T) = this(getter, null)
def getObject: AnyRef = getter.asInstanceOf[AnyRef]
- def setObject( v: AnyRef ) = {
- if( setter==null )
- throw new RuntimeException( "Tried to set readonly model!")
- setter( v.asInstanceOf[T] )
+ def setObject(v: AnyRef) = {
+ if(setter==null)
+ throw new RuntimeException("Tried to set readonly model!")
+ setter(v.asInstanceOf[T])
}
def detach = ()
}
-@serializable
@SerialVersionUID(1L)
-class Printer( p: VarModel[String] ) {
- def print = println( p.getObject );
+class Printer(p: VarModel[String]) extends Serializable {
+ def print = println(p.getObject)
}
class Component extends Marker { }
class Form extends Component { }
-@serializable
@SerialVersionUID(1L)
-class Main {
+class Main extends Serializable {
var pass = "pass"
- def main(args : Array[String]) : Unit = {
+ def main(args: Array[String]): Unit = {
val f = new Form {
- val p = new Printer( new VarModel( pass, s => pass = s ) );
+ val p = new Printer(new VarModel(pass, s => pass = s))
p.print
}
()
diff --git a/test/files/jvm/t1143.scala b/test/files/jvm/t1143.scala
index 7dd374f432..eb03c7224e 100644
--- a/test/files/jvm/t1143.scala
+++ b/test/files/jvm/t1143.scala
@@ -16,9 +16,8 @@ object Serialize {
}
}
-@serializable
@SerialVersionUID(1L)
-class VarModel[T](getter: => T, setter: T => Unit) {
+class VarModel[T](getter: => T, setter: T => Unit) extends Serializable {
Serialize.write(getter)
Serialize.write(setter)
@@ -35,23 +34,20 @@ class VarModel[T](getter: => T, setter: T => Unit) {
def detach = ()
}
-@serializable
@SerialVersionUID(1L)
-class Printer(p: VarModel[String]) {
+class Printer(p: VarModel[String]) extends Serializable {
def print = println(p.getObject)
}
-@serializable
@SerialVersionUID(1L)
-class Component {
+class Component extends Serializable {
}
class Form extends Component {
}
-@serializable
@SerialVersionUID(1L)
-class Main {
+class Main extends Serializable {
var pass = "pass"
def main(args: Array[String]) {
val f = new Form {
diff --git a/test/files/jvm/t1342/SI.scala b/test/files/jvm/t1342/SI.scala
index 8e3b753210..7c37d4bcd7 100644
--- a/test/files/jvm/t1342/SI.scala
+++ b/test/files/jvm/t1342/SI.scala
@@ -4,7 +4,7 @@ class SI extends JI {
}
}
-object Test extends Application {
+object Test extends App {
val x: JI = new SI
x.varArgsMethod("one", "two")
}
diff --git a/test/files/jvm/t1600.scala b/test/files/jvm/t1600.scala
index 7e23687425..69179c1ba4 100644
--- a/test/files/jvm/t1600.scala
+++ b/test/files/jvm/t1600.scala
@@ -69,8 +69,7 @@ object Test {
var hashCodeModifier = 0
}
- @serializable
- class Foo {
+ class Foo extends Serializable {
override def hashCode = System.identityHashCode(this) + Foo.hashCodeModifier
}
}
diff --git a/test/files/jvm/ticket2163/ticket2163.java b/test/files/jvm/t2163/t2163.java
index b6511d241c..83bd37d212 100644
--- a/test/files/jvm/ticket2163/ticket2163.java
+++ b/test/files/jvm/t2163/t2163.java
@@ -1,9 +1,9 @@
import java.util.*;
-public class ticket2163 {
+public class t2163 {
public void test() {
List<Integer> array = new ArrayList<Integer>();
- Ticket2163Scala<List> foo = new Ticket2163Scala<List>(array);
+ T2163Scala<List> foo = new T2163Scala<List>(array);
foo.bar(array);
}
}
diff --git a/test/files/jvm/t2163/t2163.scala b/test/files/jvm/t2163/t2163.scala
new file mode 100644
index 0000000000..f73b520cbe
--- /dev/null
+++ b/test/files/jvm/t2163/t2163.scala
@@ -0,0 +1,5 @@
+class T2163Scala[CC[X]](x: CC[Int]) {
+ def bar[DD[X]](meh: DD[Int]): CC[Int] = x
+}
+
+object Test extends App {}
diff --git a/test/files/jvm/t2470.cmds b/test/files/jvm/t2470.cmds
deleted file mode 100644
index b4ef0f4aeb..0000000000
--- a/test/files/jvm/t2470.cmds
+++ /dev/null
@@ -1,3 +0,0 @@
-javac Action.java Task.java
-scalac Test_1.scala
-scalac Read_Classfile_2.scala
diff --git a/test/files/jvm/t2570/Test.scala b/test/files/jvm/t2570/Test.scala
index 7944aedae6..f1cba53546 100644
--- a/test/files/jvm/t2570/Test.scala
+++ b/test/files/jvm/t2570/Test.scala
@@ -1,3 +1,3 @@
class Test2 extends Test1[Test3[Test4]]
class Test4
-object Test extends Application {} \ No newline at end of file
+object Test extends App {} \ No newline at end of file
diff --git a/test/files/jvm/t3003.cmds b/test/files/jvm/t3003.cmds
deleted file mode 100644
index c00396627c..0000000000
--- a/test/files/jvm/t3003.cmds
+++ /dev/null
@@ -1,2 +0,0 @@
-javac Annot.java
-scalac Test_1.scala
diff --git a/test/files/jvm/t3415/HelloWorld.scala b/test/files/jvm/t3415/HelloWorld.scala
index 53bf55e444..5ef012390e 100644
--- a/test/files/jvm/t3415/HelloWorld.scala
+++ b/test/files/jvm/t3415/HelloWorld.scala
@@ -1,4 +1,4 @@
-object Test extends Application {
+object Test extends App {
@Hello
def foo() { }
}
diff --git a/test/files/jvm/ticket4283/AbstractFoo.java b/test/files/jvm/t4283/AbstractFoo.java
index 74f3827fe3..74f3827fe3 100644
--- a/test/files/jvm/ticket4283/AbstractFoo.java
+++ b/test/files/jvm/t4283/AbstractFoo.java
diff --git a/test/files/jvm/ticket4283/ScalaBipp.scala b/test/files/jvm/t4283/ScalaBipp.scala
index 36dea9f4de..36dea9f4de 100644
--- a/test/files/jvm/ticket4283/ScalaBipp.scala
+++ b/test/files/jvm/t4283/ScalaBipp.scala
diff --git a/test/files/jvm/ticket4283/Test.scala b/test/files/jvm/t4283/Test.scala
index 9bbfaab928..9bbfaab928 100644
--- a/test/files/jvm/ticket4283/Test.scala
+++ b/test/files/jvm/t4283/Test.scala
diff --git a/test/files/jvm/t7006/Foo_1.flags b/test/files/jvm/t7006/Foo_1.flags
new file mode 100644
index 0000000000..37b2116413
--- /dev/null
+++ b/test/files/jvm/t7006/Foo_1.flags
@@ -0,0 +1 @@
+-optimise -Ydebug -Xfatal-warnings
diff --git a/test/files/jvm/t7006/Foo_1.scala b/test/files/jvm/t7006/Foo_1.scala
new file mode 100644
index 0000000000..995619ce6b
--- /dev/null
+++ b/test/files/jvm/t7006/Foo_1.scala
@@ -0,0 +1,10 @@
+class Foo_1 {
+ def foo {
+ try {
+ val x = 3 // this will be optimized away, leaving a useless jump-only block
+ } finally {
+ print("hello")
+ }
+ while(true){} // ensure infinite loop doesn't break the algorithm
+ }
+}
diff --git a/test/files/jvm/t7006/Test.scala b/test/files/jvm/t7006/Test.scala
new file mode 100644
index 0000000000..065a23510e
--- /dev/null
+++ b/test/files/jvm/t7006/Test.scala
@@ -0,0 +1,19 @@
+import scala.tools.partest.BytecodeTest
+import scala.tools.asm
+import asm.tree.InsnList
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("Foo_1")
+ val methodNode = getMethod(classNode, "foo")
+ assert(count(methodNode.instructions, asm.Opcodes.NOP) == 0)
+ assert(count(methodNode.instructions, asm.Opcodes.GOTO) == 1)
+ }
+
+ def count(insnList: InsnList, opcode: Int): Int = {
+ def isNop(node: asm.tree.AbstractInsnNode): Boolean =
+ (node.getOpcode == opcode)
+ insnList.iterator.asScala.count(isNop)
+ }
+}
diff --git a/test/files/jvm/t7181/Foo_1.scala b/test/files/jvm/t7181/Foo_1.scala
new file mode 100644
index 0000000000..f9dfdd4442
--- /dev/null
+++ b/test/files/jvm/t7181/Foo_1.scala
@@ -0,0 +1,26 @@
+class Exception1 extends RuntimeException
+class Exception2 extends RuntimeException
+
+class Foo_1 {
+ def foo(baz: Baz) {
+ try {
+ baz.bar
+ } catch {
+ case _: Exception1 => println("exception 1")
+ case _: Exception2 => println("exception 2")
+ } finally {
+ // this should be the only copy of the magic constant 3
+ // making it easy to detect copies of this finally block
+ println(s"finally ${3}")
+ }
+ println(s"normal flow")
+ }
+}
+
+trait Baz {
+ // does it throw? who knows? This way
+ // I can ensure that no optimization that honors
+ // separate compilation could ever
+ // change the exception handling structure
+ def bar: Unit
+}
diff --git a/test/files/jvm/t7181/Test.scala b/test/files/jvm/t7181/Test.scala
new file mode 100644
index 0000000000..35dba436c1
--- /dev/null
+++ b/test/files/jvm/t7181/Test.scala
@@ -0,0 +1,24 @@
+import scala.tools.partest.BytecodeTest
+import scala.tools.asm
+import asm.tree.InsnList
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("Foo_1")
+ val methodNode = getMethod(classNode, "foo")
+ // there should be 2 copies of the finally block, each with the magic constant 3
+ // one for the "normal" exit
+ // one for the uncaught exception exit
+ // prior to this PR there would have been 4 since each exception handler would also get a copy
+ val expected = 2
+ val got = countMagicThrees(methodNode.instructions)
+ assert(got == expected, s"expected $expected but got $got magic threes")
+ }
+
+ def countMagicThrees(insnList: InsnList): Int = {
+ def isMagicThree(node: asm.tree.AbstractInsnNode): Boolean =
+ (node.getOpcode == asm.Opcodes.ICONST_3)
+ insnList.iterator.asScala.count(isMagicThree)
+ }
+}
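
The comments in the test above state the expected count: the finally body is now emitted once for the normal exit and once for the uncaught-exception exit, rather than once per exception handler as well. A rough source-level picture of that shape (an illustration of the expected lowering, not a decompilation; it reuses Baz, Exception1 and Exception2 from Foo_1.scala above):

class FooShapeSketch {
  def foo(baz: Baz): Unit = {
    try {
      try baz.bar
      catch {
        case _: Exception1 => println("exception 1")
        case _: Exception2 => println("exception 2")
      }
      println(s"finally ${3}")   // copy 1: normal exit path
    } catch {
      case t: Throwable =>
        println(s"finally ${3}") // copy 2: uncaught-exception exit path
        throw t
    }
    println(s"normal flow")
  }
}
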
diff --git a/test/files/jvm/ticket2163/ticket2163.scala b/test/files/jvm/ticket2163/ticket2163.scala
deleted file mode 100644
index d30bfe251b..0000000000
--- a/test/files/jvm/ticket2163/ticket2163.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-class Ticket2163Scala[CC[X]](x: CC[Int]) {
- def bar[DD[X]](meh: DD[Int]): CC[Int] = x
-}
-
-object Test extends Application {}
diff --git a/test/files/jvm/typerep.scala b/test/files/jvm/typerep.scala
index 3befc7ff3f..47bd16a467 100644
--- a/test/files/jvm/typerep.scala
+++ b/test/files/jvm/typerep.scala
@@ -280,100 +280,74 @@ object TypeRep {
override def toString = "Nothing"
}
- @serializable
case class ClassRep[A](elemRep: TypeRep[A]) extends TypeRep[Class[A]] {
override def toString = "Class[" + elemRep + "]"
}
- @serializable
case class SomeRep[A](elemRep: TypeRep[A]) extends TypeRep[Some[A]] {
override def toString = "Some[" + elemRep + "]"
}
- @serializable
case class NoneRep[A](elemRep: TypeRep[A]) extends TypeRep[Option[A]] {
override def toString = "None[" + elemRep + "]"
}
-
- @serializable
case class ListRep[A](elemRep: TypeRep[A]) extends TypeRep[List[A]] {
override def toString = "List[" + elemRep + "]"
}
-
- @serializable
case class ArrayRep[A](elemRep: TypeRep[A]) extends TypeRep[Array[A]] {
override def toString = "Array[" + elemRep + "]"
}
-
- @serializable
case class Tuple2Rep[A1, A2](_1: TypeRep[A1], _2: TypeRep[A2]) extends TypeRep[(A1, A2)] {
override def toString = "Tuple2[" + _1 + ", " + _2 + "]"
}
- @serializable
case class Tuple3Rep[A1, A2, A3](_1: TypeRep[A1], _2: TypeRep[A2], _3: TypeRep[A3]) extends TypeRep[Tuple3[A1, A2, A3]] {
override def toString = "Tuple3[" + _1 + ", " + _2 + ", " + _3 + "]"
}
- @serializable
case class Tuple4Rep[A1, A2, A3, A4](_1: TypeRep[A1], _2: TypeRep[A2], _3: TypeRep[A3], _4: TypeRep[A4]) extends TypeRep[Tuple4[A1, A2, A3, A4]] {
override def toString = "Tuple4[" + _1 + ", " + _2 + ", " + _3 + ", " + _4 + "]"
}
- @serializable
case class Tuple5Rep[A1, A2, A3, A4, A5](_1: TypeRep[A1], _2: TypeRep[A2], _3: TypeRep[A3], _4: TypeRep[A4], _5: TypeRep[A5]) extends TypeRep[Tuple5[A1, A2, A3, A4, A5]] {
override def toString = "Tuple5[" + _1 + ", " + _2 + ", " + _3 + ", " + _4 + ", " + _5 + "]"
}
- @serializable
case class Tuple6Rep[A1, A2, A3, A4, A5, A6](val _1: TypeRep[A1], val _2: TypeRep[A2], val _3: TypeRep[A3], val _4: TypeRep[A4], val _5: TypeRep[A5], val _6: TypeRep[A6]) extends TypeRep[Tuple6[A1, A2, A3, A4, A5, A6]] {
override def toString = "Tuple6[" + _1 + ", " + _2 + ", " + _3 + ", " + _4 + ", " + _5 + ", " + _6 + "]"
}
- @serializable
case class Tuple7Rep[A1, A2, A3, A4, A5, A6, A7](val _1: TypeRep[A1], val _2: TypeRep[A2], val _3: TypeRep[A3], val _4: TypeRep[A4], val _5: TypeRep[A5], val _6: TypeRep[A6], val _7: TypeRep[A7]) extends TypeRep[Tuple7[A1, A2, A3, A4, A5, A6, A7]] {
override def toString = "Tuple7[" + _1 + ", " + _2 + ", " + _3 + ", " + _4 + ", " + _5 + ", " + _6 + ", " + _7 + "]"
}
- @serializable
case class Tuple8Rep[A1, A2, A3, A4, A5, A6, A7, A8](val _1: TypeRep[A1], val _2: TypeRep[A2], val _3: TypeRep[A3], val _4: TypeRep[A4], val _5: TypeRep[A5], val _6: TypeRep[A6], val _7: TypeRep[A7], val _8: TypeRep[A8]) extends TypeRep[Tuple8[A1, A2, A3, A4, A5, A6, A7, A8]] {
override def toString = "Tuple8[" + _1 + ", " + _2 + ", " + _3 + ", " + _4 + ", " + _5 + ", " + _6 + ", " + _7 + ", " + _8 + "]"
}
- @serializable
case class Tuple9Rep[A1, A2, A3, A4, A5, A6, A7, A8, A9](val _1: TypeRep[A1], val _2: TypeRep[A2], val _3: TypeRep[A3], val _4: TypeRep[A4], val _5: TypeRep[A5], val _6: TypeRep[A6], val _7: TypeRep[A7], val _8: TypeRep[A8], val _9: TypeRep[A9]) extends TypeRep[Tuple9[A1, A2, A3, A4, A5, A6, A7, A8, A9]] {
override def toString = "Tuple9[" + _1 + ", " + _2 + ", " + _3 + ", " + _4 + ", " + _5 + ", " + _6 + ", " + _7 + ", " + _8 + ", " + _9 + "]"
}
- @serializable
case class Function1Rep[A1, B](a1: TypeRep[A1], b: TypeRep[B]) extends TypeRep[Function1[A1, B]] {
override def toString = "Function1[" + a1 + ", " + b + "]"
}
- @serializable
case class Function2Rep[A1, A2, B](a1: TypeRep[A1], a2: TypeRep[A2], b: TypeRep[B]) extends TypeRep[Function2[A1, A2, B]] {
override def toString = "Function2[" + a1 + ", " + a2 + ", " + b + "]"
}
- @serializable
case class Function3Rep[A1, A2, A3, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], b: TypeRep[B]) extends TypeRep[Function3[A1, A2, A3, B]] {
override def toString = "Function3[" + a1 + ", " + a2 + ", " + a3 + ", " + b + "]"
}
- @serializable
case class Function4Rep[A1, A2, A3, A4, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], a4: TypeRep[A4], b: TypeRep[B]) extends TypeRep[Function4[A1, A2, A3, A4, B]] {
override def toString = "Function4[" + a1 + ", " + a2 + ", " + a3 + ", " + a4 + ", " + b + "]"
}
- @serializable
case class Function5Rep[A1, A2, A3, A4, A5, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], a4: TypeRep[A4], a5: TypeRep[A5], b: TypeRep[B]) extends TypeRep[Function5[A1, A2, A3, A4, A5, B]] {
override def toString = "Function5[" + a1 + ", " + a2 + ", " + a3 + ", " + a4 + ", " + a5 + ", " + b + "]"
}
- @serializable
case class Function6Rep[A1, A2, A3, A4, A5, A6, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], a4: TypeRep[A4], a5: TypeRep[A5], a6: TypeRep[A6], b: TypeRep[B]) extends TypeRep[Function6[A1, A2, A3, A4, A5, A6, B]] {
override def toString = "Function6[" + a1 + ", " + a2 + ", " + a3 + ", " + a4 + ", " + a5 + ", " + a6 + ", " + b + "]"
}
- @serializable
case class Function7Rep[A1, A2, A3, A4, A5, A6, A7, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], a4: TypeRep[A4], a5: TypeRep[A5], a6: TypeRep[A6], a7: TypeRep[A7], b: TypeRep[B]) extends TypeRep[Function7[A1, A2, A3, A4, A5, A6, A7, B]] {
override def toString = "Function7[" + a1 + ", " + a2 + ", " + a3 + ", " + a4 + ", " + a5 + ", " + a6 + ", " + a7 + ", " + b + "]"
}
- @serializable
case class Function8Rep[A1, A2, A3, A4, A5, A6, A7, A8, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], a4: TypeRep[A4], a5: TypeRep[A5], a6: TypeRep[A6], a7: TypeRep[A7], a8: TypeRep[A8], b: TypeRep[B]) extends TypeRep[Function8[A1, A2, A3, A4, A5, A6, A7, A8, B]] {
override def toString = "Function8[" + a1 + ", " + a2 + ", " + a3 + ", " + a4 + ", " + a5 + ", " + a6 + ", " + a7 + ", " + a8 + b + "]"
}
- @serializable
case class Function9Rep[A1, A2, A3, A4, A5, A6, A7, A8, A9, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], a4: TypeRep[A4], a5: TypeRep[A5], a6: TypeRep[A6], a7: TypeRep[A7], a8: TypeRep[A8], a9: TypeRep[A9], b: TypeRep[B]) extends TypeRep[Function9[A1, A2, A3, A4, A5, A6, A7, A8, A9, B]] {
override def toString = "Function9[" + a1 + ", " + a2 + ", " + a3 + ", " + a4 + ", " + a5 + ", " + a6 + ", " + a7 + ", " + a8 + ", " + b + "]"
}
/*
- @serializable
case class ObjectRep[A](c: Class) extends TypeRep[A] {
override def toString = c.getName
}
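
The deleted @serializable annotation has long been deprecated; the supported way to mark a class serializable is to extend Serializable, and case classes such as the ones in this file already do so automatically. A minimal sketch of the modern spelling, with an illustrative class name:

    // Old:  @serializable class Repr[A](val elem: A) { ... }
    // New:  mix in Serializable explicitly (case classes get it for free).
    class Repr[A](val elem: A) extends Serializable {
      override def toString = "Repr[" + elem + "]"
    }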
diff --git a/test/files/jvm/unreachable/Foo_1.flags b/test/files/jvm/unreachable/Foo_1.flags
new file mode 100644
index 0000000000..ce6e93b3da
--- /dev/null
+++ b/test/files/jvm/unreachable/Foo_1.flags
@@ -0,0 +1 @@
+-Ynooptimise \ No newline at end of file
diff --git a/test/files/jvm/unreachable/Foo_1.scala b/test/files/jvm/unreachable/Foo_1.scala
new file mode 100644
index 0000000000..d17421c516
--- /dev/null
+++ b/test/files/jvm/unreachable/Foo_1.scala
@@ -0,0 +1,110 @@
+class Foo_1 {
+ def unreachableNormalExit: Int = {
+ return 42
+ 0
+ }
+
+ def unreachableIf: Int = {
+ return 42
+ if (util.Random.nextInt % 2 == 0)
+ 0
+ else
+ 1
+ }
+
+ def unreachableIfBranches: Int = {
+ if (util.Random.nextInt % 2 == 0)
+ return 42
+ else
+ return 42
+
+ return 0
+ }
+
+ def unreachableOneLegIf: Int = {
+ if (util.Random.nextInt % 2 == 0)
+ return 42
+
+ return 42
+ }
+
+ def unreachableLeftBranch: Int = {
+ val result = if (util.Random.nextInt % 2 == 0)
+ return 42
+ else
+ 42
+
+ return result
+ }
+
+ def unreachableRightBranch: Int = {
+ val result = if (util.Random.nextInt % 2 == 0)
+ 42
+ else
+ return 42
+
+ return result
+ }
+
+ def unreachableTryCatchFinally: Int = {
+ return 42
+ try {
+ return 0
+ } catch {
+ case x: Throwable => return 1
+ } finally {
+ return 2
+ }
+ return 3
+ }
+
+ def unreachableAfterTry: Int = {
+ try {
+ return 42
+ } catch {
+ case x: Throwable => return 2
+ }
+ return 3
+ }
+
+ def unreachableAfterCatch: Int = {
+ try {
+ error("haha")
+ } catch {
+ case x: Throwable => return 42
+ }
+ return 3
+ }
+
+ def unreachableAfterFinally: Int = {
+ try {
+ return 1
+ } catch {
+ case x: Throwable => return 2
+ } finally {
+ return 42
+ }
+ return 3
+ }
+
+ def unreachableSwitch: Int = {
+ return 42
+ val x = util.Random.nextInt % 2
+ x match {
+ case 0 => return 0
+ case 1 => return 1
+ case _ => error("wtf")
+ }
+ 2
+ }
+
+ def unreachableAfterSwitch: Int = {
+ val x = util.Random.nextInt % 2
+ x match {
+ case 0 => return 42
+ case 1 => return 41 + x
+ case _ => error("wtf")
+ }
+ 2
+ }
+} \ No newline at end of file
diff --git a/test/files/jvm/unreachable/Test.scala b/test/files/jvm/unreachable/Test.scala
new file mode 100644
index 0000000000..3f520eb106
--- /dev/null
+++ b/test/files/jvm/unreachable/Test.scala
@@ -0,0 +1,23 @@
+import scala.tools.partest.BytecodeTest
+import scala.tools.asm
+import asm.tree.InsnList
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("Foo_1")
+ // Foo_1 is full of unreachable code which if not eliminated
+ // will result in NOPs as can be confirmed by adding -Ydisable-unreachable-prevention
+ // to Foo_1.flags
+ for (methodNode <- classNode.methods.asScala) {
+ val got = count(methodNode.instructions, asm.Opcodes.NOP)
+ if (got != 0) println(s"Found $got NOP(s) in ${methodNode.name}")
+ }
+ }
+
+ def count(insnList: InsnList, opcode: Int): Int = {
+ def isNop(node: asm.tree.AbstractInsnNode): Boolean =
+ (node.getOpcode == opcode)
+ insnList.iterator.asScala.count(isNop)
+ }
+} \ No newline at end of file
diff --git a/test/files/jvm/value-class-boxing.check b/test/files/jvm/value-class-boxing.check
new file mode 100644
index 0000000000..20a9fe2ba8
--- /dev/null
+++ b/test/files/jvm/value-class-boxing.check
@@ -0,0 +1,7 @@
+a2 and a1: bytecode identical
+a3 and a1: bytecode identical
+a4 and a1: bytecode identical
+b2 and b1: bytecode identical
+b3 and b1: bytecode identical
+b4 and b1: bytecode identical
+b5 and b1: bytecode identical
diff --git a/test/files/jvm/value-class-boxing/Analyzed_1.scala b/test/files/jvm/value-class-boxing/Analyzed_1.scala
new file mode 100644
index 0000000000..dec8565351
--- /dev/null
+++ b/test/files/jvm/value-class-boxing/Analyzed_1.scala
@@ -0,0 +1,17 @@
+class Wrap(val x: Int) extends AnyVal {
+ def ***(other: Bip): Wrap = new Wrap(x * other.x)
+}
+class Bip(val x: Int) extends AnyVal
+
+class SameBytecode {
+ def a1(x: Int, y: Int): Int = x + y
+ def a2(x: Wrap, y: Wrap): Wrap = new Wrap(x.x + y.x)
+ def a3(x: Int, y: Wrap): Wrap = new Wrap(x + y.x)
+ def a4(x: Int, y: Wrap): Int = x + y.x
+
+ def b1(x: Wrap, y: Int): Int = (x *** new Bip(y)).x
+ def b2(x: Wrap, y: Bip): Wrap = x *** y
+ def b3(x: Wrap, y: Int): Wrap = x *** new Bip(y)
+ def b4(x: Wrap, y: Bip): Bip = new Bip((x *** y).x)
+ def b5(x: Wrap, y: Int): Bip = new Bip((x *** new Bip(y)).x)
+}
diff --git a/test/files/jvm/value-class-boxing/test.scala b/test/files/jvm/value-class-boxing/test.scala
new file mode 100644
index 0000000000..cf331832de
--- /dev/null
+++ b/test/files/jvm/value-class-boxing/test.scala
@@ -0,0 +1,15 @@
+import scala.tools.partest.BytecodeTest
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("SameBytecode")
+ List("a2", "a3", "a4") foreach { m =>
+ print(m + " and a1: ")
+ sameBytecode(getMethod(classNode, "a1"), getMethod(classNode, m))
+ }
+ List("b2", "b3", "b4", "b5") foreach { m =>
+ print(m + " and b1: ")
+ sameBytecode(getMethod(classNode, "b1"), getMethod(classNode, m))
+ }
+ }
+}
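
The pairs compared above are expected to be bytecode-identical because Wrap and Bip are value classes: after erasure their operations run on the underlying Int through extension methods, so no wrapper object is allocated on these paths. A rough sketch of the pattern, using an illustrative class name:

    // A value class wraps exactly one val and extends AnyVal.
    class Meters(val value: Int) extends AnyVal {
      def +(other: Meters): Meters = new Meters(value + other.value)
    }

    object MetersExample {
      // At the bytecode level this works on plain Ints; no Meters instance
      // is created, which is the property sameBytecode verifies above.
      def add(a: Int, b: Int): Int = (new Meters(a) + new Meters(b)).value
    }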
diff --git a/test/files/jvm/xmlattr.scala b/test/files/jvm/xmlattr.scala
index d214642eb6..6423268ba7 100644
--- a/test/files/jvm/xmlattr.scala
+++ b/test/files/jvm/xmlattr.scala
@@ -6,6 +6,7 @@ object Test {
UnprefixedAttributeTest()
AttributeWithOptionTest()
AttributeOutputTest()
+ AttributeOperatorTest()
}
object UnprefixedAttributeTest {
@@ -60,4 +61,10 @@ object Test {
}
}
+ object AttributeOperatorTest {
+ def apply() {
+ val xml = <foo bar="apple" />
+ assert(xml \@ "bar" == "apple")
+ }
+ }
}
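
The new AttributeOperatorTest covers the \@ selector, which returns an attribute's text directly; the older route selects the attribute node with (node \ "@name") and then takes .text. A small usage sketch:

    object XmlAttrExample {
      def main(args: Array[String]): Unit = {
        val xml = <foo bar="apple"/>
        val viaOperator: String = xml \@ "bar"          // "apple"
        val viaSelect: String   = (xml \ "@bar").text   // older, equivalent form
        assert(viaOperator == viaSelect)
      }
    }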
diff --git a/test/files/lib/jsoup-1.3.1.jar.desired.sha1 b/test/files/lib/jsoup-1.3.1.jar.desired.sha1
new file mode 100644
index 0000000000..46fa3dae9d
--- /dev/null
+++ b/test/files/lib/jsoup-1.3.1.jar.desired.sha1
@@ -0,0 +1 @@
+346d3dff4088839d6b4d163efa2892124039d216 ?jsoup-1.3.1.jar
diff --git a/test/files/neg/abstract-inaccessible.check b/test/files/neg/abstract-inaccessible.check
index 42b98ac026..d56f5691be 100644
--- a/test/files/neg/abstract-inaccessible.check
+++ b/test/files/neg/abstract-inaccessible.check
@@ -1,13 +1,15 @@
-abstract-inaccessible.scala:5: error: method implementMe in trait YourTrait references private[foo] trait Bippy.
+abstract-inaccessible.scala:5: warning: method implementMe in trait YourTrait references private[foo] trait Bippy.
Classes which cannot access Bippy may be unable to provide a concrete implementation of implementMe.
def implementMe(f: Int => (String, Bippy)): Unit
^
-abstract-inaccessible.scala:6: error: method overrideMe in trait YourTrait references private[foo] trait Bippy.
+abstract-inaccessible.scala:6: warning: method overrideMe in trait YourTrait references private[foo] trait Bippy.
Classes which cannot access Bippy may be unable to override overrideMe.
def overrideMe[T <: Bippy](x: T): T = x
^
-abstract-inaccessible.scala:7: error: method overrideMeAlso in trait YourTrait references private[foo] trait Bippy.
+abstract-inaccessible.scala:7: warning: method overrideMeAlso in trait YourTrait references private[foo] trait Bippy.
Classes which cannot access Bippy may be unable to override overrideMeAlso.
def overrideMeAlso(x: Map[Int, Set[Bippy]]) = 5
^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/ambiguous-float-dots.check b/test/files/neg/ambiguous-float-dots.check
index 6c21056d7a..cdd2d6fa2a 100644
--- a/test/files/neg/ambiguous-float-dots.check
+++ b/test/files/neg/ambiguous-float-dots.check
@@ -1,16 +1,27 @@
-ambiguous-float-dots.scala:2: error: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.
+ambiguous-float-dots.scala:2: warning: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.
val x0 = 5.
^
-ambiguous-float-dots.scala:6: error: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.
+ambiguous-float-dots.scala:6: warning: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.
val x1 = 5.f
^
-ambiguous-float-dots.scala:7: error: Treating numbers with a leading zero as octal is deprecated.
+ambiguous-float-dots.scala:7: warning: Treating numbers with a leading zero as octal is deprecated.
val y0 = 055
^
-ambiguous-float-dots.scala:11: error: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.
+ambiguous-float-dots.scala:11: warning: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.
1.+(2)
^
-ambiguous-float-dots.scala:12: error: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.
+ambiguous-float-dots.scala:12: warning: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.
1. + 2
^
-5 errors found
+ambiguous-float-dots.scala:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 1.+(2)
+ ^
+ambiguous-float-dots.scala:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 1. + 2
+ ^
+ambiguous-float-dots.scala:13: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 1 + 2
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+8 warnings found
+one error found
diff --git a/test/files/neg/annot-nonconst.check b/test/files/neg/annot-nonconst.check
index b43e58a0ca..5b3da7a13c 100644
--- a/test/files/neg/annot-nonconst.check
+++ b/test/files/neg/annot-nonconst.check
@@ -8,7 +8,7 @@ make your annotation visible at runtime. If that is what
you want, you must write the annotation class in Java.
class Ann2(value: String) extends annotation.ClassfileAnnotation
^
-annot-nonconst.scala:6: error: annotation argument needs to be a constant; found: n
+annot-nonconst.scala:6: error: annotation argument needs to be a constant; found: Test.this.n
@Length(n) def foo = "foo"
^
annot-nonconst.scala:7: error: annotation argument cannot be null
diff --git a/test/files/neg/anyval-anyref-parent.check b/test/files/neg/anyval-anyref-parent.check
index fe20e5de81..8c2aa36583 100644
--- a/test/files/neg/anyval-anyref-parent.check
+++ b/test/files/neg/anyval-anyref-parent.check
@@ -3,7 +3,7 @@ trait Foo2 extends AnyVal // fail
^
anyval-anyref-parent.scala:5: error: Any does not have a constructor
class Bar1 extends Any // fail
- ^
+ ^
anyval-anyref-parent.scala:6: error: value class needs to have exactly one public val parameter
class Bar2(x: Int) extends AnyVal // fail
^
diff --git a/test/files/neg/array-not-seq.check b/test/files/neg/array-not-seq.check
deleted file mode 100644
index a3a639e772..0000000000
--- a/test/files/neg/array-not-seq.check
+++ /dev/null
@@ -1,13 +0,0 @@
-array-not-seq.scala:2: error: An Array will no longer match as Seq[_].
- def f1(x: Any) = x.isInstanceOf[Seq[_]]
- ^
-array-not-seq.scala:4: error: An Array will no longer match as Seq[_].
- case _: Seq[_] => true
- ^
-array-not-seq.scala:16: error: An Array will no longer match as Seq[_].
- case (Some(_: Seq[_]), Nil, _) => 1
- ^
-array-not-seq.scala:17: error: An Array will no longer match as Seq[_].
- case (None, List(_: List[_], _), _) => 2
- ^
-four errors found
diff --git a/test/files/neg/array-not-seq.flags b/test/files/neg/array-not-seq.flags
deleted file mode 100644
index 4e9f7e4a56..0000000000
--- a/test/files/neg/array-not-seq.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xmigration -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/array-not-seq.scala b/test/files/neg/array-not-seq.scala
deleted file mode 100644
index 5f367bdd85..0000000000
--- a/test/files/neg/array-not-seq.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-object Test {
- def f1(x: Any) = x.isInstanceOf[Seq[_]]
- def f2(x: Any) = x match {
- case _: Seq[_] => true
- case _ => false
- }
-
- def f3(x: Any) = x match {
- case _: Array[_] => true
- case _ => false
- }
-
- def f4(x: Any) = x.isInstanceOf[Traversable[_]]
-
- def f5(x1: Any, x2: Any, x3: AnyRef) = (x1, x2, x3) match {
- case (Some(_: Seq[_]), Nil, _) => 1
- case (None, List(_: List[_], _), _) => 2
- case _ => 3
- }
-
- def main(args: Array[String]): Unit = {
- // println(f1(Array(1)))
- // println(f2(Array(1)))
- // println(f3(Array(1))
- }
-}
diff --git a/test/files/neg/case-collision.check b/test/files/neg/case-collision.check
index 4edc6f1205..22cf105a4f 100644
--- a/test/files/neg/case-collision.check
+++ b/test/files/neg/case-collision.check
@@ -1,10 +1,12 @@
-case-collision.scala:5: error: Class foo.BIPPY differs only in case from foo.Bippy. Such classes will overwrite one another on case-insensitive filesystems.
+case-collision.scala:5: warning: Class foo.BIPPY differs only in case from foo.Bippy. Such classes will overwrite one another on case-insensitive filesystems.
class BIPPY
^
-case-collision.scala:11: error: Class foo.HyRaX$ differs only in case from foo.Hyrax$. Such classes will overwrite one another on case-insensitive filesystems.
+case-collision.scala:11: warning: Class foo.HyRaX$ differs only in case from foo.Hyrax$. Such classes will overwrite one another on case-insensitive filesystems.
object HyRaX
^
-case-collision.scala:8: error: Class foo.DINGO$ differs only in case from foo.Dingo$. Such classes will overwrite one another on case-insensitive filesystems.
+case-collision.scala:8: warning: Class foo.DINGO$ differs only in case from foo.Dingo$. Such classes will overwrite one another on case-insensitive filesystems.
object DINGO
^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/catch-all.check b/test/files/neg/catch-all.check
index 62f895cc7e..aaf51480c3 100644
--- a/test/files/neg/catch-all.check
+++ b/test/files/neg/catch-all.check
@@ -1,10 +1,12 @@
-catch-all.scala:2: error: This catches all Throwables. If this is really intended, use `case _ : Throwable` to clear this warning.
+catch-all.scala:2: warning: This catches all Throwables. If this is really intended, use `case _ : Throwable` to clear this warning.
try { "warn" } catch { case _ => }
^
-catch-all.scala:4: error: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning.
+catch-all.scala:4: warning: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning.
try { "warn" } catch { case x => }
^
-catch-all.scala:6: error: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning.
+catch-all.scala:6: warning: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning.
try { "warn" } catch { case _: RuntimeException => ; case x => }
^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
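
The three warnings above fire because a bare case _ => or case x => in a catch clause silently swallows every Throwable, including fatal VM errors. The fix the message suggests is to name Throwable explicitly; matching with scala.util.control.NonFatal is the common alternative when fatal errors should keep propagating. A brief sketch:

    import scala.util.control.NonFatal

    object CatchAllExample {
      // Explicit Throwable clears the warning but still catches everything.
      def parseOrZero(s: String): Int =
        try s.toInt catch { case _: Throwable => 0 }

      // NonFatal lets OutOfMemoryError and similar fatal errors propagate.
      def parseNonFatal(s: String): Int =
        try s.toInt catch { case NonFatal(_) => 0 }
    }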
diff --git a/test/files/neg/check-dead.check b/test/files/neg/check-dead.check
index 29601c1d4a..2150a942bf 100644
--- a/test/files/neg/check-dead.check
+++ b/test/files/neg/check-dead.check
@@ -1,13 +1,15 @@
-check-dead.scala:7: error: dead code following this construct
+check-dead.scala:7: warning: dead code following this construct
def z1 = y1(throw new Exception) // should warn
^
-check-dead.scala:10: error: dead code following this construct
+check-dead.scala:10: warning: dead code following this construct
def z2 = y2(throw new Exception) // should warn
^
-check-dead.scala:29: error: dead code following this construct
+check-dead.scala:29: warning: dead code following this construct
throw new Exception // should warn
^
-check-dead.scala:33: error: dead code following this construct
+check-dead.scala:33: warning: dead code following this construct
throw new Exception // should warn
^
-four errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/checksensible.check b/test/files/neg/checksensible.check
index d785179a56..e5f1a38d96 100644
--- a/test/files/neg/checksensible.check
+++ b/test/files/neg/checksensible.check
@@ -1,100 +1,102 @@
-checksensible.scala:13: error: comparing a fresh object using `eq' will always yield false
+checksensible.scala:13: warning: comparing a fresh object using `eq' will always yield false
(new AnyRef) eq (new AnyRef)
^
-checksensible.scala:14: error: comparing a fresh object using `ne' will always yield true
+checksensible.scala:14: warning: comparing a fresh object using `ne' will always yield true
(new AnyRef) ne (new AnyRef)
^
-checksensible.scala:15: error: comparing a fresh object using `eq' will always yield false
+checksensible.scala:15: warning: comparing a fresh object using `eq' will always yield false
Shmoopie eq (new AnyRef)
^
-checksensible.scala:16: error: comparing a fresh object using `eq' will always yield false
+checksensible.scala:16: warning: comparing a fresh object using `eq' will always yield false
(Shmoopie: AnyRef) eq (new AnyRef)
^
-checksensible.scala:17: error: comparing a fresh object using `eq' will always yield false
+checksensible.scala:17: warning: comparing a fresh object using `eq' will always yield false
(new AnyRef) eq Shmoopie
^
-checksensible.scala:18: error: comparing a fresh object using `eq' will always yield false
+checksensible.scala:18: warning: comparing a fresh object using `eq' will always yield false
(new AnyRef) eq null
^
-checksensible.scala:19: error: comparing a fresh object using `eq' will always yield false
+checksensible.scala:19: warning: comparing a fresh object using `eq' will always yield false
null eq new AnyRef
^
-checksensible.scala:26: error: comparing values of types Unit and Int using `==' will always yield false
+checksensible.scala:26: warning: comparing values of types Unit and Int using `==' will always yield false
(c = 1) == 0
^
-checksensible.scala:27: error: comparing values of types Int and Unit using `==' will always yield false
+checksensible.scala:27: warning: comparing values of types Int and Unit using `==' will always yield false
0 == (c = 1)
^
-checksensible.scala:29: error: comparing values of types Int and String using `==' will always yield false
+checksensible.scala:29: warning: comparing values of types Int and String using `==' will always yield false
1 == "abc"
^
-checksensible.scala:33: error: comparing values of types Some[Int] and Int using `==' will always yield false
+checksensible.scala:33: warning: comparing values of types Some[Int] and Int using `==' will always yield false
Some(1) == 1 // as above
^
-checksensible.scala:38: error: comparing a fresh object using `==' will always yield false
+checksensible.scala:38: warning: comparing a fresh object using `==' will always yield false
new AnyRef == 1
^
-checksensible.scala:41: error: comparing values of types Int and Boolean using `==' will always yield false
+checksensible.scala:41: warning: comparing values of types Int and Boolean using `==' will always yield false
1 == (new java.lang.Boolean(true))
^
-checksensible.scala:43: error: comparing values of types Int and Boolean using `!=' will always yield true
+checksensible.scala:43: warning: comparing values of types Int and Boolean using `!=' will always yield true
1 != true
^
-checksensible.scala:44: error: comparing values of types Unit and Boolean using `==' will always yield false
+checksensible.scala:44: warning: comparing values of types Unit and Boolean using `==' will always yield false
() == true
^
-checksensible.scala:45: error: comparing values of types Unit and Unit using `==' will always yield true
+checksensible.scala:45: warning: comparing values of types Unit and Unit using `==' will always yield true
() == ()
^
-checksensible.scala:46: error: comparing values of types Unit and Unit using `==' will always yield true
+checksensible.scala:46: warning: comparing values of types Unit and Unit using `==' will always yield true
() == println
^
-checksensible.scala:47: error: comparing values of types Unit and scala.runtime.BoxedUnit using `==' will always yield true
+checksensible.scala:47: warning: comparing values of types Unit and scala.runtime.BoxedUnit using `==' will always yield true
() == scala.runtime.BoxedUnit.UNIT // these should warn for always being true/false
^
-checksensible.scala:48: error: comparing values of types scala.runtime.BoxedUnit and Unit using `!=' will always yield false
+checksensible.scala:48: warning: comparing values of types scala.runtime.BoxedUnit and Unit using `!=' will always yield false
scala.runtime.BoxedUnit.UNIT != ()
^
-checksensible.scala:51: error: comparing values of types Int and Unit using `!=' will always yield true
+checksensible.scala:51: warning: comparing values of types Int and Unit using `!=' will always yield true
(1 != println)
^
-checksensible.scala:52: error: comparing values of types Int and Symbol using `!=' will always yield true
+checksensible.scala:52: warning: comparing values of types Int and Symbol using `!=' will always yield true
(1 != 'sym)
^
-checksensible.scala:58: error: comparing a fresh object using `==' will always yield false
+checksensible.scala:58: warning: comparing a fresh object using `==' will always yield false
((x: Int) => x + 1) == null
^
-checksensible.scala:59: error: comparing a fresh object using `==' will always yield false
+checksensible.scala:59: warning: comparing a fresh object using `==' will always yield false
Bep == ((_: Int) + 1)
^
-checksensible.scala:61: error: comparing a fresh object using `==' will always yield false
+checksensible.scala:61: warning: comparing a fresh object using `==' will always yield false
new Object == new Object
^
-checksensible.scala:62: error: comparing a fresh object using `==' will always yield false
+checksensible.scala:62: warning: comparing a fresh object using `==' will always yield false
new Object == "abc"
^
-checksensible.scala:63: error: comparing a fresh object using `!=' will always yield true
+checksensible.scala:63: warning: comparing a fresh object using `!=' will always yield true
new Exception() != new Exception()
^
-checksensible.scala:66: error: comparing values of types Int and Null using `==' will always yield false
+checksensible.scala:66: warning: comparing values of types Int and Null using `==' will always yield false
if (foo.length == null) "plante" else "plante pas"
^
-checksensible.scala:71: error: comparing values of types Bip and Bop using `==' will always yield false
+checksensible.scala:71: warning: comparing values of types Bip and Bop using `==' will always yield false
(x1 == x2)
^
-checksensible.scala:81: error: comparing values of types EqEqRefTest.this.C3 and EqEqRefTest.this.Z1 using `==' will always yield false
+checksensible.scala:81: warning: comparing values of types EqEqRefTest.this.C3 and EqEqRefTest.this.Z1 using `==' will always yield false
c3 == z1
^
-checksensible.scala:82: error: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `==' will always yield false
+checksensible.scala:82: warning: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `==' will always yield false
z1 == c3
^
-checksensible.scala:83: error: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `!=' will always yield true
+checksensible.scala:83: warning: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `!=' will always yield true
z1 != c3
^
-checksensible.scala:84: error: comparing values of types EqEqRefTest.this.C3 and String using `!=' will always yield true
+checksensible.scala:84: warning: comparing values of types EqEqRefTest.this.C3 and String using `!=' will always yield true
c3 != "abc"
^
-checksensible.scala:95: error: comparing values of types Unit and Int using `!=' will always yield true
+checksensible.scala:95: warning: comparing values of types Unit and Int using `!=' will always yield true
while ((c = in.read) != -1)
^
-33 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+33 warnings found
+one error found
diff --git a/test/files/neg/classmanifests_new_deprecations.check b/test/files/neg/classmanifests_new_deprecations.check
index 12428c7626..5f9d0a1ccc 100644
--- a/test/files/neg/classmanifests_new_deprecations.check
+++ b/test/files/neg/classmanifests_new_deprecations.check
@@ -1,31 +1,33 @@
-classmanifests_new_deprecations.scala:2: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:2: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead
def cm1[T: ClassManifest] = ???
^
-classmanifests_new_deprecations.scala:3: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:3: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead
def cm2[T](implicit evidence$1: ClassManifest[T]) = ???
^
-classmanifests_new_deprecations.scala:4: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:4: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead
val cm3: ClassManifest[Int] = null
^
-classmanifests_new_deprecations.scala:4: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:4: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead
val cm3: ClassManifest[Int] = null
^
-classmanifests_new_deprecations.scala:6: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:6: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
def rcm1[T: scala.reflect.ClassManifest] = ???
^
-classmanifests_new_deprecations.scala:7: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:7: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
def rcm2[T](implicit evidence$1: scala.reflect.ClassManifest[T]) = ???
^
-classmanifests_new_deprecations.scala:8: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:8: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
val rcm3: scala.reflect.ClassManifest[Int] = null
^
-classmanifests_new_deprecations.scala:8: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:8: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
val rcm3: scala.reflect.ClassManifest[Int] = null
^
-classmanifests_new_deprecations.scala:10: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:10: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead
type CM[T] = ClassManifest[T]
^
-classmanifests_new_deprecations.scala:15: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+classmanifests_new_deprecations.scala:15: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
type RCM[T] = scala.reflect.ClassManifest[T]
^
-10 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+10 warnings found
+one error found
diff --git a/test/files/neg/cycle-bounds.check b/test/files/neg/cycle-bounds.check
new file mode 100644
index 0000000000..d924838aec
--- /dev/null
+++ b/test/files/neg/cycle-bounds.check
@@ -0,0 +1,4 @@
+cycle-bounds.scala:5: error: illegal cyclic reference involving type T
+class NotOk[T <: Comparable[_ <: T]]
+ ^
+one error found
diff --git a/test/files/neg/cycle-bounds.flags b/test/files/neg/cycle-bounds.flags
new file mode 100644
index 0000000000..ca20f55172
--- /dev/null
+++ b/test/files/neg/cycle-bounds.flags
@@ -0,0 +1 @@
+-Ybreak-cycles
diff --git a/test/files/neg/cycle-bounds.scala b/test/files/neg/cycle-bounds.scala
new file mode 100644
index 0000000000..0b43bc703e
--- /dev/null
+++ b/test/files/neg/cycle-bounds.scala
@@ -0,0 +1,5 @@
+// This should be allowed
+class Ok[T <: Comparable[_ >: T]]
+
+// This is (il)legitimately a cyclic reference
+class NotOk[T <: Comparable[_ <: T]]
diff --git a/test/files/neg/cyclics-import.check b/test/files/neg/cyclics-import.check
index ef355fab0a..be09fca374 100644
--- a/test/files/neg/cyclics-import.check
+++ b/test/files/neg/cyclics-import.check
@@ -3,13 +3,4 @@ Note: this is often due in part to a class depending on a definition nested with
If applicable, you may wish to try moving some members into another object.
import User.UserStatus._
^
-cyclics-import.scala:12: error: not found: type Value
- type UserStatus = Value
- ^
-cyclics-import.scala:14: error: not found: value Value
- val Active = Value("1")
- ^
-cyclics-import.scala:15: error: not found: value Value
- val Disabled = Value("2")
- ^
-four errors found
+one error found
diff --git a/test/files/neg/dbldef.check b/test/files/neg/dbldef.check
index 3ee63475e4..b896c4cdcf 100644
--- a/test/files/neg/dbldef.check
+++ b/test/files/neg/dbldef.check
@@ -6,9 +6,7 @@ dbldef.scala:1: error: type mismatch;
required: Int
case class test0(x: Int, x: Float)
^
-dbldef.scala:1: error: type mismatch;
- found : Float
- required: Int
+dbldef.scala:1: error: in class test0, multiple overloaded alternatives of x define default arguments
case class test0(x: Int, x: Float)
^
three errors found
diff --git a/test/files/neg/eta-expand-star-deprecation.check b/test/files/neg/eta-expand-star-deprecation.check
new file mode 100644
index 0000000000..a79f0df76c
--- /dev/null
+++ b/test/files/neg/eta-expand-star-deprecation.check
@@ -0,0 +1,4 @@
+warning: -Yeta-expand-keeps-star is deprecated: This flag is scheduled for removal in 2.12. If you have a case where you need this flag then please report a bug.
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/eta-expand-star-deprecation.flags b/test/files/neg/eta-expand-star-deprecation.flags
new file mode 100644
index 0000000000..5ac8b638e4
--- /dev/null
+++ b/test/files/neg/eta-expand-star-deprecation.flags
@@ -0,0 +1 @@
+-Yeta-expand-keeps-star -deprecation -Xfatal-warnings
diff --git a/test/files/neg/eta-expand-star-deprecation.scala b/test/files/neg/eta-expand-star-deprecation.scala
new file mode 100644
index 0000000000..5749692522
--- /dev/null
+++ b/test/files/neg/eta-expand-star-deprecation.scala
@@ -0,0 +1,8 @@
+object Test {
+ def f[T](xs: T*): Unit = ()
+ def g[T] = f[T] _
+
+ def main(args: Array[String]): Unit = {
+ g(1, 2)
+ }
+}
diff --git a/test/files/neg/exhausting.check b/test/files/neg/exhausting.check
index 0f0d13cb33..c573eb3e15 100644
--- a/test/files/neg/exhausting.check
+++ b/test/files/neg/exhausting.check
@@ -1,25 +1,27 @@
-exhausting.scala:21: error: match may not be exhaustive.
+exhausting.scala:21: warning: match may not be exhaustive.
It would fail on the following input: List(_, _, _)
def fail1[T](xs: List[T]) = xs match {
^
-exhausting.scala:27: error: match may not be exhaustive.
+exhausting.scala:27: warning: match may not be exhaustive.
It would fail on the following input: Nil
def fail2[T](xs: List[T]) = xs match {
^
-exhausting.scala:32: error: match may not be exhaustive.
+exhausting.scala:32: warning: match may not be exhaustive.
It would fail on the following input: List((x: Int forSome x not in (1, 2)))
def fail3a(xs: List[Int]) = xs match {
^
-exhausting.scala:39: error: match may not be exhaustive.
+exhausting.scala:39: warning: match may not be exhaustive.
It would fail on the following input: Bar3
def fail3[T](x: Foo[T]) = x match {
^
-exhausting.scala:47: error: match may not be exhaustive.
+exhausting.scala:47: warning: match may not be exhaustive.
It would fail on the following inputs: (Bar1, Bar2), (Bar1, Bar3), (Bar2, Bar1), (Bar2, Bar2)
def fail4[T <: AnyRef](xx: (Foo[T], Foo[T])) = xx match {
^
-exhausting.scala:56: error: match may not be exhaustive.
+exhausting.scala:56: warning: match may not be exhaustive.
It would fail on the following inputs: (Bar1, Bar2), (Bar1, Bar3), (Bar2, Bar1), (Bar2, Bar2)
def fail5[T](xx: (Foo[T], Foo[T])) = xx match {
^
-6 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+6 warnings found
+one error found
diff --git a/test/files/neg/gadts1.check b/test/files/neg/gadts1.check
index a5e3e0de10..9b7ea5556a 100644
--- a/test/files/neg/gadts1.check
+++ b/test/files/neg/gadts1.check
@@ -1,8 +1,3 @@
-gadts1.scala:15: error: type mismatch;
- found : Test.Double
- required: a
- case NumTerm(n) => c.x = Double(1.0)
- ^
gadts1.scala:20: error: Test.Cell[a] does not take parameters
case Cell[a](x: Int) => c.x = 5
^
@@ -11,4 +6,4 @@ gadts1.scala:20: error: type mismatch;
required: a
case Cell[a](x: Int) => c.x = 5
^
-three errors found
+two errors found
diff --git a/test/files/neg/import-precedence.check b/test/files/neg/import-precedence.check
new file mode 100644
index 0000000000..5f99611052
--- /dev/null
+++ b/test/files/neg/import-precedence.check
@@ -0,0 +1,19 @@
+import-precedence.scala:18: error: reference to X is ambiguous;
+it is imported twice in the same scope by
+import uniq1.uniq2._
+and import uniq1.X
+ object Y { def f = X }
+ ^
+import-precedence.scala:61: error: reference to X is ambiguous;
+it is imported twice in the same scope by
+import uniq1.uniq2._
+and import uniq1._
+ object Y { def f = X }
+ ^
+import-precedence.scala:67: error: reference to X is ambiguous;
+it is imported twice in the same scope by
+import uniq1.uniq2.X
+and import uniq1.X
+ object Y { def f = X }
+ ^
+three errors found
diff --git a/test/files/neg/import-precedence.scala b/test/files/neg/import-precedence.scala
new file mode 100644
index 0000000000..0401635e32
--- /dev/null
+++ b/test/files/neg/import-precedence.scala
@@ -0,0 +1,68 @@
+package uniq1 {
+ object X
+ package uniq2 {
+ object X
+ package uniq3 {
+ object X
+ package uniq4 {
+ object X
+ }
+ }
+ }
+}
+
+package p1 {
+ import uniq1.X
+ package p2 {
+ import uniq1.uniq2._
+ object Y { def f = X }
+ }
+}
+
+package p2 {
+ import uniq1.uniq2._
+ package p2 {
+ import uniq1.X
+ object Y { def f = X }
+ }
+}
+
+package p3 {
+ import uniq1.X
+ import uniq1.uniq2._
+ object Y { def f = X }
+}
+
+package p4 {
+ import uniq1.uniq2._
+ import uniq1.X
+ object Y { def f = X }
+}
+
+package p5 {
+ import uniq1.X
+ package p6 {
+ import uniq1.uniq2.X
+ object Y { def f = X }
+ }
+}
+
+package p6 {
+ import uniq1._
+ package p5 {
+ import uniq1.uniq2._
+ object Y { def f = X }
+ }
+}
+
+package p7 {
+ import uniq1._
+ import uniq1.uniq2._
+ object Y { def f = X }
+}
+
+package p8 {
+ import uniq1.X
+ import uniq1.uniq2.X
+ object Y { def f = X }
+}
diff --git a/test/files/neg/javaConversions-2.10-ambiguity.check b/test/files/neg/javaConversions-2.10-ambiguity.check
deleted file mode 100644
index c064a22964..0000000000
--- a/test/files/neg/javaConversions-2.10-ambiguity.check
+++ /dev/null
@@ -1,6 +0,0 @@
-javaConversions-2.10-ambiguity.scala:8: error: type mismatch;
- found : scala.collection.concurrent.Map[String,String]
- required: scala.collection.mutable.ConcurrentMap[String,String]
- assertType[mutable.ConcurrentMap[String, String]](a)
- ^
-one error found
diff --git a/test/files/neg/logImplicits.check b/test/files/neg/logImplicits.check
index 54afc6f86d..0522bd8354 100644
--- a/test/files/neg/logImplicits.check
+++ b/test/files/neg/logImplicits.check
@@ -7,10 +7,10 @@ logImplicits.scala:7: applied implicit conversion from String("abc") to ?{def ma
logImplicits.scala:15: inferred view from String("abc") to Int = C.this.convert:(p: String("abc"))Int
math.max(122, x: Int)
^
-logImplicits.scala:19: applied implicit conversion from Int(1) to ?{def ->: ?} = implicit def any2ArrowAssoc[A](x: A): ArrowAssoc[A]
+logImplicits.scala:19: applied implicit conversion from Int(1) to ?{def ->: ?} = implicit def ArrowAssoc[A](__leftOfArrow: A): ArrowAssoc[A]
def f = (1 -> 2) + "c"
^
-logImplicits.scala:19: applied implicit conversion from (Int, Int) to ?{def +: ?} = implicit def any2stringadd(x: Any): scala.runtime.StringAdd
+logImplicits.scala:19: applied implicit conversion from (Int, Int) to ?{def +: ?} = implicit def StringAdd[A](__thingToAdd: A): StringAdd[A]
def f = (1 -> 2) + "c"
^
logImplicits.scala:22: error: class Un needs to be abstract, since method unimplemented is not defined
diff --git a/test/files/neg/lubs.check b/test/files/neg/lubs.check
index 77ab20102c..affbd4983c 100644
--- a/test/files/neg/lubs.check
+++ b/test/files/neg/lubs.check
@@ -1,5 +1,10 @@
+lubs.scala:10: error: type mismatch;
+ found : test1.A[test1.A[Object]]
+ required: test1.A[test1.A[test1.A[Any]]]
+ val x3: A[A[A[Any]]] = f
+ ^
lubs.scala:11: error: type mismatch;
- found : test1.A[test1.A[test1.A[Any]]]
+ found : test1.A[test1.A[Object]]
required: test1.A[test1.A[test1.A[test1.A[Any]]]]
val x4: A[A[A[A[Any]]]] = f
^
@@ -13,4 +18,4 @@ lubs.scala:25: error: type mismatch;
required: test2.A{type T >: Null <: test2.A{type T >: Null <: test2.A{type T >: Null <: test2.A}}}
val x4: A { type T >: Null <: A { type T >: Null <: A { type T >: Null <: A } } } = f
^
-three errors found
+four errors found
diff --git a/test/files/neg/macro-basic-mamdmi.check b/test/files/neg/macro-basic-mamdmi.check
index c7b58d70d2..621d318ceb 100644
--- a/test/files/neg/macro-basic-mamdmi.check
+++ b/test/files/neg/macro-basic-mamdmi.check
@@ -1,4 +1,5 @@
-Impls_Macros_Test_1.scala:36: error: macro implementation not found: foo (the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)
+Impls_Macros_Test_1.scala:36: error: macro implementation not found: foo
+(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)
println(foo(2) + Macros.bar(2) * new Macros().quux(4))
^
one error found
diff --git a/test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala b/test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala
index 908438cf65..f9e0ca5077 100644
--- a/test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala
+++ b/test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala
@@ -3,19 +3,19 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+ val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1))))
c.Expr[Int](body)
}
def bar(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2))))
+ val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(2))))
c.Expr[Int](body)
}
def quux(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3))))
+ val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(3))))
c.Expr[Int](body)
}
}
diff --git a/test/files/neg/macro-deprecate-idents.check b/test/files/neg/macro-deprecate-idents.check
index 22b667c390..c653eabaef 100644
--- a/test/files/neg/macro-deprecate-idents.check
+++ b/test/files/neg/macro-deprecate-idents.check
@@ -1,52 +1,54 @@
-macro-deprecate-idents.scala:2: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:2: warning: macro is now a reserved word; usage as an identifier is deprecated
val macro = ???
^
-macro-deprecate-idents.scala:6: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:6: warning: macro is now a reserved word; usage as an identifier is deprecated
var macro = ???
^
-macro-deprecate-idents.scala:10: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:10: warning: macro is now a reserved word; usage as an identifier is deprecated
type macro = Int
^
-macro-deprecate-idents.scala:14: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:14: warning: macro is now a reserved word; usage as an identifier is deprecated
class macro
^
-macro-deprecate-idents.scala:18: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:18: warning: macro is now a reserved word; usage as an identifier is deprecated
class macro
^
-macro-deprecate-idents.scala:22: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:22: warning: macro is now a reserved word; usage as an identifier is deprecated
object macro
^
-macro-deprecate-idents.scala:26: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:26: warning: macro is now a reserved word; usage as an identifier is deprecated
object macro
^
-macro-deprecate-idents.scala:30: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:30: warning: macro is now a reserved word; usage as an identifier is deprecated
trait macro
^
-macro-deprecate-idents.scala:34: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:34: warning: macro is now a reserved word; usage as an identifier is deprecated
trait macro
^
-macro-deprecate-idents.scala:37: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:37: warning: macro is now a reserved word; usage as an identifier is deprecated
package macro {
^
-macro-deprecate-idents.scala:38: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:38: warning: macro is now a reserved word; usage as an identifier is deprecated
package macro.bar {
^
-macro-deprecate-idents.scala:43: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:43: warning: macro is now a reserved word; usage as an identifier is deprecated
package macro.foo {
^
-macro-deprecate-idents.scala:48: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:48: warning: macro is now a reserved word; usage as an identifier is deprecated
val Some(macro) = Some(42)
^
-macro-deprecate-idents.scala:49: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:49: warning: macro is now a reserved word; usage as an identifier is deprecated
macro match {
^
-macro-deprecate-idents.scala:50: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:50: warning: macro is now a reserved word; usage as an identifier is deprecated
case macro => println(macro)
^
-macro-deprecate-idents.scala:50: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:50: warning: macro is now a reserved word; usage as an identifier is deprecated
case macro => println(macro)
^
-macro-deprecate-idents.scala:55: error: macro is now a reserved word; usage as an identifier is deprecated
+macro-deprecate-idents.scala:55: warning: macro is now a reserved word; usage as an identifier is deprecated
def macro = 2
^
-17 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+17 warnings found
+one error found
diff --git a/test/files/neg/macro-invalidimpl-f/Impls_1.scala b/test/files/neg/macro-invalidimpl-f/Impls_1.scala
index 334ee714be..0e4da86d22 100644
--- a/test/files/neg/macro-invalidimpl-f/Impls_1.scala
+++ b/test/files/neg/macro-invalidimpl-f/Impls_1.scala
@@ -3,7 +3,7 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def fooNullary(c: Ctx) = {
import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works"))))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works"))))
c.Expr[Unit](body)
}
diff --git a/test/files/neg/macro-invalidimpl-g/Impls_1.scala b/test/files/neg/macro-invalidimpl-g/Impls_1.scala
index 334ee714be..0e4da86d22 100644
--- a/test/files/neg/macro-invalidimpl-g/Impls_1.scala
+++ b/test/files/neg/macro-invalidimpl-g/Impls_1.scala
@@ -3,7 +3,7 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def fooNullary(c: Ctx) = {
import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works"))))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works"))))
c.Expr[Unit](body)
}
diff --git a/test/files/neg/macro-invalidsig-implicit-params/Impls_Macros_1.scala b/test/files/neg/macro-invalidsig-implicit-params/Impls_Macros_1.scala
index 7a7293422e..8205694768 100644
--- a/test/files/neg/macro-invalidsig-implicit-params/Impls_Macros_1.scala
+++ b/test/files/neg/macro-invalidsig-implicit-params/Impls_Macros_1.scala
@@ -6,9 +6,9 @@ object Impls {
import c.{prefix => prefix}
import c.universe._
val body = Block(List(
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("invoking foo_targs...")))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("type of prefix is: " + prefix.staticType)))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("U is: " + implicitly[c.WeakTypeTag[U]].tpe))))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("invoking foo_targs...")))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("type of prefix is: " + prefix.staticType)))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("U is: " + implicitly[c.WeakTypeTag[U]].tpe))))),
Literal(Constant(())))
c.Expr[Unit](body)
}
diff --git a/test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala b/test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala
index 8d7fdf3e8a..498bd4f18d 100644
--- a/test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala
+++ b/test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala
@@ -3,7 +3,7 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx) = {
import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works"))))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works"))))
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-a.check b/test/files/neg/macro-override-macro-overrides-abstract-method-a.check
index 895e0dca50..8c8f039225 100644
--- a/test/files/neg/macro-override-macro-overrides-abstract-method-a.check
+++ b/test/files/neg/macro-override-macro-overrides-abstract-method-a.check
@@ -1,5 +1,5 @@
Impls_Macros_1.scala:12: error: overriding method foo in trait Foo of type (x: Int)Int;
- macro method foo cannot override an abstract method
+ macro method foo cannot be used here - term macros cannot override abstract methods
def foo(x: Int) = macro Impls.impl
^
one error found
diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-b.check b/test/files/neg/macro-override-macro-overrides-abstract-method-b.check
index 895e0dca50..8c8f039225 100644
--- a/test/files/neg/macro-override-macro-overrides-abstract-method-b.check
+++ b/test/files/neg/macro-override-macro-overrides-abstract-method-b.check
@@ -1,5 +1,5 @@
Impls_Macros_1.scala:12: error: overriding method foo in trait Foo of type (x: Int)Int;
- macro method foo cannot override an abstract method
+ macro method foo cannot be used here - term macros cannot override abstract methods
def foo(x: Int) = macro Impls.impl
^
one error found
diff --git a/test/files/neg/macro-override-method-overrides-macro.check b/test/files/neg/macro-override-method-overrides-macro.check
index 66dc11be96..e8cba5d029 100644
--- a/test/files/neg/macro-override-method-overrides-macro.check
+++ b/test/files/neg/macro-override-method-overrides-macro.check
@@ -1,5 +1,5 @@
Macros_Test_2.scala:8: error: overriding macro method foo in class B of type (x: String)Unit;
- method foo cannot override a macro
+ method foo cannot be used here - only term macros can override term macros
override def foo(x: String) = println("fooDString")
^
one error found
diff --git a/test/files/neg/macro-override-method-overrides-macro/Impls_1.scala b/test/files/neg/macro-override-method-overrides-macro/Impls_1.scala
index ec93dd4111..69ef57d18d 100644
--- a/test/files/neg/macro-override-method-overrides-macro/Impls_1.scala
+++ b/test/files/neg/macro-override-method-overrides-macro/Impls_1.scala
@@ -4,7 +4,7 @@ object Impls {
def impl(c: Ctx)(tag: String, x: c.Expr[_]) = {
import c.{prefix => prefix}
import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(tag)), Literal(Constant(prefix.toString)), x.tree))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(tag)), Literal(Constant(prefix.toString)), x.tree))
c.Expr[Unit](body)
}
diff --git a/test/files/neg/macro-without-xmacros-a/Impls_1.scala b/test/files/neg/macro-without-xmacros-a/Impls_1.scala
index 8976f8e28d..c6677c4fde 100644
--- a/test/files/neg/macro-without-xmacros-a/Impls_1.scala
+++ b/test/files/neg/macro-without-xmacros-a/Impls_1.scala
@@ -3,16 +3,16 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1)))))
+ c.Expr(Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1)))))
}
def bar_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2)))))
+ c.Expr(Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(2)))))
}
def quux_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3)))))
+ c.Expr(Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(3)))))
}
}
\ No newline at end of file
diff --git a/test/files/neg/macro-without-xmacros-b/Impls_1.scala b/test/files/neg/macro-without-xmacros-b/Impls_1.scala
index 8976f8e28d..c6677c4fde 100644
--- a/test/files/neg/macro-without-xmacros-b/Impls_1.scala
+++ b/test/files/neg/macro-without-xmacros-b/Impls_1.scala
@@ -3,16 +3,16 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1)))))
+ c.Expr(Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1)))))
}
def bar_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2)))))
+ c.Expr(Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(2)))))
}
def quux_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3)))))
+ c.Expr(Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(3)))))
}
}
\ No newline at end of file
diff --git a/test/files/neg/main1.check b/test/files/neg/main1.check
index 1a7a13e1e9..b745105818 100644
--- a/test/files/neg/main1.check
+++ b/test/files/neg/main1.check
@@ -1,26 +1,28 @@
-main1.scala:3: error: Foo has a main method with parameter type Array[String], but foo1.Foo will not be a runnable program.
+main1.scala:3: warning: Foo has a main method with parameter type Array[String], but foo1.Foo will not be a runnable program.
Reason: companion is a trait, which means no static forwarder can be generated.
object Foo { // companion is trait
^
-main1.scala:10: error: Foo has a main method with parameter type Array[String], but foo2.Foo will not be a runnable program.
+main1.scala:10: warning: Foo has a main method with parameter type Array[String], but foo2.Foo will not be a runnable program.
Reason: companion contains its own main method, which means no static forwarder can be generated.
object Foo { // companion has its own main
^
-main1.scala:22: error: Foo has a main method with parameter type Array[String], but foo3.Foo will not be a runnable program.
+main1.scala:22: warning: Foo has a main method with parameter type Array[String], but foo3.Foo will not be a runnable program.
Reason: companion contains its own main method (implementation restriction: no main is allowed, regardless of signature), which means no static forwarder can be generated.
object Foo { // Companion contains main, but not an interfering main.
^
-main1.scala:31: error: Foo has a main method with parameter type Array[String], but foo4.Foo will not be a runnable program.
+main1.scala:31: warning: Foo has a main method with parameter type Array[String], but foo4.Foo will not be a runnable program.
Reason: companion contains its own main method, which means no static forwarder can be generated.
object Foo extends Foo { // Inherits main from the class
^
-main1.scala:39: error: Foo has a main method with parameter type Array[String], but foo5.Foo will not be a runnable program.
+main1.scala:39: warning: Foo has a main method with parameter type Array[String], but foo5.Foo will not be a runnable program.
Reason: companion contains its own main method, which means no static forwarder can be generated.
object Foo extends Foo { // Overrides main from the class
^
-5 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+5 warnings found
+one error found
diff --git a/test/files/neg/migration28.check b/test/files/neg/migration28.check
index d7dfacf3db..afb4db62e2 100644
--- a/test/files/neg/migration28.check
+++ b/test/files/neg/migration28.check
@@ -1,5 +1,7 @@
-migration28.scala:4: error: method scanRight in trait TraversableLike has changed semantics in version 2.9.0:
+migration28.scala:4: warning: method scanRight in trait TraversableLike has changed semantics in version 2.9.0:
The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse.
List(1,2,3,4,5).scanRight(0)(_+_)
^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
one error found
diff --git a/test/files/neg/names-defaults-neg-warn.check b/test/files/neg/names-defaults-neg-warn.check
index e1085acf76..0f4edef84e 100644
--- a/test/files/neg/names-defaults-neg-warn.check
+++ b/test/files/neg/names-defaults-neg-warn.check
@@ -1,7 +1,9 @@
-names-defaults-neg-warn.scala:11: error: the parameter name s has been deprecated. Use x instead.
+names-defaults-neg-warn.scala:11: warning: the parameter name s has been deprecated. Use x instead.
deprNam2.f(s = "dlfkj")
^
-names-defaults-neg-warn.scala:12: error: the parameter name x has been deprecated. Use s instead.
+names-defaults-neg-warn.scala:12: warning: the parameter name x has been deprecated. Use s instead.
deprNam2.g(x = "dlkjf")
^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check
index ea7c323b74..f6bd703e1f 100644
--- a/test/files/neg/names-defaults-neg.check
+++ b/test/files/neg/names-defaults-neg.check
@@ -100,7 +100,7 @@ Error occurred in an application involving default arguments.
^
names-defaults-neg.scala:86: error: module extending its companion class cannot use default constructor arguments
object C extends C()
- ^
+ ^
names-defaults-neg.scala:90: error: deprecated parameter name x has to be distinct from any other parameter name (deprecated or not).
def deprNam1(x: Int, @deprecatedName('x) y: String) = 0
^
diff --git a/test/files/neg/nested-annotation.check b/test/files/neg/nested-annotation.check
new file mode 100644
index 0000000000..ca263943fe
--- /dev/null
+++ b/test/files/neg/nested-annotation.check
@@ -0,0 +1,10 @@
+nested-annotation.scala:3: warning: Implementation restriction: subclassing Classfile does not
+make your annotation visible at runtime. If that is what
+you want, you must write the annotation class in Java.
+class ComplexAnnotation(val value: Annotation) extends ClassfileAnnotation
+ ^
+nested-annotation.scala:8: error: nested classfile annotations must be defined in java; found: inline
+ @ComplexAnnotation(new inline) def bippy(): Int = 1
+ ^
+one warning found
+one error found
diff --git a/test/files/neg/nested-annotation.scala b/test/files/neg/nested-annotation.scala
new file mode 100644
index 0000000000..35c0cd3b75
--- /dev/null
+++ b/test/files/neg/nested-annotation.scala
@@ -0,0 +1,9 @@
+import annotation._
+
+class ComplexAnnotation(val value: Annotation) extends ClassfileAnnotation
+
+class A {
+ // It's hard to induce this error because @ComplexAnnotation(@inline) is a parse
+ // error so it never gets out of the parser, but:
+ @ComplexAnnotation(new inline) def bippy(): Int = 1
+}
diff --git a/test/files/neg/newpat_unreachable.check b/test/files/neg/newpat_unreachable.check
index 08453cac19..a928e3853a 100644
--- a/test/files/neg/newpat_unreachable.check
+++ b/test/files/neg/newpat_unreachable.check
@@ -1,27 +1,29 @@
-newpat_unreachable.scala:6: error: patterns after a variable pattern cannot match (SLS 8.1.1)
+newpat_unreachable.scala:6: warning: patterns after a variable pattern cannot match (SLS 8.1.1)
If you intended to match against parameter b of method contrivedExample, you must use backticks, like: case `b` =>
case b => println("matched b")
^
-newpat_unreachable.scala:7: error: unreachable code due to variable pattern 'b' on line 6
+newpat_unreachable.scala:7: warning: unreachable code due to variable pattern 'b' on line 6
If you intended to match against parameter c of method contrivedExample, you must use backticks, like: case `c` =>
case c => println("matched c")
^
-newpat_unreachable.scala:8: error: unreachable code due to variable pattern 'b' on line 6
+newpat_unreachable.scala:8: warning: unreachable code due to variable pattern 'b' on line 6
If you intended to match against value d in class A, you must use backticks, like: case `d` =>
case d => println("matched d")
^
-newpat_unreachable.scala:9: error: unreachable code due to variable pattern 'b' on line 6
+newpat_unreachable.scala:9: warning: unreachable code due to variable pattern 'b' on line 6
case _ => println("matched neither")
^
-newpat_unreachable.scala:22: error: patterns after a variable pattern cannot match (SLS 8.1.1)
+newpat_unreachable.scala:22: warning: patterns after a variable pattern cannot match (SLS 8.1.1)
If you intended to match against parameter b of method g, you must use backticks, like: case `b` =>
case b => 1
^
-newpat_unreachable.scala:23: error: unreachable code due to variable pattern 'b' on line 22
+newpat_unreachable.scala:23: warning: unreachable code due to variable pattern 'b' on line 22
If you intended to match against parameter c of method h, you must use backticks, like: case `c` =>
case c => 2
^
-newpat_unreachable.scala:24: error: unreachable code due to variable pattern 'b' on line 22
+newpat_unreachable.scala:24: warning: unreachable code due to variable pattern 'b' on line 22
case _ => 3
^
-7 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+7 warnings found
+one error found
diff --git a/test/files/neg/nonlocal-warning.check b/test/files/neg/nonlocal-warning.check
new file mode 100644
index 0000000000..5202df655a
--- /dev/null
+++ b/test/files/neg/nonlocal-warning.check
@@ -0,0 +1,9 @@
+nonlocal-warning.scala:4: warning: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning.
+ catch { case x => 11 }
+ ^
+nonlocal-warning.scala:2: warning: catch block may intercept non-local return from method foo
+ def foo(l: List[Int]): Int = {
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/pending/pos/t4649.flags b/test/files/neg/nonlocal-warning.flags
index e8fb65d50c..e8fb65d50c 100644
--- a/test/pending/pos/t4649.flags
+++ b/test/files/neg/nonlocal-warning.flags
diff --git a/test/files/neg/nonlocal-warning.scala b/test/files/neg/nonlocal-warning.scala
new file mode 100644
index 0000000000..f908a86302
--- /dev/null
+++ b/test/files/neg/nonlocal-warning.scala
@@ -0,0 +1,18 @@
+class Foo {
+ def foo(l: List[Int]): Int = {
+ try l foreach { _ => return 5 }
+ catch { case x => 11 }
+ 22
+ }
+
+ val pf: PartialFunction[Throwable, Unit] = {
+ case x if false => ()
+ }
+
+ def bar(l: List[Int]): Int = {
+ try l foreach { _ => return 5 }
+ catch pf
+ finally println()
+ 22
+ }
+}
diff --git a/test/files/neg/nullary-override.check b/test/files/neg/nullary-override.check
index 6b2ded2d4a..f032f4a6c2 100644
--- a/test/files/neg/nullary-override.check
+++ b/test/files/neg/nullary-override.check
@@ -1,4 +1,6 @@
-nullary-override.scala:2: error: non-nullary method overrides nullary method
+nullary-override.scala:2: warning: non-nullary method overrides nullary method
class B extends A { override def x(): Int = 4 }
^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
one error found
diff --git a/test/files/neg/overloaded-implicit.check b/test/files/neg/overloaded-implicit.check
index bdbe6a89d5..ca0870705d 100644
--- a/test/files/neg/overloaded-implicit.check
+++ b/test/files/neg/overloaded-implicit.check
@@ -1,7 +1,9 @@
-overloaded-implicit.scala:2: error: parameterized overloaded implicit methods are not visible as view bounds
+overloaded-implicit.scala:2: warning: parameterized overloaded implicit methods are not visible as view bounds
implicit def imp1[T](x: List[T]): Map[T, T] = Map()
^
-overloaded-implicit.scala:3: error: parameterized overloaded implicit methods are not visible as view bounds
+overloaded-implicit.scala:3: warning: parameterized overloaded implicit methods are not visible as view bounds
implicit def imp1[T](x: Set[T]): Map[T, T] = Map()
^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/overloaded-implicit.flags b/test/files/neg/overloaded-implicit.flags
index 7949c2afa2..9c1e74e4ef 100644
--- a/test/files/neg/overloaded-implicit.flags
+++ b/test/files/neg/overloaded-implicit.flags
@@ -1 +1 @@
--Xlint -Xfatal-warnings
+-Xlint -Xfatal-warnings -Xdev
diff --git a/test/files/neg/package-ob-case.check b/test/files/neg/package-ob-case.check
index e6b2f858ef..063a120db1 100644
--- a/test/files/neg/package-ob-case.check
+++ b/test/files/neg/package-ob-case.check
@@ -1,5 +1,7 @@
-package-ob-case.scala:3: error: it is not recommended to define classes/objects inside of package objects.
+package-ob-case.scala:3: warning: it is not recommended to define classes/objects inside of package objects.
If possible, define class X in package foo instead.
case class X(z: Int) { }
^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
one error found
diff --git a/test/files/neg/pat_unreachable.check b/test/files/neg/pat_unreachable.check
index c5706b7fad..b4c0e7e104 100644
--- a/test/files/neg/pat_unreachable.check
+++ b/test/files/neg/pat_unreachable.check
@@ -1,13 +1,14 @@
-pat_unreachable.scala:5: error: unreachable code
- case Seq(x, y, z, w) => List(z,w) // redundant!
- ^
-pat_unreachable.scala:9: error: unreachable code
- case Seq(x, y) => List(x, y)
- ^
-pat_unreachable.scala:23: error: unreachable code
+pat_unreachable.scala:22: warning: patterns after a variable pattern cannot match (SLS 8.1.1)
+If you intended to match against parameter b of method contrivedExample, you must use backticks, like: case `b` =>
+ case b => println("matched b")
+ ^
+pat_unreachable.scala:23: warning: unreachable code due to variable pattern 'b' on line 22
+If you intended to match against parameter c of method contrivedExample, you must use backticks, like: case `c` =>
case c => println("matched c")
^
-pat_unreachable.scala:24: error: unreachable code
+pat_unreachable.scala:24: warning: unreachable code due to variable pattern 'b' on line 22
case _ => println("matched neither")
^
-four errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/pat_unreachable.flags b/test/files/neg/pat_unreachable.flags
index cb8324a345..85d8eb2ba2 100644
--- a/test/files/neg/pat_unreachable.flags
+++ b/test/files/neg/pat_unreachable.flags
@@ -1 +1 @@
--Xoldpatmat
\ No newline at end of file
+-Xfatal-warnings
diff --git a/test/files/neg/patmat-type-check.check b/test/files/neg/patmat-type-check.check
index 721217c314..fedac3b746 100644
--- a/test/files/neg/patmat-type-check.check
+++ b/test/files/neg/patmat-type-check.check
@@ -1,12 +1,27 @@
patmat-type-check.scala:11: warning: fruitless type test: a value of type Test.Bop4[T] cannot also be a Seq[A]
def s3[T](x: Bop4[T]) = x match { case Seq('b', 'o', 'b') => true }
^
+patmat-type-check.scala:11: error: pattern type is incompatible with expected type;
+ found : Seq[A]
+ required: Test.Bop4[T]
+ def s3[T](x: Bop4[T]) = x match { case Seq('b', 'o', 'b') => true }
+ ^
patmat-type-check.scala:15: warning: fruitless type test: a value of type Test.Bop5[_$1,T1,T2] cannot also be a Seq[A]
def s4[T1, T2](x: Bop5[_, T1, T2]) = x match { case Seq('b', 'o', 'b') => true }
^
+patmat-type-check.scala:15: error: pattern type is incompatible with expected type;
+ found : Seq[A]
+ required: Test.Bop5[_$1,T1,T2] where type _$1
+ def s4[T1, T2](x: Bop5[_, T1, T2]) = x match { case Seq('b', 'o', 'b') => true }
+ ^
patmat-type-check.scala:19: warning: fruitless type test: a value of type Test.Bop3[T] cannot also be a Seq[A]
def f4[T](x: Bop3[T]) = x match { case Seq('b', 'o', 'b') => true }
^
+patmat-type-check.scala:19: error: pattern type is incompatible with expected type;
+ found : Seq[A]
+ required: Test.Bop3[T]
+ def f4[T](x: Bop3[T]) = x match { case Seq('b', 'o', 'b') => true }
+ ^
patmat-type-check.scala:22: error: scrutinee is incompatible with pattern type;
found : Seq[A]
required: String
@@ -28,4 +43,4 @@ patmat-type-check.scala:30: error: scrutinee is incompatible with pattern type;
def f4[T](x: Bop3[Char]) = x match { case Seq('b', 'o', 'b') => true } // fail
^
three warnings found
-four errors found
+7 errors found
diff --git a/test/files/neg/patmatexhaust.check b/test/files/neg/patmatexhaust.check
index 4556e6622f..6069dfdaab 100644
--- a/test/files/neg/patmatexhaust.check
+++ b/test/files/neg/patmatexhaust.check
@@ -1,40 +1,42 @@
-patmatexhaust.scala:7: error: match may not be exhaustive.
+patmatexhaust.scala:7: warning: match may not be exhaustive.
It would fail on the following input: Baz
def ma1(x:Foo) = x match {
^
-patmatexhaust.scala:11: error: match may not be exhaustive.
+patmatexhaust.scala:11: warning: match may not be exhaustive.
It would fail on the following input: Bar(_)
def ma2(x:Foo) = x match {
^
-patmatexhaust.scala:23: error: match may not be exhaustive.
+patmatexhaust.scala:23: warning: match may not be exhaustive.
It would fail on the following inputs: (Kult(_), Kult(_)), (Qult(), Qult())
def ma3(x:Mult) = (x,x) match { // not exhaustive
^
-patmatexhaust.scala:49: error: match may not be exhaustive.
+patmatexhaust.scala:49: warning: match may not be exhaustive.
It would fail on the following inputs: Gp(), Gu
def ma4(x:Deep) = x match { // missing cases: Gu, Gp
^
-patmatexhaust.scala:55: error: unreachable code
+patmatexhaust.scala:55: warning: unreachable code
case _ if 1 == 0 =>
^
-patmatexhaust.scala:53: error: match may not be exhaustive.
+patmatexhaust.scala:53: warning: match may not be exhaustive.
It would fail on the following input: Gp()
def ma5(x:Deep) = x match {
^
-patmatexhaust.scala:75: error: match may not be exhaustive.
+patmatexhaust.scala:75: warning: match may not be exhaustive.
It would fail on the following input: B()
def ma9(x: B) = x match {
^
-patmatexhaust.scala:100: error: match may not be exhaustive.
+patmatexhaust.scala:100: warning: match may not be exhaustive.
It would fail on the following input: C1()
def ma10(x: C) = x match { // not exhaustive: C1 is not sealed.
^
-patmatexhaust.scala:114: error: match may not be exhaustive.
+patmatexhaust.scala:114: warning: match may not be exhaustive.
It would fail on the following inputs: D1, D2()
def ma10(x: C) = x match { // not exhaustive: C1 has subclasses.
^
-patmatexhaust.scala:126: error: match may not be exhaustive.
+patmatexhaust.scala:126: warning: match may not be exhaustive.
It would fail on the following input: C1()
def ma10(x: C) = x match { // not exhaustive: C1 is not abstract.
^
-10 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+10 warnings found
+one error found
diff --git a/test/files/neg/permanent-blindness.check b/test/files/neg/permanent-blindness.check
index 18b4543707..cdde201ef6 100644
--- a/test/files/neg/permanent-blindness.check
+++ b/test/files/neg/permanent-blindness.check
@@ -1,10 +1,12 @@
-permanent-blindness.scala:10: error: imported `Bippy' is permanently hidden by definition of class Bippy in package bar
+permanent-blindness.scala:10: warning: imported `Bippy' is permanently hidden by definition of class Bippy in package bar
import foo.{ Bippy, Bop, Dingus }
^
-permanent-blindness.scala:10: error: imported `Bop' is permanently hidden by definition of object Bop in package bar
+permanent-blindness.scala:10: warning: imported `Bop' is permanently hidden by definition of object Bop in package bar
import foo.{ Bippy, Bop, Dingus }
^
-permanent-blindness.scala:10: error: imported `Dingus' is permanently hidden by definition of object Dingus in package bar
+permanent-blindness.scala:10: warning: imported `Dingus' is permanently hidden by definition of object Dingus in package bar
import foo.{ Bippy, Bop, Dingus }
^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/predef-masking.scala b/test/files/neg/predef-masking.scala
index 67b69aa169..6f4f4859d0 100644
--- a/test/files/neg/predef-masking.scala
+++ b/test/files/neg/predef-masking.scala
@@ -1,5 +1,5 @@
// Testing predef masking
-import Predef.{ any2stringadd => _, _ }
+import Predef.{ StringAdd => _, _ }
object StringPlusConfusion {
// Would love to do something about this error message, but by the
diff --git a/test/files/neg/protected-constructors.check b/test/files/neg/protected-constructors.check
index f137158ed6..e295917050 100644
--- a/test/files/neg/protected-constructors.check
+++ b/test/files/neg/protected-constructors.check
@@ -19,7 +19,4 @@ protected-constructors.scala:15: error: class Foo3 in object Ding cannot be acce
object Ding in package dingus where target is defined
class Bar3 extends Ding.Foo3("abc")
^
-protected-constructors.scala:15: error: too many arguments for constructor Object: ()Object
- class Bar3 extends Ding.Foo3("abc")
- ^
-5 errors found
+four errors found
diff --git a/test/files/neg/sealed-final-neg.check b/test/files/neg/sealed-final-neg.check
new file mode 100644
index 0000000000..500d23f49a
--- /dev/null
+++ b/test/files/neg/sealed-final-neg.check
@@ -0,0 +1,4 @@
+sealed-final-neg.scala:41: error: expected class or object definition
+"Due to SI-6142 this emits no warnings, so we'll just break it until that's fixed."
+^
+one error found
diff --git a/test/files/neg/sealed-final-neg.flags b/test/files/neg/sealed-final-neg.flags
new file mode 100644
index 0000000000..cfabf7a5b4
--- /dev/null
+++ b/test/files/neg/sealed-final-neg.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Yinline-warnings -optimise
\ No newline at end of file
diff --git a/test/files/neg/sealed-final-neg.scala b/test/files/neg/sealed-final-neg.scala
new file mode 100644
index 0000000000..bc25330e13
--- /dev/null
+++ b/test/files/neg/sealed-final-neg.scala
@@ -0,0 +1,41 @@
+package neg1 {
+ sealed abstract class Foo {
+ @inline def bar(x: Int) = x + 1
+ }
+ object Foo {
+ def mkFoo(): Foo = new Baz2
+ }
+
+ object Baz1 extends Foo
+ final class Baz2 extends Foo
+ final class Baz3 extends Foo {
+ override def bar(x: Int) = x - 1
+ }
+
+ object Test {
+ // bar can't be inlined - it is overridden in Baz3
+ def f = Foo.mkFoo() bar 10
+ }
+}
+
+package neg2 {
+ sealed abstract class Foo {
+ @inline def bar(x: Int) = x + 1
+ }
+ object Foo {
+ def mkFoo(): Foo = new Baz2
+ }
+
+ object Baz1 extends Foo
+ final class Baz2 extends Foo
+ class Baz3 extends Foo {
+ override def bar(x: Int) = x - 1
+ }
+
+ object Test {
+ // bar can't be inlined - Baz3 is not final
+ def f = Foo.mkFoo() bar 10
+ }
+}
+
+"Due to SI-6142 this emits no warnings, so we'll just break it until that's fixed."
diff --git a/test/files/neg/sealed-java-enums.check b/test/files/neg/sealed-java-enums.check
index 20d00c8e91..a3c39ec5cd 100644
--- a/test/files/neg/sealed-java-enums.check
+++ b/test/files/neg/sealed-java-enums.check
@@ -1,5 +1,7 @@
-sealed-java-enums.scala:5: error: match may not be exhaustive.
+sealed-java-enums.scala:5: warning: match may not be exhaustive.
It would fail on the following inputs: BLOCKED, TERMINATED, TIMED_WAITING
def f(state: State) = state match {
^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
one error found
diff --git a/test/files/neg/serialversionuid-not-const.check b/test/files/neg/serialversionuid-not-const.check
new file mode 100644
index 0000000000..9c383d97ad
--- /dev/null
+++ b/test/files/neg/serialversionuid-not-const.check
@@ -0,0 +1,10 @@
+serialversionuid-not-const.scala:1: error: annotation argument needs to be a constant; found: 13L.toLong
+@SerialVersionUID(13l.toLong) class C1 extends Serializable
+ ^
+serialversionuid-not-const.scala:3: error: annotation argument needs to be a constant; found: 13.asInstanceOf[Long]
+@SerialVersionUID(13.asInstanceOf[Long]) class C3 extends Serializable
+ ^
+serialversionuid-not-const.scala:4: error: annotation argument needs to be a constant; found: Test.bippy
+@SerialVersionUID(Test.bippy) class C4 extends Serializable
+ ^
+three errors found
diff --git a/test/files/neg/serialversionuid-not-const.scala b/test/files/neg/serialversionuid-not-const.scala
new file mode 100644
index 0000000000..f0e3ef4e7e
--- /dev/null
+++ b/test/files/neg/serialversionuid-not-const.scala
@@ -0,0 +1,16 @@
+@SerialVersionUID(13l.toLong) class C1 extends Serializable
+@SerialVersionUID(13l) class C2 extends Serializable
+@SerialVersionUID(13.asInstanceOf[Long]) class C3 extends Serializable
+@SerialVersionUID(Test.bippy) class C4 extends Serializable
+
+object Test {
+ val bippy = 13L
+
+ def show(c: Class[_]) = println(java.io.ObjectStreamClass.lookup(c).getSerialVersionUID)
+ def main(args: Array[String]): Unit = {
+ show(classOf[C1])
+ show(classOf[C2])
+ show(classOf[C3])
+ show(classOf[C4])
+ }
+}
diff --git a/test/files/neg/stmt-expr-discard.check b/test/files/neg/stmt-expr-discard.check
index 2d6420a61d..1207e6da50 100644
--- a/test/files/neg/stmt-expr-discard.check
+++ b/test/files/neg/stmt-expr-discard.check
@@ -1,7 +1,9 @@
-stmt-expr-discard.scala:3: error: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+stmt-expr-discard.scala:3: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 2
^
-stmt-expr-discard.scala:4: error: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+stmt-expr-discard.scala:4: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
- 4
^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/switch.check b/test/files/neg/switch.check
index e4730b6459..f968d3a448 100644
--- a/test/files/neg/switch.check
+++ b/test/files/neg/switch.check
@@ -1,7 +1,9 @@
-switch.scala:38: error: could not emit switch for @switch annotated match
+switch.scala:38: warning: could not emit switch for @switch annotated match
def fail2(c: Char) = (c: @switch @unchecked) match {
^
-switch.scala:45: error: could not emit switch for @switch annotated match
+switch.scala:45: warning: could not emit switch for @switch annotated match
def fail3(c: Char) = (c: @unchecked @switch) match {
^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/t1010.scala b/test/files/neg/t1010.scala
index 7a1e6615e5..fd142978ec 100644
--- a/test/files/neg/t1010.scala
+++ b/test/files/neg/t1010.scala
@@ -6,9 +6,9 @@ class MailBox {
abstract class Actor {
private val in = new MailBox
- def send(msg: in.Message) = error("foo")
+ def send(msg: in.Message) = sys.error("foo")
- def unstable: Actor = error("foo")
+ def unstable: Actor = sys.error("foo")
def dubiousSend(msg: MailBox#Message): Nothing =
unstable.send(msg) // in.Message becomes unstable.Message, but that's ok since Message is a concrete type member
diff --git a/test/files/neg/t1224.check b/test/files/neg/t1224.check
index fb61275911..ab8a6f1130 100644
--- a/test/files/neg/t1224.check
+++ b/test/files/neg/t1224.check
@@ -1,4 +1,4 @@
-t1224.scala:4: error: illegal cyclic reference involving type T
+t1224.scala:4: error: lower bound C[A.this.T] does not conform to upper bound C[C[A.this.T]]
type T >: C[T] <: C[C[T]]
^
one error found
diff --git a/test/files/neg/t1224.flags b/test/files/neg/t1224.flags
new file mode 100644
index 0000000000..ca20f55172
--- /dev/null
+++ b/test/files/neg/t1224.flags
@@ -0,0 +1 @@
+-Ybreak-cycles
diff --git a/test/files/neg/t2148.check b/test/files/neg/t2148.check
index 5113b48e51..27b5dce507 100644
--- a/test/files/neg/t2148.check
+++ b/test/files/neg/t2148.check
@@ -1,4 +1,4 @@
-t2148.scala:9: error: type A is not a stable prefix
+t2148.scala:9: error: A is not a legal prefix for a constructor
val b = new A with A#A1
^
one error found
diff --git a/test/files/neg/t2442.check b/test/files/neg/t2442.check
index 714816fd62..9ff0b44661 100644
--- a/test/files/neg/t2442.check
+++ b/test/files/neg/t2442.check
@@ -1,9 +1,11 @@
-t2442.scala:4: error: match may not be exhaustive.
+t2442.scala:4: warning: match may not be exhaustive.
It would fail on the following input: THREE
def f(e: MyEnum) = e match {
^
-t2442.scala:11: error: match may not be exhaustive.
+t2442.scala:11: warning: match may not be exhaustive.
It would fail on the following input: BLUE
def g(e: MySecondEnum) = e match {
^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/t2641.check b/test/files/neg/t2641.check
index 909f4f0cf3..a0a960f0ea 100644
--- a/test/files/neg/t2641.check
+++ b/test/files/neg/t2641.check
@@ -1,15 +1,7 @@
t2641.scala:18: error: wrong number of type arguments for ManagedSeq, should be 2
with TraversableViewLike[A, ManagedSeqStrict[A], ManagedSeq[A]]
^
-t2641.scala:16: error: illegal inheritance;
- self-type ManagedSeq does not conform to ManagedSeqStrict[A]'s selftype ManagedSeqStrict[A]
- extends ManagedSeqStrict[A]
- ^
-t2641.scala:17: error: illegal inheritance;
- self-type ManagedSeq does not conform to scala.collection.TraversableView[A,ManagedSeqStrict[A]]'s selftype scala.collection.TraversableView[A,ManagedSeqStrict[A]]
- with TraversableView[A, ManagedSeqStrict[A]]
- ^
-t2641.scala:27: error: value managedIterator is not a member of ManagedSeq
+t2641.scala:27: error: value managedIterator is not a member of ManagedSeq[A,Coll]
override def managedIterator = self.managedIterator slice (from, until)
^
-four errors found
+two errors found
diff --git a/test/files/neg/t2796.check b/test/files/neg/t2796.check
index aeb18497ed..4456a7fc19 100644
--- a/test/files/neg/t2796.check
+++ b/test/files/neg/t2796.check
@@ -1,4 +1,6 @@
-t2796.scala:7: error: Implementation restriction: early definitions in traits are not initialized before the super class is initialized.
+t2796.scala:7: warning: Implementation restriction: early definitions in traits are not initialized before the super class is initialized.
val abstractVal = "T1.abstractVal" // warn
^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
one error found
diff --git a/test/files/neg/t3098.check b/test/files/neg/t3098.check
index 85829747b9..5343b128f0 100644
--- a/test/files/neg/t3098.check
+++ b/test/files/neg/t3098.check
@@ -1,5 +1,7 @@
-b.scala:3: error: match may not be exhaustive.
+b.scala:3: warning: match may not be exhaustive.
It would fail on the following input: (_ : C)
def f = (null: T) match {
^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
one error found
diff --git a/test/files/neg/t3160ambiguous.check b/test/files/neg/t3160ambiguous.check
new file mode 100644
index 0000000000..e80d9a5461
--- /dev/null
+++ b/test/files/neg/t3160ambiguous.check
@@ -0,0 +1,7 @@
+t3160ambiguous.scala:8: error: reference to Node is ambiguous;
+it is imported twice in the same scope by
+import scala.xml._
+and import Bippy._
+ def f(x: Node): String = ??? // ambiguous, because Bippy.Node is accessible
+ ^
+one error found
diff --git a/test/files/neg/t3160ambiguous.scala b/test/files/neg/t3160ambiguous.scala
new file mode 100644
index 0000000000..cb9759b79c
--- /dev/null
+++ b/test/files/neg/t3160ambiguous.scala
@@ -0,0 +1,15 @@
+object Bippy {
+ private class Node
+}
+class Bippy {
+ import Bippy._
+ import scala.xml._
+
+ def f(x: Node): String = ??? // ambiguous, because Bippy.Node is accessible
+}
+class Other {
+ import Bippy._
+ import scala.xml._
+
+ def f(x: Node): String = ??? // unambiguous, because Bippy.Node is inaccessible
+}
diff --git a/test/files/neg/t3224.check b/test/files/neg/t3224.check
index 29304c567a..69b02c8862 100644
--- a/test/files/neg/t3224.check
+++ b/test/files/neg/t3224.check
@@ -1,6 +1,26 @@
-t3224.scala:29: error: polymorphic expression cannot be instantiated to expected type;
+t3224.scala:30: error: polymorphic expression cannot be instantiated to expected type;
found : [T]Array[T]
required: List[?]
- println(Texts textL Array()); println(Texts textL Array(1)); println(Texts textL Array(1, 1))
- ^
-one error found
+ println(Texts textL Array())
+ ^
+t3224.scala:34: error: type mismatch;
+ found : List[Nothing]
+ required: Array[?]
+ println(Texts textA List())
+ ^
+t3224.scala:35: error: type mismatch;
+ found : List[Int]
+ required: Array[?]
+ println(Texts textA List(1))
+ ^
+t3224.scala:36: error: type mismatch;
+ found : List[Int]
+ required: Array[?]
+ println(Texts textA List(1, 1));
+ ^
+t3224.scala:48: error: polymorphic expression cannot be instantiated to expected type;
+ found : [T]Array[T]
+ required: List[?]
+ assert(size(Array()) == 0)
+ ^
+5 errors found
diff --git a/test/files/neg/t3224.scala b/test/files/neg/t3224.scala
index 774de3335a..b7af8a67b5 100755
--- a/test/files/neg/t3224.scala
+++ b/test/files/neg/t3224.scala
@@ -1,30 +1,50 @@
object Texts{
- def textL[T](list: List[T]) = {
- list match{
- case List() => "Empty"
- case List(_) => "One"
+ def textL[T](list: List[T]) = {
+ list match{
+ case List() => "Empty"
+ case List(_) => "One"
case List(_*) => "Many"
}
}
- def textA[T](array: Array[T]) = {
- array match{
- case Array() => "Empty"
- case Array(_) => "One"
+ def textA[T](array: Array[T]) = {
+ array match{
+ case Array() => "Empty"
+ case Array(_) => "One"
case Array(_*) => "Many"
}
}
}
object Test extends App {
+ {
+ implicit def array2list[T](array: Array[T]) = {
+ println(array.toList.size)
+ array.toList
+ }
+
+ println(Texts textL List())
+ println(Texts textL List(1))
+ println(Texts textL List(1, 1));
+
+ println(Texts textL Array())
+ println(Texts textL Array(1))
+ println(Texts textL Array(1, 1))
- implicit def array2list[T](array: Array[T]) = {
- println(array.toList.size)
- array.toList
+ println(Texts textA List())
+ println(Texts textA List(1))
+ println(Texts textA List(1, 1));
+
+ println(Texts textA Array())
+ println(Texts textA Array(1))
+ println(Texts textA Array(1, 1))
}
-
- println(Texts textL List()); println(Texts textL List(1)); println(Texts textL List(1, 1));
+ {
+ implicit def array2list[T](array: Array[T]) = array.toList
+ def size[T](list: List[T]) = list.size
- println(Texts textL Array()); println(Texts textL Array(1)); println(Texts textL Array(1, 1))
+ assert(size(array2list(Array())) == 0)
+ assert(size(Array()) == 0)
+ }
}
diff --git a/test/files/neg/t3234.check b/test/files/neg/t3234.check
index 4339950ecb..8f0d624ed9 100644
--- a/test/files/neg/t3234.check
+++ b/test/files/neg/t3234.check
@@ -1,2 +1,6 @@
-error: there were 1 inliner warning(s); re-run with -Yinline-warnings for details
+t3234.scala:17: warning: At the end of the day, could not inline @inline-marked method foo3
+ println(foo(42) + foo2(11) + foo3(2))
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
one error found
diff --git a/test/files/neg/t3234.flags b/test/files/neg/t3234.flags
index c9cefdc4b9..cc3d9fb6f0 100644
--- a/test/files/neg/t3234.flags
+++ b/test/files/neg/t3234.flags
@@ -1 +1 @@
--Yinline -Xfatal-warnings
\ No newline at end of file
+-Yinline -Yinline-warnings -Xfatal-warnings
diff --git a/test/files/neg/t3683a.check b/test/files/neg/t3683a.check
index 3de3ad784e..6386265ebc 100644
--- a/test/files/neg/t3683a.check
+++ b/test/files/neg/t3683a.check
@@ -1,5 +1,7 @@
-t3683a.scala:14: error: match may not be exhaustive.
+t3683a.scala:14: warning: match may not be exhaustive.
It would fail on the following input: XX()
w match {
^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
one error found
diff --git a/test/files/neg/t3692-new.check b/test/files/neg/t3692-new.check
index 5aa991c105..9b96449930 100644
--- a/test/files/neg/t3692-new.check
+++ b/test/files/neg/t3692-new.check
@@ -7,8 +7,13 @@ t3692-new.scala:15: warning: non-variable type argument Int in type pattern Map[
t3692-new.scala:16: warning: non-variable type argument Int in type pattern Map[T,Int] is unchecked since it is eliminated by erasure
case m2: Map[T, Int] => new java.util.HashMap[T, Integer]
^
-t3692-new.scala:16: error: unreachable code
- case m2: Map[T, Int] => new java.util.HashMap[T, Integer]
+t3692-new.scala:15: warning: unreachable code
+ case m1: Map[Int, V] => new java.util.HashMap[Integer, V]
^
-three warnings found
+t3692-new.scala:4: warning: Tester has a main method with parameter type Array[String], but Tester will not be a runnable program.
+ Reason: main method must have exact signature (Array[String])Unit
+object Tester {
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+5 warnings found
one error found
diff --git a/test/files/neg/t3692-new.flags b/test/files/neg/t3692-new.flags
index cb8324a345..85d8eb2ba2 100644
--- a/test/files/neg/t3692-new.flags
+++ b/test/files/neg/t3692-new.flags
@@ -1 +1 @@
--Xoldpatmat
\ No newline at end of file
+-Xfatal-warnings
diff --git a/test/files/neg/t3692-old.check b/test/files/neg/t3692-old.check
deleted file mode 100644
index 9f3ae516aa..0000000000
--- a/test/files/neg/t3692-old.check
+++ /dev/null
@@ -1,14 +0,0 @@
-t3692-old.scala:13: warning: non-variable type argument Int in type pattern Map[Int,Int] is unchecked since it is eliminated by erasure
- case m0: Map[Int, Int] => new java.util.HashMap[Integer, Integer]
- ^
-t3692-old.scala:14: warning: non-variable type argument Int in type pattern Map[Int,V] is unchecked since it is eliminated by erasure
- case m1: Map[Int, V] => new java.util.HashMap[Integer, V]
- ^
-t3692-old.scala:15: warning: non-variable type argument Int in type pattern Map[T,Int] is unchecked since it is eliminated by erasure
- case m2: Map[T, Int] => new java.util.HashMap[T, Integer]
- ^
-t3692-old.scala:15: error: unreachable code
- case m2: Map[T, Int] => new java.util.HashMap[T, Integer]
- ^
-three warnings found
-one error found
diff --git a/test/files/neg/t3692-old.flags b/test/files/neg/t3692-old.flags
deleted file mode 100644
index cb8324a345..0000000000
--- a/test/files/neg/t3692-old.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xoldpatmat
\ No newline at end of file
diff --git a/test/files/neg/t3692-old.scala b/test/files/neg/t3692-old.scala
deleted file mode 100644
index 151535ae94..0000000000
--- a/test/files/neg/t3692-old.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-import java.lang.Integer
-
-object ManifestTester {
- def main(args: Array[String]) = {
- val map = Map("John" -> 1, "Josh" -> 2)
- new ManifestTester().toJavaMap(map)
- }
-}
-
-class ManifestTester {
- private final def toJavaMap[T, V](map: Map[T, V])(implicit m1: Manifest[T], m2: Manifest[V]): java.util.Map[_, _] = {
- map match {
- case m0: Map[Int, Int] => new java.util.HashMap[Integer, Integer]
- case m1: Map[Int, V] => new java.util.HashMap[Integer, V]
- case m2: Map[T, Int] => new java.util.HashMap[T, Integer]
- case _ => new java.util.HashMap[T, V]
- }
- }
-}
\ No newline at end of file
diff --git a/test/files/neg/t409.check b/test/files/neg/t409.check
index 433d64d25d..0edc0d03cd 100644
--- a/test/files/neg/t409.check
+++ b/test/files/neg/t409.check
@@ -1,4 +1,4 @@
-t409.scala:6: error: traits or objects may not have parameters
+t409.scala:6: error: class Case1 needs to be a trait to be mixed in
class Toto extends Expr with Case1(12);
- ^
+ ^
one error found
diff --git a/test/files/neg/t414.scala b/test/files/neg/t414.scala
index 2bc83eedcb..1662b9a105 100644
--- a/test/files/neg/t414.scala
+++ b/test/files/neg/t414.scala
@@ -3,7 +3,7 @@ case class Node[a](left: IntMap[a], keyVal: Pair[Int, a], right: IntMap[a]) exte
abstract class IntMap[a] {
def lookup(key: Int): a = this match {
case Empty =>
- error("clef inexistante")
+ sys.error("clef inexistante")
case _ =>
};
diff --git a/test/files/neg/t421.check b/test/files/neg/t421.check
index e81df52ab0..dc5fa425ac 100644
--- a/test/files/neg/t421.check
+++ b/test/files/neg/t421.check
@@ -1,4 +1,4 @@
t421.scala:5: error: star patterns must correspond with varargs parameters
- case Bar("foo",_*) => error("huh?");
+ case Bar("foo",_*) => sys.error("huh?");
^
one error found
diff --git a/test/files/neg/t421.scala b/test/files/neg/t421.scala
index 43f6c9dafd..9a327be896 100644
--- a/test/files/neg/t421.scala
+++ b/test/files/neg/t421.scala
@@ -2,7 +2,7 @@ object foo {
case class Bar(a:String, b:AnyRef, c:String*);
Bar("foo","meets","bar") match {
- case Bar("foo",_*) => error("huh?");
+ case Bar("foo",_*) => sys.error("huh?");
}
}
diff --git a/test/files/neg/t4271.scala b/test/files/neg/t4271.scala
index 50526c8958..46ae3ad9ec 100644
--- a/test/files/neg/t4271.scala
+++ b/test/files/neg/t4271.scala
@@ -1,11 +1,11 @@
object foo {
object Donotuseme
- implicit def any2Ensuring[A](x: A) = Donotuseme
+ implicit def Ensuring[A](x: A) = Donotuseme
implicit def doubleWrapper(x: Int) = Donotuseme
implicit def floatWrapper(x: Int) = Donotuseme
implicit def intWrapper(x: Int) = Donotuseme
implicit def longWrapper(x: Int) = Donotuseme
- implicit def any2ArrowAssoc[A](x: A) = Donotuseme
+ implicit def ArrowAssoc[A](x: A) = Donotuseme
3 to 5
5 ensuring true
3 -> 5
diff --git a/test/files/neg/t4302.check b/test/files/neg/t4302.check
index 450d28bbc5..ea48729276 100644
--- a/test/files/neg/t4302.check
+++ b/test/files/neg/t4302.check
@@ -1,4 +1,6 @@
-t4302.scala:2: error: abstract type T is unchecked since it is eliminated by erasure
+t4302.scala:2: warning: abstract type T is unchecked since it is eliminated by erasure
def hasMatch[T](x: AnyRef) = x.isInstanceOf[T]
^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
one error found
diff --git a/test/files/neg/t4440.check b/test/files/neg/t4440.check
index 2861dc3040..10e7188e32 100644
--- a/test/files/neg/t4440.check
+++ b/test/files/neg/t4440.check
@@ -1,13 +1,15 @@
-t4440.scala:12: error: The outer reference in this type test cannot be checked at run time.
+t4440.scala:12: warning: The outer reference in this type test cannot be checked at run time.
case _: b.Inner => println("b")
^
-t4440.scala:13: error: The outer reference in this type test cannot be checked at run time.
+t4440.scala:13: warning: The outer reference in this type test cannot be checked at run time.
case _: a.Inner => println("a") // this is the case we want
^
-t4440.scala:16: error: The outer reference in this type test cannot be checked at run time.
+t4440.scala:16: warning: The outer reference in this type test cannot be checked at run time.
case _: a.Inner => println("a")
^
-t4440.scala:17: error: The outer reference in this type test cannot be checked at run time.
+t4440.scala:17: warning: The outer reference in this type test cannot be checked at run time.
case _: b.Inner => println("b") // this is the case we want
^
-four errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/t4460a.check b/test/files/neg/t4460a.check
new file mode 100644
index 0000000000..b711e7acb1
--- /dev/null
+++ b/test/files/neg/t4460a.check
@@ -0,0 +1,4 @@
+t4460a.scala:6: error: called constructor's definition must precede calling constructor's definition
+ def this() = this() // was binding to Predef.<init> !!
+ ^
+one error found
diff --git a/test/files/neg/t4460a.scala b/test/files/neg/t4460a.scala
new file mode 100644
index 0000000000..0a7a22178d
--- /dev/null
+++ b/test/files/neg/t4460a.scala
@@ -0,0 +1,7 @@
+trait A
+
+class B(val x: Int) {
+ self: A =>
+
+ def this() = this() // was binding to Predef.<init> !!
+}
diff --git a/test/files/neg/t4460b.check b/test/files/neg/t4460b.check
new file mode 100644
index 0000000000..f0e703fd10
--- /dev/null
+++ b/test/files/neg/t4460b.check
@@ -0,0 +1,4 @@
+t4460b.scala:7: error: called constructor's definition must precede calling constructor's definition
+ def this() = this() // was binding to Predef.<init> !!
+ ^
+one error found
diff --git a/test/files/neg/t4460b.scala b/test/files/neg/t4460b.scala
new file mode 100644
index 0000000000..1233017dd4
--- /dev/null
+++ b/test/files/neg/t4460b.scala
@@ -0,0 +1,9 @@
+trait A
+
+class Outer() {
+ class B(val x: Int) {
+ self: A =>
+
+ def this() = this() // was binding to Predef.<init> !!
+ }
+}
diff --git a/test/files/neg/t4460c.check b/test/files/neg/t4460c.check
new file mode 100644
index 0000000000..4e96711b8b
--- /dev/null
+++ b/test/files/neg/t4460c.check
@@ -0,0 +1,7 @@
+t4460c.scala:4: error: overloaded method constructor B with alternatives:
+ (a: String)B <and>
+ (x: Int)B
+ cannot be applied to ()
+ def this(a: String) = this()
+ ^
+one error found
diff --git a/test/files/neg/t4460c.scala b/test/files/neg/t4460c.scala
new file mode 100644
index 0000000000..1ae258508e
--- /dev/null
+++ b/test/files/neg/t4460c.scala
@@ -0,0 +1,7 @@
+class B(val x: Int) {
+ self: A =>
+
+ def this(a: String) = this()
+}
+
+class A()
diff --git a/test/files/neg/t4537.check b/test/files/neg/t4537.check
deleted file mode 100644
index 931bcd0405..0000000000
--- a/test/files/neg/t4537.check
+++ /dev/null
@@ -1,4 +0,0 @@
-c.scala:7: error: object Settings in package a cannot be accessed in package a
- println(Settings.Y)
- ^
-one error found
diff --git a/test/files/neg/t4537/c.scala b/test/files/neg/t4537/c.scala
deleted file mode 100644
index 379599112d..0000000000
--- a/test/files/neg/t4537/c.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package b
-package c
-
-import a._
-
-object Test {
- println(Settings.Y)
-}
\ No newline at end of file
diff --git a/test/files/neg/t4691_exhaust_extractor.check b/test/files/neg/t4691_exhaust_extractor.check
index cd12e56f86..6396944145 100644
--- a/test/files/neg/t4691_exhaust_extractor.check
+++ b/test/files/neg/t4691_exhaust_extractor.check
@@ -1,13 +1,15 @@
-t4691_exhaust_extractor.scala:17: error: match may not be exhaustive.
+t4691_exhaust_extractor.scala:17: warning: match may not be exhaustive.
It would fail on the following input: Bar3()
def f1(x: Foo) = x match {
^
-t4691_exhaust_extractor.scala:23: error: match may not be exhaustive.
+t4691_exhaust_extractor.scala:23: warning: match may not be exhaustive.
It would fail on the following input: Bar3()
def f2(x: Foo) = x match {
^
-t4691_exhaust_extractor.scala:29: error: match may not be exhaustive.
+t4691_exhaust_extractor.scala:29: warning: match may not be exhaustive.
It would fail on the following input: Bar3()
def f3(x: Foo) = x match {
^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/t4749.check b/test/files/neg/t4749.check
index 93ad3935fa..34eed6e433 100644
--- a/test/files/neg/t4749.check
+++ b/test/files/neg/t4749.check
@@ -1,28 +1,30 @@
-t4749.scala:2: error: Fail1 has a main method with parameter type Array[String], but bippy.Fail1 will not be a runnable program.
+t4749.scala:2: warning: Fail1 has a main method with parameter type Array[String], but bippy.Fail1 will not be a runnable program.
Reason: main method must have exact signature (Array[String])Unit
object Fail1 {
^
-t4749.scala:6: error: Fail2 has a main method with parameter type Array[String], but bippy.Fail2 will not be a runnable program.
+t4749.scala:6: warning: Fail2 has a main method with parameter type Array[String], but bippy.Fail2 will not be a runnable program.
Reason: main methods cannot be generic.
object Fail2 {
^
-t4749.scala:13: error: Fail3 has a main method with parameter type Array[String], but bippy.Fail3 will not be a runnable program.
+t4749.scala:13: warning: Fail3 has a main method with parameter type Array[String], but bippy.Fail3 will not be a runnable program.
Reason: main methods cannot refer to type parameters or abstract types.
object Fail3 extends Bippy[Unit] { }
^
-t4749.scala:16: error: Fail4 has a main method with parameter type Array[String], but bippy.Fail4 will not be a runnable program.
+t4749.scala:16: warning: Fail4 has a main method with parameter type Array[String], but bippy.Fail4 will not be a runnable program.
Reason: companion is a trait, which means no static forwarder can be generated.
object Fail4 {
^
-t4749.scala:21: error: Fail5 has a main method with parameter type Array[String], but bippy.Fail5 will not be a runnable program.
+t4749.scala:21: warning: Fail5 has a main method with parameter type Array[String], but bippy.Fail5 will not be a runnable program.
Reason: companion contains its own main method, which means no static forwarder can be generated.
object Fail5 extends Fail5 { }
^
-t4749.scala:26: error: Fail6 has a main method with parameter type Array[String], but bippy.Fail6 will not be a runnable program.
+t4749.scala:26: warning: Fail6 has a main method with parameter type Array[String], but bippy.Fail6 will not be a runnable program.
Reason: companion contains its own main method (implementation restriction: no main is allowed, regardless of signature), which means no static forwarder can be generated.
object Fail6 {
^
-6 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+6 warnings found
+one error found
diff --git a/test/files/neg/t4762.check b/test/files/neg/t4762.check
index 5e67f2022a..a0525f6226 100644
--- a/test/files/neg/t4762.check
+++ b/test/files/neg/t4762.check
@@ -1,7 +1,9 @@
-t4762.scala:15: error: private[this] value x in class B shadows mutable x inherited from class A. Changes to x will not be visible within class B - you may want to give them distinct names.
+t4762.scala:15: warning: private[this] value x in class B shadows mutable x inherited from class A. Changes to x will not be visible within class B - you may want to give them distinct names.
/* (99,99) */ (this.x, this.y),
^
-t4762.scala:48: error: private[this] value x in class Derived shadows mutable x inherited from class Base. Changes to x will not be visible within class Derived - you may want to give them distinct names.
+t4762.scala:48: warning: private[this] value x in class Derived shadows mutable x inherited from class Base. Changes to x will not be visible within class Derived - you may want to give them distinct names.
class Derived( x : Int ) extends Base( x ) { override def toString = x.toString }
^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/t4851.check b/test/files/neg/t4851.check
index 9633fdffed..4f2919807e 100644
--- a/test/files/neg/t4851.check
+++ b/test/files/neg/t4851.check
@@ -1,49 +1,51 @@
-S.scala:2: error: Adapting argument list by inserting (): leaky (Object-receiving) target makes this especially dangerous.
+S.scala:2: warning: Adapting argument list by inserting (): leaky (Object-receiving) target makes this especially dangerous.
signature: J(x: Any): J
given arguments: <none>
after adaptation: new J((): Unit)
val x1 = new J
^
-S.scala:3: error: Adapting argument list by inserting (): leaky (Object-receiving) target makes this especially dangerous.
+S.scala:3: warning: Adapting argument list by inserting (): leaky (Object-receiving) target makes this especially dangerous.
signature: J(x: Any): J
given arguments: <none>
after adaptation: new J((): Unit)
val x2 = new J()
^
-S.scala:4: error: Adapting argument list by creating a 5-tuple: this may not be what you want.
+S.scala:4: warning: Adapting argument list by creating a 5-tuple: this may not be what you want.
signature: J(x: Any): J
given arguments: 1, 2, 3, 4, 5
after adaptation: new J((1, 2, 3, 4, 5): (Int, Int, Int, Int, Int))
val x3 = new J(1, 2, 3, 4, 5)
^
-S.scala:6: error: Adapting argument list by creating a 3-tuple: this may not be what you want.
+S.scala:6: warning: Adapting argument list by creating a 3-tuple: this may not be what you want.
signature: Some.apply[A](x: A): Some[A]
given arguments: 1, 2, 3
after adaptation: Some((1, 2, 3): (Int, Int, Int))
val y1 = Some(1, 2, 3)
^
-S.scala:7: error: Adapting argument list by creating a 3-tuple: this may not be what you want.
+S.scala:7: warning: Adapting argument list by creating a 3-tuple: this may not be what you want.
signature: Some(x: A): Some[A]
given arguments: 1, 2, 3
after adaptation: new Some((1, 2, 3): (Int, Int, Int))
val y2 = new Some(1, 2, 3)
^
-S.scala:9: error: Adapting argument list by inserting (): this is unlikely to be what you want.
+S.scala:9: warning: Adapting argument list by inserting (): this is unlikely to be what you want.
signature: J2[T](x: T): J2[T]
given arguments: <none>
after adaptation: new J2((): Unit)
val z1 = new J2
^
-S.scala:10: error: Adapting argument list by inserting (): this is unlikely to be what you want.
+S.scala:10: warning: Adapting argument list by inserting (): this is unlikely to be what you want.
signature: J2[T](x: T): J2[T]
given arguments: <none>
after adaptation: new J2((): Unit)
val z2 = new J2()
^
-S.scala:14: error: Adapting argument list by creating a 3-tuple: this may not be what you want.
+S.scala:14: warning: Adapting argument list by creating a 3-tuple: this may not be what you want.
signature: Test.anyId(a: Any): Any
given arguments: 1, 2, 3
after adaptation: Test.anyId((1, 2, 3): (Int, Int, Int))
val w1 = anyId(1, 2 ,3)
^
-8 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+8 warnings found
+one error found
diff --git a/test/files/neg/t5182.check b/test/files/neg/t5182.check
new file mode 100644
index 0000000000..3161f92680
--- /dev/null
+++ b/test/files/neg/t5182.check
@@ -0,0 +1,7 @@
+t5182.scala:2: error: unknown annotation argument name: qwe
+ @java.lang.Deprecated(qwe = "wer") def ok(q:Int) = 1
+ ^
+t5182.scala:3: error: classfile annotation arguments have to be supplied as named arguments
+ @java.lang.Deprecated("wer") def whereAmI(q:Int) = 1
+ ^
+two errors found
diff --git a/test/files/neg/t5182.flags b/test/files/neg/t5182.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/t5182.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/t5182.scala b/test/files/neg/t5182.scala
new file mode 100644
index 0000000000..0687e99efb
--- /dev/null
+++ b/test/files/neg/t5182.scala
@@ -0,0 +1,5 @@
+class test {
+ @java.lang.Deprecated(qwe = "wer") def ok(q:Int) = 1
+ @java.lang.Deprecated("wer") def whereAmI(q:Int) = 1
+ @java.lang.Deprecated() def bippy(q:Int) = 1
+}
diff --git a/test/files/neg/t5353.check b/test/files/neg/t5353.check
deleted file mode 100644
index 75e2435600..0000000000
--- a/test/files/neg/t5353.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t5353.scala:2: error: this type parameter must be specified
- def f(x: Boolean) = if (x) Array("abc") else Array()
- ^
-one error found
diff --git a/test/files/neg/t5353.scala b/test/files/neg/t5353.scala
deleted file mode 100644
index 1ee869aac1..0000000000
--- a/test/files/neg/t5353.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class A {
- def f(x: Boolean) = if (x) Array("abc") else Array()
-}
diff --git a/test/files/neg/t5426.check b/test/files/neg/t5426.check
index d9e192d3f0..98f3ddaaae 100644
--- a/test/files/neg/t5426.check
+++ b/test/files/neg/t5426.check
@@ -1,13 +1,15 @@
-t5426.scala:2: error: comparing values of types Some[Int] and Int using `==' will always yield false
+t5426.scala:2: warning: comparing values of types Some[Int] and Int using `==' will always yield false
def f1 = Some(5) == 5
^
-t5426.scala:3: error: comparing values of types Int and Some[Int] using `==' will always yield false
+t5426.scala:3: warning: comparing values of types Int and Some[Int] using `==' will always yield false
def f2 = 5 == Some(5)
^
-t5426.scala:8: error: comparing values of types Int and Some[Int] using `==' will always yield false
+t5426.scala:8: warning: comparing values of types Int and Some[Int] using `==' will always yield false
(x1 == x2)
^
-t5426.scala:9: error: comparing values of types Some[Int] and Int using `==' will always yield false
+t5426.scala:9: warning: comparing values of types Some[Int] and Int using `==' will always yield false
(x2 == x1)
^
-four errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/t5440.check b/test/files/neg/t5440.check
index a862350a05..1c4592ccec 100644
--- a/test/files/neg/t5440.check
+++ b/test/files/neg/t5440.check
@@ -1,5 +1,7 @@
-t5440.scala:3: error: match may not be exhaustive.
+t5440.scala:3: warning: match may not be exhaustive.
It would fail on the following inputs: (List(_), Nil), (Nil, List(_))
(list1, list2) match {
^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
one error found
diff --git a/test/files/neg/t5529.check b/test/files/neg/t5529.check
index 5d2175fa79..da3f84e1ec 100644
--- a/test/files/neg/t5529.check
+++ b/test/files/neg/t5529.check
@@ -4,7 +4,4 @@ t5529.scala:12: error: File is already defined as class File
t5529.scala:10: error: class type required but test.Test.File found
sealed class Dir extends File { }
^
-t5529.scala:10: error: test.Test.File does not have a constructor
- sealed class Dir extends File { }
- ^
-three errors found
+two errors found
diff --git a/test/files/neg/t5663-badwarneq.check b/test/files/neg/t5663-badwarneq.check
index 00c2234e9d..12e93ff373 100644
--- a/test/files/neg/t5663-badwarneq.check
+++ b/test/files/neg/t5663-badwarneq.check
@@ -1,22 +1,24 @@
-t5663-badwarneq.scala:42: error: comparing case class values of types Some[Int] and None.type using `==' will always yield false
+t5663-badwarneq.scala:42: warning: comparing case class values of types Some[Int] and None.type using `==' will always yield false
println(new Some(1) == None) // Should complain on type, was: spuriously complains on fresh object
^
-t5663-badwarneq.scala:43: error: comparing case class values of types Some[Int] and Thing using `==' will always yield false
+t5663-badwarneq.scala:43: warning: comparing case class values of types Some[Int] and Thing using `==' will always yield false
println(Some(1) == new Thing(1)) // Should complain on type, was: spuriously complains on fresh object
^
-t5663-badwarneq.scala:51: error: ThingOne and Thingy are unrelated: they will most likely never compare equal
+t5663-badwarneq.scala:51: warning: ThingOne and Thingy are unrelated: they will most likely never compare equal
println(t1 == t2) // true, but apparently unrelated, a compromise warning
^
-t5663-badwarneq.scala:52: error: ThingThree and Thingy are unrelated: they will most likely never compare equal
+t5663-badwarneq.scala:52: warning: ThingThree and Thingy are unrelated: they will most likely never compare equal
println(t4 == t2) // true, complains because ThingThree is final and Thingy not a subclass, stronger claim than unrelated
^
-t5663-badwarneq.scala:55: error: comparing case class values of types ThingTwo and Some[Int] using `==' will always yield false
+t5663-badwarneq.scala:55: warning: comparing case class values of types ThingTwo and Some[Int] using `==' will always yield false
println(t3 == Some(1)) // false, warn on different cases
^
-t5663-badwarneq.scala:56: error: comparing values of types ThingOne and Cousin using `==' will always yield false
+t5663-badwarneq.scala:56: warning: comparing values of types ThingOne and Cousin using `==' will always yield false
println(t1 == c) // should warn
^
-t5663-badwarneq.scala:64: error: comparing case class values of types Simple and SimpleSibling.type using `==' will always yield false
+t5663-badwarneq.scala:64: warning: comparing case class values of types Simple and SimpleSibling.type using `==' will always yield false
println(new Simple() == SimpleSibling) // like Some(1) == None, but needn't be final case
^
-7 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+7 warnings found
+one error found
diff --git a/test/files/neg/t5696.check b/test/files/neg/t5696.check
index 72b7781fc4..e0fb61b839 100644
--- a/test/files/neg/t5696.check
+++ b/test/files/neg/t5696.check
@@ -15,5 +15,5 @@ t5696.scala:38: error: too many argument lists for constructor invocation
^
t5696.scala:46: error: too many argument lists for constructor invocation
object x extends G(1)(2) {}
- ^
+ ^
6 errors found
diff --git a/test/files/neg/t5753.check b/test/files/neg/t5753.check
index 76602de17d..379416c179 100644
--- a/test/files/neg/t5753.check
+++ b/test/files/neg/t5753.check
@@ -1,4 +1,5 @@
-Test_2.scala:9: error: macro implementation not found: foo (the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)
+Test_2.scala:9: error: macro implementation not found: foo
+(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)
println(foo(42))
^
one error found
diff --git a/test/files/neg/t5762.check b/test/files/neg/t5762.check
index 10064032aa..2a2f12144a 100644
--- a/test/files/neg/t5762.check
+++ b/test/files/neg/t5762.check
@@ -1,13 +1,15 @@
-t5762.scala:6: error: non-variable type argument Int in type pattern D[Int] is unchecked since it is eliminated by erasure
+t5762.scala:6: warning: non-variable type argument Int in type pattern D[Int] is unchecked since it is eliminated by erasure
case _: D[Int] if bippy => 1
^
-t5762.scala:7: error: non-variable type argument String in type pattern D[String] is unchecked since it is eliminated by erasure
+t5762.scala:7: warning: non-variable type argument String in type pattern D[String] is unchecked since it is eliminated by erasure
case _: D[String] => 2
^
-t5762.scala:20: error: non-variable type argument D[Int] in type pattern D[D[Int]] is unchecked since it is eliminated by erasure
+t5762.scala:20: warning: non-variable type argument D[Int] in type pattern D[D[Int]] is unchecked since it is eliminated by erasure
case _: D[D[Int]] if bippy => 1
^
-t5762.scala:21: error: non-variable type argument D[String] in type pattern D[D[String]] is unchecked since it is eliminated by erasure
+t5762.scala:21: warning: non-variable type argument D[String] in type pattern D[D[String]] is unchecked since it is eliminated by erasure
case _: D[D[String]] => 2
^
-four errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/t5830.check b/test/files/neg/t5830.check
index 726fac2a1e..58c3a1be38 100644
--- a/test/files/neg/t5830.check
+++ b/test/files/neg/t5830.check
@@ -1,7 +1,9 @@
-t5830.scala:6: error: unreachable code
+t5830.scala:6: warning: unreachable code
case 'a' => println("b") // unreachable
^
-t5830.scala:4: error: could not emit switch for @switch annotated match
+t5830.scala:4: warning: could not emit switch for @switch annotated match
def unreachable(ch: Char) = (ch: @switch) match {
^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/t5954.check b/test/files/neg/t5954.check
new file mode 100644
index 0000000000..3950d14e4e
--- /dev/null
+++ b/test/files/neg/t5954.check
@@ -0,0 +1,18 @@
+t5954.scala:36: warning: class D should be placed directly in package A instead of package object A. Under some circumstances companion objects and case classes in package objects can fail to recompile. See https://issues.scala-lang.org/browse/SI-5954.
+ case class D()
+ ^
+t5954.scala:35: warning: object C should be placed directly in package A instead of package object A. Under some circumstances companion objects and case classes in package objects can fail to recompile. See https://issues.scala-lang.org/browse/SI-5954.
+ object C
+ ^
+t5954.scala:34: warning: trait C should be placed directly in package A instead of package object A. Under some circumstances companion objects and case classes in package objects can fail to recompile. See https://issues.scala-lang.org/browse/SI-5954.
+ trait C
+ ^
+t5954.scala:33: warning: object B should be placed directly in package A instead of package object A. Under some circumstances companion objects and case classes in package objects can fail to recompile. See https://issues.scala-lang.org/browse/SI-5954.
+ object B
+ ^
+t5954.scala:32: warning: class B should be placed directly in package A instead of package object A. Under some circumstances companion objects and case classes in package objects can fail to recompile. See https://issues.scala-lang.org/browse/SI-5954.
+ class B
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+5 warnings found
+one error found
diff --git a/test/files/neg/t5954.flags b/test/files/neg/t5954.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/t5954.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/t5954.scala b/test/files/neg/t5954.scala
new file mode 100644
index 0000000000..3ccb5ed3ff
--- /dev/null
+++ b/test/files/neg/t5954.scala
@@ -0,0 +1,46 @@
+// if you ever think you've fixed the underlying reason for the warning
+// imposed by SI-5954, then here's a test that should pass with two "succes"es
+//
+//import scala.tools.partest._
+//
+//object Test extends DirectTest {
+// def code = ???
+//
+// def problemCode = """
+// package object A {
+// class B
+// object B
+// case class C()
+// }
+// """
+//
+// def compileProblemCode() = {
+// val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+// compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(problemCode)
+// }
+//
+// def show() : Unit = {
+// for (i <- 0 until 2) {
+// compileProblemCode()
+// println(s"success ${i + 1}")
+// }
+// }
+//}
+
+package object A {
+ // these should be prevented by the implementation restriction
+ class B
+ object B
+ trait C
+ object C
+ case class D()
+ // all the rest of these should be ok
+ class E
+ object F
+ val g = "omg"
+ var h = "wtf"
+ def i = "lol"
+ type j = String
+ class K(val k : Int) extends AnyVal
+ implicit class L(val l : Int)
+}
diff --git a/test/files/neg/t5956.check b/test/files/neg/t5956.check
index 6641dac97f..f5ae42c799 100644
--- a/test/files/neg/t5956.check
+++ b/test/files/neg/t5956.check
@@ -1,20 +1,7 @@
-t5956.scala:1: warning: case classes without a parameter list have been deprecated;
-use either case objects or case classes with `()' as parameter list.
-object O { case class C[T]; class C }
- ^
-t5956.scala:2: warning: case classes without a parameter list have been deprecated;
-use either case objects or case classes with `()' as parameter list.
-object T { case class C[T]; case class C }
- ^
-t5956.scala:2: warning: case classes without a parameter list have been deprecated;
-use either case objects or case classes with `()' as parameter list.
-object T { case class C[T]; case class C }
- ^
t5956.scala:1: error: C is already defined as case class C
-object O { case class C[T]; class C }
- ^
+object O { case class C[T](); class C() }
+ ^
t5956.scala:2: error: C is already defined as case class C
-object T { case class C[T]; case class C }
- ^
-three warnings found
+object T { case class C[T](); case class C() }
+ ^
two errors found
diff --git a/test/files/neg/t5956.scala b/test/files/neg/t5956.scala
index d985fa97a4..3cc10f3e19 100644
--- a/test/files/neg/t5956.scala
+++ b/test/files/neg/t5956.scala
@@ -1,2 +1,2 @@
-object O { case class C[T]; class C }
-object T { case class C[T]; case class C }
+object O { case class C[T](); class C() }
+object T { case class C[T](); case class C() }
diff --git a/test/files/neg/t6011.check b/test/files/neg/t6011.check
index 5b5a861e5b..cb7f189031 100644
--- a/test/files/neg/t6011.check
+++ b/test/files/neg/t6011.check
@@ -1,10 +1,12 @@
-t6011.scala:4: error: unreachable code
+t6011.scala:4: warning: unreachable code
case 'a' | 'c' => 1 // unreachable
^
-t6011.scala:10: error: unreachable code
+t6011.scala:10: warning: unreachable code
case 'b' | 'a' => 1 // unreachable
^
-t6011.scala:8: error: could not emit switch for @switch annotated match
+t6011.scala:8: warning: could not emit switch for @switch annotated match
def f2(ch: Char): Any = (ch: @annotation.switch) match {
^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/t6048.check b/test/files/neg/t6048.check
index 5bdf2eca88..5e11d24fde 100644
--- a/test/files/neg/t6048.check
+++ b/test/files/neg/t6048.check
@@ -1,13 +1,15 @@
-t6048.scala:3: error: unreachable code
+t6048.scala:3: warning: unreachable code
case _ if false => x // unreachable
^
-t6048.scala:8: error: unreachable code
+t6048.scala:8: warning: unreachable code
case _ if false => x // unreachable
^
-t6048.scala:13: error: patterns after a variable pattern cannot match (SLS 8.1.1)
+t6048.scala:13: warning: patterns after a variable pattern cannot match (SLS 8.1.1)
case _ => x
^
-t6048.scala:14: error: unreachable code due to variable pattern on line 13
+t6048.scala:14: warning: unreachable code due to variable pattern on line 13
case 5 if true => x // unreachable
^
-four errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/t6083.check b/test/files/neg/t6083.check
new file mode 100644
index 0000000000..c9b5ba05d3
--- /dev/null
+++ b/test/files/neg/t6083.check
@@ -0,0 +1,10 @@
+t6083.scala:6: warning: Implementation restriction: subclassing Classfile does not
+make your annotation visible at runtime. If that is what
+you want, you must write the annotation class in Java.
+class annot(value: String) extends annotation.ClassfileAnnotation
+ ^
+t6083.scala:7: error: annotation argument needs to be a constant; found: conv.i2s(101)
+@annot(101) class C
+ ^
+one warning found
+one error found
diff --git a/test/files/neg/t6083.scala b/test/files/neg/t6083.scala
new file mode 100644
index 0000000000..1de18e6527
--- /dev/null
+++ b/test/files/neg/t6083.scala
@@ -0,0 +1,7 @@
+object conv {
+ implicit def i2s(i: Int): String = ""
+}
+import conv._
+
+class annot(value: String) extends annotation.ClassfileAnnotation
+@annot(101) class C
diff --git a/test/files/neg/t6162-inheritance.check b/test/files/neg/t6162-inheritance.check
index a7d3cc3238..e98fa79eb7 100644
--- a/test/files/neg/t6162-inheritance.check
+++ b/test/files/neg/t6162-inheritance.check
@@ -1,10 +1,18 @@
-t6162-inheritance.scala:6: error: inheritance from class Foo in package t6126 is deprecated: `Foo` will be made final in a future version.
+t6162-inheritance.scala:6: warning: inheritance from class Foo in package t6126 is deprecated: `Foo` will be made final in a future version.
class SubFoo extends Foo
^
-t6162-inheritance.scala:11: error: inheritance from trait T in package t6126 is deprecated
+t6162-inheritance.scala:11: warning: inheritance from trait T in package t6126 is deprecated
object SubT extends T
^
-t6162-inheritance.scala:17: error: inheritance from trait S in package t6126 is deprecated
+t6162-inheritance.scala:17: warning: inheritance from trait S in package t6126 is deprecated
new S {
^
-three errors found
+t6162-inheritance.scala:6: warning: inheritance from class Foo in package t6126 is deprecated: `Foo` will be made final in a future version.
+class SubFoo extends Foo
+ ^
+t6162-inheritance.scala:11: warning: inheritance from trait T in package t6126 is deprecated
+object SubT extends T
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+5 warnings found
+one error found
diff --git a/test/files/neg/t6162-overriding.check b/test/files/neg/t6162-overriding.check
index e774888d36..6bff75d88d 100644
--- a/test/files/neg/t6162-overriding.check
+++ b/test/files/neg/t6162-overriding.check
@@ -1,7 +1,9 @@
-t6162-overriding.scala:14: error: overriding method bar in class Bar is deprecated: `bar` will be made private in a future version.
+t6162-overriding.scala:14: warning: overriding method bar in class Bar is deprecated: `bar` will be made private in a future version.
override def bar = 43
^
-t6162-overriding.scala:15: error: overriding method baz in class Bar is deprecated
+t6162-overriding.scala:15: warning: overriding method baz in class Bar is deprecated
override def baz = 43
^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/t6264.check b/test/files/neg/t6264.check
index 438be4c39f..c0975a80b2 100644
--- a/test/files/neg/t6264.check
+++ b/test/files/neg/t6264.check
@@ -1,4 +1,6 @@
-t6264.scala:3: error: non-variable type argument Tuple1[_] in type Tuple2[_, Tuple1[_]] is unchecked since it is eliminated by erasure
+t6264.scala:3: warning: non-variable type argument Tuple1[_] in type Tuple2[_, Tuple1[_]] is unchecked since it is eliminated by erasure
x.isInstanceOf[Tuple2[_, Tuple1[_]]]
^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
one error found
diff --git a/test/files/neg/t6276.check b/test/files/neg/t6276.check
index 0b3dfa5531..f275de9d0a 100644
--- a/test/files/neg/t6276.check
+++ b/test/files/neg/t6276.check
@@ -1,19 +1,21 @@
-t6276.scala:4: error: method a in class C does nothing other than call itself recursively
+t6276.scala:4: warning: method a in class C does nothing other than call itself recursively
def a: Any = a // warn
^
-t6276.scala:5: error: value b in class C does nothing other than call itself recursively
+t6276.scala:5: warning: value b in class C does nothing other than call itself recursively
val b: Any = b // warn
^
-t6276.scala:7: error: method c in class C does nothing other than call itself recursively
+t6276.scala:7: warning: method c in class C does nothing other than call itself recursively
def c: Any = this.c // warn
^
-t6276.scala:8: error: method d in class C does nothing other than call itself recursively
+t6276.scala:8: warning: method d in class C does nothing other than call itself recursively
def d: Any = C.this.d // warn
^
-t6276.scala:13: error: method a does nothing other than call itself recursively
+t6276.scala:13: warning: method a does nothing other than call itself recursively
def a: Any = a // warn
^
-t6276.scala:22: error: method a does nothing other than call itself recursively
+t6276.scala:22: warning: method a does nothing other than call itself recursively
def a = a // warn
^
-6 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+6 warnings found
+one error found
diff --git a/test/files/neg/t6355.check b/test/files/neg/t6355.check
new file mode 100644
index 0000000000..607829d99a
--- /dev/null
+++ b/test/files/neg/t6355.check
@@ -0,0 +1,7 @@
+t6355.scala:12: error: implementation restriction: applyDynamic cannot be overloaded except by methods with different numbers of type parameters, e.g. applyDynamic[T1](method: String)(arg: T1) and applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2)
+ def applyDynamic(name: String)(x: Int): Int = 2
+ ^
+t6355.scala:18: error: implementation restriction: applyDynamic cannot be overloaded except by methods with different numbers of type parameters, e.g. applyDynamic[T1](method: String)(arg: T1) and applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2)
+ def applyDynamic[T1, T2](name: String)(x: String, y: T1, z: T2): Int = 3
+ ^
+two errors found
diff --git a/test/files/neg/t6355.scala b/test/files/neg/t6355.scala
new file mode 100644
index 0000000000..0500ed04c6
--- /dev/null
+++ b/test/files/neg/t6355.scala
@@ -0,0 +1,19 @@
+package foo
+
+import scala.language.dynamics
+
+class DoesntExtendDynamic {
+ def applyDynamic(name: String)(s: String): Int = 1
+ def applyDynamic(name: String)(x: Int): Int = 2
+}
+
+class A extends Dynamic {
+ def applyDynamic(name: String)(s: String): Int = 1
+ def applyDynamic(name: String)(x: Int): Int = 2
+}
+
+class B extends Dynamic {
+ def applyDynamic[T1](name: String)(x: T1): Int = 1
+ def applyDynamic[T1, T2](name: String)(x: T1, y: T2): Int = 2
+ def applyDynamic[T1, T2](name: String)(x: String, y: T1, z: T2): Int = 3
+}
diff --git a/test/files/neg/t6375.check b/test/files/neg/t6375.check
new file mode 100644
index 0000000000..89d7d8060f
--- /dev/null
+++ b/test/files/neg/t6375.check
@@ -0,0 +1,27 @@
+t6375.scala:6: warning: no valid targets for annotation on value x1 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @getter)
+ @Bippy val x1: Int // warn
+ ^
+t6375.scala:7: warning: no valid targets for annotation on value x2 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.field @getter)
+ @(Bippy @field) val x2: Int // warn
+ ^
+t6375.scala:9: warning: no valid targets for annotation on value x4 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.setter @getter)
+ @(Bippy @setter) val x4: Int // warn
+ ^
+t6375.scala:10: warning: no valid targets for annotation on value x5 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.param @getter)
+ @(Bippy @param) val x5: Int // warn
+ ^
+t6375.scala:20: warning: no valid targets for annotation on value q1 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.getter @field)
+ @(Bippy @getter) private[this] val q1: Int = 1 // warn
+ ^
+t6375.scala:40: warning: no valid targets for annotation on value p2 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.getter @param)
+ @(Bippy @getter) p2: Int, // warn
+ ^
+t6375.scala:41: warning: no valid targets for annotation on value p3 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.setter @param)
+ @(Bippy @setter) p3: Int, // warn
+ ^
+t6375.scala:42: warning: no valid targets for annotation on value p4 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.field @param)
+ @(Bippy @field) p4: Int // warn
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+8 warnings found
+one error found
diff --git a/test/files/neg/t6375.flags b/test/files/neg/t6375.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/t6375.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/t6375.scala b/test/files/neg/t6375.scala
new file mode 100644
index 0000000000..21634df688
--- /dev/null
+++ b/test/files/neg/t6375.scala
@@ -0,0 +1,67 @@
+import scala.annotation.meta._
+
+class Bippy extends scala.annotation.StaticAnnotation
+
+abstract class Foo {
+ @Bippy val x1: Int // warn
+ @(Bippy @field) val x2: Int // warn
+ @(Bippy @getter) val x3: Int // no warn
+ @(Bippy @setter) val x4: Int // warn
+ @(Bippy @param) val x5: Int // warn
+}
+
+object Bar extends Foo {
+ val x1 = 1
+ val x2 = 2
+ val x3 = 3
+ val x4 = 4
+ val x5 = 5
+
+ @(Bippy @getter) private[this] val q1: Int = 1 // warn
+ @(Bippy @getter) private val q2: Int = 1 // no warn
+
+ def f1(@(Bippy @param) x: Int): Int = 0 // no warn
+ def f2(@(Bippy @getter) x: Int): Int = 0 // warn - todo
+ def f3(@(Bippy @setter) x: Int): Int = 0 // warn - todo
+ def f4(@(Bippy @field) x: Int): Int = 0 // warn - todo
+ def f5(@Bippy x: Int): Int = 0 // no warn
+
+ @(Bippy @companionClass) def g1(x: Int): Int = 0 // warn - todo
+ @(Bippy @companionObject) def g2(x: Int): Int = 0 // warn - todo
+ @(Bippy @companionMethod) def g3(x: Int): Int = 0 // no warn
+ @Bippy def g4(x: Int): Int = 0 // no warn
+
+ @(Bippy @companionObject @companionMethod) def g5(x: Int): Int = 0 // no warn
+}
+
+class Dingo(
+ @Bippy p0: Int, // no warn
+ @(Bippy @param) p1: Int, // no warn
+ @(Bippy @getter) p2: Int, // warn
+ @(Bippy @setter) p3: Int, // warn
+ @(Bippy @field) p4: Int // warn
+)
+
+class ValDingo(
+ @Bippy val p0: Int, // no warn
+ @(Bippy @param) val p1: Int, // no warn
+ @(Bippy @getter) val p2: Int, // no warn
+ @(Bippy @setter) val p3: Int, // warn - todo
+ @(Bippy @field) val p4: Int // no warn
+)
+
+class VarDingo(
+ @Bippy var p0: Int, // no warn
+ @(Bippy @param) var p1: Int, // no warn
+ @(Bippy @getter) var p2: Int, // no warn
+ @(Bippy @setter) var p3: Int, // no warn
+ @(Bippy @field) var p4: Int // no warn
+)
+
+case class CaseDingo(
+ @Bippy p0: Int, // no warn
+ @(Bippy @param) p1: Int, // no warn
+ @(Bippy @getter) p2: Int, // no warn
+ @(Bippy @setter) p3: Int, // warn - todo
+ @(Bippy @field) p4: Int // no warn
+)
diff --git a/test/files/neg/t6406-regextract.check b/test/files/neg/t6406-regextract.check
new file mode 100644
index 0000000000..4fea66f760
--- /dev/null
+++ b/test/files/neg/t6406-regextract.check
@@ -0,0 +1,7 @@
+t6406-regextract.scala:4: error: cannot resolve overloaded unapply
+ List(1) collect { case r(i) => i }
+ ^
+t6406-regextract.scala:4: error: not found: value i
+ List(1) collect { case r(i) => i }
+ ^
+two errors found
diff --git a/test/files/neg/t6406-regextract.flags b/test/files/neg/t6406-regextract.flags
new file mode 100644
index 0000000000..7de3c0f3ee
--- /dev/null
+++ b/test/files/neg/t6406-regextract.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -deprecation
diff --git a/test/files/neg/t6406-regextract.scala b/test/files/neg/t6406-regextract.scala
new file mode 100644
index 0000000000..0f5dad908d
--- /dev/null
+++ b/test/files/neg/t6406-regextract.scala
@@ -0,0 +1,5 @@
+
+object Test extends App {
+ val r = "(\\d+)".r
+ List(1) collect { case r(i) => i }
+}
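
For contrast with the failing case above (applying the Regex extractor to an Int inside collect, which now fails overload resolution), the ordinary use against a String still resolves and binds the group. A small sketch, not part of this test:

// Matching a String selects the CharSequence-based unapplySeq overload,
// so the capture group is bound as usual.
object RegexOk extends App {
  val r = "(\\d+)".r
  "123" match { case r(i) => println(i) }  // prints 123
}
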
diff --git a/test/files/neg/t6446-additional.check b/test/files/neg/t6446-additional.check
new file mode 100755
index 0000000000..53dd383941
--- /dev/null
+++ b/test/files/neg/t6446-additional.check
@@ -0,0 +1,31 @@
+ phase name id description
+ ---------- -- -----------
+ parser 1 parse source into ASTs, perform simple desugaring
+ namer 2 resolve names, attach symbols to named trees
+packageobjects 3 load package objects
+ typer 4 the meat and potatoes: type the trees
+ patmat 5 translate match expressions
+superaccessors 6 add super accessors in traits and nested classes
+ extmethods 7 add extension methods for inline classes
+ pickler 8 serialize symbol tables
+ refchecks 9 reference/override checking, translate nested objects
+ uncurry 10 uncurry, translate function values to anonymous classes
+ tailcalls 11 replace tail calls by jumps
+ specialize 12 @specialized-driven class and method specialization
+ explicitouter 13 this refs to outer pointers, translate patterns
+ erasure 14 erase types, add interfaces for traits
+ posterasure 15 clean up erased inline classes
+ lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs
+ lambdalift 17 move nested functions to top level
+ constructors 18 move field definitions into constructors
+ flatten 19 eliminate inner classes
+ mixin 20 mixin composition
+ cleanup 21 platform-specific cleanups, generate reflective calls
+ icode 22 generate portable intermediate code
+ inliner 23 optimization: do inlining
+inlinehandlers 24 optimization: inline exception handlers
+ closelim 25 optimization: eliminate uncalled closures
+ dce 26 optimization: eliminate dead code
+ jvm 27 generate JVM bytecode
+ ploogin 28 A sample phase that does so many things it's kind of hard...
+ terminal 29 The last phase in the compiler chain
diff --git a/test/files/neg/t6446-additional/ploogin_1.scala b/test/files/neg/t6446-additional/ploogin_1.scala
new file mode 100644
index 0000000000..ed6adfc1cf
--- /dev/null
+++ b/test/files/neg/t6446-additional/ploogin_1.scala
@@ -0,0 +1,31 @@
+
+package t6446
+
+import scala.tools.nsc.{ Global, Phase }
+import scala.tools.nsc.plugins.{ Plugin, PluginComponent }
+import scala.reflect.io.Path
+import scala.reflect.io.File
+
+/** A test plugin. */
+class Ploogin(val global: Global) extends Plugin {
+ import global._
+
+ val name = "ploogin"
+ val description = "A sample plugin for testing."
+ val components = List[PluginComponent](TestComponent)
+
+ private object TestComponent extends PluginComponent {
+ val global: Ploogin.this.global.type = Ploogin.this.global
+ //override val runsBefore = List("refchecks")
+ val runsAfter = List("jvm")
+ val phaseName = Ploogin.this.name
+ override def description = "A sample phase that does so many things it's kind of hard to describe briefly."
+ def newPhase(prev: Phase) = new TestPhase(prev)
+ class TestPhase(prev: Phase) extends StdPhase(prev) {
+ override def description = TestComponent.this.description
+ def apply(unit: CompilationUnit) {
+ // kewl kode
+ }
+ }
+ }
+}
diff --git a/test/files/neg/t6446-additional/sample_2.flags b/test/files/neg/t6446-additional/sample_2.flags
new file mode 100644
index 0000000000..4d518c2286
--- /dev/null
+++ b/test/files/neg/t6446-additional/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xshow-phases
diff --git a/test/files/neg/t6446-additional/sample_2.scala b/test/files/neg/t6446-additional/sample_2.scala
new file mode 100644
index 0000000000..73cdc64e40
--- /dev/null
+++ b/test/files/neg/t6446-additional/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/files/neg/t6446-additional/scalac-plugin.xml b/test/files/neg/t6446-additional/scalac-plugin.xml
new file mode 100644
index 0000000000..e849bb5919
--- /dev/null
+++ b/test/files/neg/t6446-additional/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+<name>sample-plugin</name>
+<classname>t6446.Ploogin</classname>
+</plugin>
diff --git a/test/files/neg/t6446-list.check b/test/files/neg/t6446-list.check
new file mode 100755
index 0000000000..fa5c581941
--- /dev/null
+++ b/test/files/neg/t6446-list.check
@@ -0,0 +1 @@
+ploogin - A sample plugin for testing.
diff --git a/test/files/neg/t6446-list/ploogin_1.scala b/test/files/neg/t6446-list/ploogin_1.scala
new file mode 100644
index 0000000000..ed6adfc1cf
--- /dev/null
+++ b/test/files/neg/t6446-list/ploogin_1.scala
@@ -0,0 +1,31 @@
+
+package t6446
+
+import scala.tools.nsc.{ Global, Phase }
+import scala.tools.nsc.plugins.{ Plugin, PluginComponent }
+import scala.reflect.io.Path
+import scala.reflect.io.File
+
+/** A test plugin. */
+class Ploogin(val global: Global) extends Plugin {
+ import global._
+
+ val name = "ploogin"
+ val description = "A sample plugin for testing."
+ val components = List[PluginComponent](TestComponent)
+
+ private object TestComponent extends PluginComponent {
+ val global: Ploogin.this.global.type = Ploogin.this.global
+ //override val runsBefore = List("refchecks")
+ val runsAfter = List("jvm")
+ val phaseName = Ploogin.this.name
+ override def description = "A sample phase that does so many things it's kind of hard to describe briefly."
+ def newPhase(prev: Phase) = new TestPhase(prev)
+ class TestPhase(prev: Phase) extends StdPhase(prev) {
+ override def description = TestComponent.this.description
+ def apply(unit: CompilationUnit) {
+ // kewl kode
+ }
+ }
+ }
+}
diff --git a/test/files/neg/t6446-list/sample_2.flags b/test/files/neg/t6446-list/sample_2.flags
new file mode 100644
index 0000000000..9cb3232964
--- /dev/null
+++ b/test/files/neg/t6446-list/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xplugin-list
diff --git a/test/files/neg/t6446-list/sample_2.scala b/test/files/neg/t6446-list/sample_2.scala
new file mode 100644
index 0000000000..73cdc64e40
--- /dev/null
+++ b/test/files/neg/t6446-list/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/files/neg/t6446-list/scalac-plugin.xml b/test/files/neg/t6446-list/scalac-plugin.xml
new file mode 100644
index 0000000000..e849bb5919
--- /dev/null
+++ b/test/files/neg/t6446-list/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+<name>sample-plugin</name>
+<classname>t6446.Ploogin</classname>
+</plugin>
diff --git a/test/files/neg/t6446-missing.check b/test/files/neg/t6446-missing.check
new file mode 100755
index 0000000000..f976bf480e
--- /dev/null
+++ b/test/files/neg/t6446-missing.check
@@ -0,0 +1,31 @@
+Warning: class not found: t6446.Ploogin
+ phase name id description
+ ---------- -- -----------
+ parser 1 parse source into ASTs, perform simple desugaring
+ namer 2 resolve names, attach symbols to named trees
+packageobjects 3 load package objects
+ typer 4 the meat and potatoes: type the trees
+ patmat 5 translate match expressions
+superaccessors 6 add super accessors in traits and nested classes
+ extmethods 7 add extension methods for inline classes
+ pickler 8 serialize symbol tables
+ refchecks 9 reference/override checking, translate nested objects
+ uncurry 10 uncurry, translate function values to anonymous classes
+ tailcalls 11 replace tail calls by jumps
+ specialize 12 @specialized-driven class and method specialization
+ explicitouter 13 this refs to outer pointers, translate patterns
+ erasure 14 erase types, add interfaces for traits
+ posterasure 15 clean up erased inline classes
+ lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs
+ lambdalift 17 move nested functions to top level
+ constructors 18 move field definitions into constructors
+ flatten 19 eliminate inner classes
+ mixin 20 mixin composition
+ cleanup 21 platform-specific cleanups, generate reflective calls
+ icode 22 generate portable intermediate code
+ inliner 23 optimization: do inlining
+inlinehandlers 24 optimization: inline exception handlers
+ closelim 25 optimization: eliminate uncalled closures
+ dce 26 optimization: eliminate dead code
+ jvm 27 generate JVM bytecode
+ terminal 28 The last phase in the compiler chain
diff --git a/test/files/neg/t6446-missing/sample_2.flags b/test/files/neg/t6446-missing/sample_2.flags
new file mode 100644
index 0000000000..4d518c2286
--- /dev/null
+++ b/test/files/neg/t6446-missing/sample_2.flags
@@ -0,0 +1 @@
+-Xplugin:. -Xshow-phases
diff --git a/test/files/neg/t6446-missing/sample_2.scala b/test/files/neg/t6446-missing/sample_2.scala
new file mode 100644
index 0000000000..73cdc64e40
--- /dev/null
+++ b/test/files/neg/t6446-missing/sample_2.scala
@@ -0,0 +1,6 @@
+
+package sample
+
+// just a sample that is compiled with the sample plugin enabled
+object Sample extends App {
+}
diff --git a/test/files/neg/t6446-missing/scalac-plugin.xml b/test/files/neg/t6446-missing/scalac-plugin.xml
new file mode 100644
index 0000000000..9c34d63f83
--- /dev/null
+++ b/test/files/neg/t6446-missing/scalac-plugin.xml
@@ -0,0 +1,4 @@
+<plugin>
+<name>missing-plugin</name>
+<classname>t6446.Ploogin</classname>
+</plugin>
diff --git a/test/files/neg/t6446-show-phases.check b/test/files/neg/t6446-show-phases.check
new file mode 100644
index 0000000000..5bbe43990c
--- /dev/null
+++ b/test/files/neg/t6446-show-phases.check
@@ -0,0 +1,30 @@
+ phase name id description
+ ---------- -- -----------
+ parser 1 parse source into ASTs, perform simple desugaring
+ namer 2 resolve names, attach symbols to named trees
+packageobjects 3 load package objects
+ typer 4 the meat and potatoes: type the trees
+ patmat 5 translate match expressions
+superaccessors 6 add super accessors in traits and nested classes
+ extmethods 7 add extension methods for inline classes
+ pickler 8 serialize symbol tables
+ refchecks 9 reference/override checking, translate nested objects
+ uncurry 10 uncurry, translate function values to anonymous classes
+ tailcalls 11 replace tail calls by jumps
+ specialize 12 @specialized-driven class and method specialization
+ explicitouter 13 this refs to outer pointers, translate patterns
+ erasure 14 erase types, add interfaces for traits
+ posterasure 15 clean up erased inline classes
+ lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs
+ lambdalift 17 move nested functions to top level
+ constructors 18 move field definitions into constructors
+ flatten 19 eliminate inner classes
+ mixin 20 mixin composition
+ cleanup 21 platform-specific cleanups, generate reflective calls
+ icode 22 generate portable intermediate code
+ inliner 23 optimization: do inlining
+inlinehandlers 24 optimization: inline exception handlers
+ closelim 25 optimization: eliminate uncalled closures
+ dce 26 optimization: eliminate dead code
+ jvm 27 generate JVM bytecode
+ terminal 28 The last phase in the compiler chain
diff --git a/test/files/neg/t6446-show-phases.flags b/test/files/neg/t6446-show-phases.flags
new file mode 100644
index 0000000000..845666e100
--- /dev/null
+++ b/test/files/neg/t6446-show-phases.flags
@@ -0,0 +1 @@
+-Xshow-phases
diff --git a/test/files/neg/t6446-show-phases.scala b/test/files/neg/t6446-show-phases.scala
new file mode 100644
index 0000000000..a9afb042d2
--- /dev/null
+++ b/test/files/neg/t6446-show-phases.scala
@@ -0,0 +1,3 @@
+
+// testing compiler flag output only
+object Test extends App
diff --git a/test/files/neg/t6566a.check b/test/files/neg/t6566a.check
new file mode 100644
index 0000000000..7668f9d2fb
--- /dev/null
+++ b/test/files/neg/t6566a.check
@@ -0,0 +1,4 @@
+t6566a.scala:2: error: covariant type T occurs in invariant position in type T of type MyType
+ class TypeCheat[+T] { type MyType = T }
+ ^
+one error found
diff --git a/test/files/neg/t6566a.scala b/test/files/neg/t6566a.scala
new file mode 100644
index 0000000000..74a0b38e56
--- /dev/null
+++ b/test/files/neg/t6566a.scala
@@ -0,0 +1,17 @@
+object WhatsYourTypeIsMyType {
+ class TypeCheat[+T] { type MyType = T }
+
+ class Foo {
+ val tc = new TypeCheat[Foo]
+ var x: tc.MyType = _
+ def setX() = x = new Foo
+ }
+ class Bar extends Foo {
+ override val tc = new TypeCheat[Bar]
+ def unsound = this
+
+ setX()
+ println(x.unsound)
+ }
+ def main(args: Array[String]): Unit = new Bar
+}
diff --git a/test/files/neg/t6566b.check b/test/files/neg/t6566b.check
new file mode 100644
index 0000000000..fb3fe81fca
--- /dev/null
+++ b/test/files/neg/t6566b.check
@@ -0,0 +1,4 @@
+t6566b.scala:3: error: covariant type T occurs in invariant position in type T of type MyType
+ type MyType = T
+ ^
+one error found
diff --git a/test/files/neg/t6566b.scala b/test/files/neg/t6566b.scala
new file mode 100644
index 0000000000..18ddebf88b
--- /dev/null
+++ b/test/files/neg/t6566b.scala
@@ -0,0 +1,19 @@
+object WhatsYourTypeIsMyType {
+ trait WithMyType[+T] {
+ type MyType = T
+ }
+
+ class Foo extends WithMyType[Foo] {
+ var x: MyType = _
+ def setX() = x = new Foo
+ }
+
+ class Bar extends Foo with WithMyType[Bar] {
+ def unsound { println("iAmABar") }
+
+ setX()
+ println(x.unsound)
+ }
+
+ def main(args: Array[String]): Unit = new Bar
+}
diff --git a/test/files/neg/t6567.check b/test/files/neg/t6567.check
index 4c513e64cd..a733d75354 100644
--- a/test/files/neg/t6567.check
+++ b/test/files/neg/t6567.check
@@ -1,7 +1,9 @@
-t6567.scala:8: error: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply.
+t6567.scala:8: warning: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply.
Option[B](a)
^
-t6567.scala:10: error: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply.
+t6567.scala:10: warning: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply.
val b: Option[B] = Option(a)
^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/t6667.check b/test/files/neg/t6667.check
index b04251d7c1..43313fa4fe 100644
--- a/test/files/neg/t6667.check
+++ b/test/files/neg/t6667.check
@@ -1,5 +1,4 @@
-t6667.scala:8: error: Search of in-scope implicits was ambiguous, and the implicit scope was searched. In Scala 2.11.0, this code will not compile. See SI-6667.
-ambiguous implicit values:
+t6667.scala:8: error: ambiguous implicit values:
both value inScope1 in object Test of type => C
and value inScope2 in object Test of type => C
match expected type C
diff --git a/test/files/neg/t6667.flags b/test/files/neg/t6667.flags
deleted file mode 100644
index 6c1dd108ae..0000000000
--- a/test/files/neg/t6667.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xfatal-warnings -Xlint
\ No newline at end of file
diff --git a/test/files/neg/t6667b.check b/test/files/neg/t6667b.check
index 5d56e776c3..99cea9a47c 100644
--- a/test/files/neg/t6667b.check
+++ b/test/files/neg/t6667b.check
@@ -4,8 +4,7 @@ t6667b.scala:16: error: ambiguous implicit values:
match expected type Test.Box
new Test()
^
-t6667b.scala:19: error: Search of in-scope implicits was ambiguous, and the implicit scope was searched. In Scala 2.11.0, this code will not compile. See SI-6667.
-ambiguous implicit values:
+t6667b.scala:19: error: ambiguous implicit values:
both value a in object Test of type => Test.Box
and value b of type Test.Box
match expected type Test.Box
diff --git a/test/files/neg/t6667b.flags b/test/files/neg/t6667b.flags
deleted file mode 100644
index 6c1dd108ae..0000000000
--- a/test/files/neg/t6667b.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xfatal-warnings -Xlint
\ No newline at end of file
diff --git a/test/files/neg/t667.check b/test/files/neg/t667.check
index d4367bc87b..e68c6dea00 100644
--- a/test/files/neg/t667.check
+++ b/test/files/neg/t667.check
@@ -1,4 +1,4 @@
-t667.scala:8: error: class Ni inherits itself
+t667.scala:8: error: illegal cyclic reference involving class Ni
class Ni extends super.Ni with Ni;
- ^
+ ^
one error found
diff --git a/test/files/neg/t6675-old-patmat.check b/test/files/neg/t6675-old-patmat.check
deleted file mode 100644
index bc3920da06..0000000000
--- a/test/files/neg/t6675-old-patmat.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t6675-old-patmat.scala:10: error: extractor pattern binds a single value to a Product3 of type (Int, Int, Int)
- "" match { case X(b) => b } // should warn under -Xlint. Not an error because of SI-6111
- ^
-one error found
diff --git a/test/files/neg/t6675-old-patmat.flags b/test/files/neg/t6675-old-patmat.flags
deleted file mode 100644
index 604de64a62..0000000000
--- a/test/files/neg/t6675-old-patmat.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xlint -Xfatal-warnings -Xoldpatmat
\ No newline at end of file
diff --git a/test/files/neg/t6675-old-patmat.scala b/test/files/neg/t6675-old-patmat.scala
deleted file mode 100644
index 4d500b77ba..0000000000
--- a/test/files/neg/t6675-old-patmat.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-object X {
- def unapply(s: String): Option[(Int,Int,Int)] = Some((1,2,3))
-}
-
-object Y {
- def unapplySeq(s: String): Option[Seq[(Int,Int,Int)]] = Some(Seq((1,2,3)))
-}
-
-object Test {
- "" match { case X(b) => b } // should warn under -Xlint. Not an error because of SI-6111
-
- "" match { case Y(b) => b } // no warning
-}
diff --git a/test/files/neg/t6675.check b/test/files/neg/t6675.check
index 7b271de213..3a277af866 100644
--- a/test/files/neg/t6675.check
+++ b/test/files/neg/t6675.check
@@ -1,4 +1,6 @@
-t6675.scala:10: error: extractor pattern binds a single value to a Product3 of type (Int, Int, Int)
+t6675.scala:10: warning: extractor pattern binds a single value to a Product3 of type (Int, Int, Int)
"" match { case X(b) => b } // should warn under -Xlint. Not an error because of SI-6111
^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
one error found
diff --git a/test/files/neg/t6902.check b/test/files/neg/t6902.check
index 8ad7fd37f9..ed0ed75303 100644
--- a/test/files/neg/t6902.check
+++ b/test/files/neg/t6902.check
@@ -1,10 +1,12 @@
-t6902.scala:4: error: unreachable code
+t6902.scala:4: warning: unreachable code
case Some(b) => 3 // no warning was emitted
^
-t6902.scala:9: error: unreachable code
+t6902.scala:9: warning: unreachable code
case Some(b) => 3 // no warning was emitted
^
-t6902.scala:21: error: unreachable code
+t6902.scala:21: warning: unreachable code
case 1 => 3 // crash
^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/t6963a.check b/test/files/neg/t6963a.check
index 159896fd10..5858e7740a 100644
--- a/test/files/neg/t6963a.check
+++ b/test/files/neg/t6963a.check
@@ -1,5 +1,7 @@
-t6963a.scala:4: error: method scanRight in trait TraversableLike has changed semantics in version 2.9.0:
+t6963a.scala:4: warning: method scanRight in trait TraversableLike has changed semantics in version 2.9.0:
The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse.
List(1,2,3,4,5).scanRight(0)(_+_)
^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
one error found
diff --git a/test/files/neg/t6963b.check b/test/files/neg/t6963b.check
deleted file mode 100644
index 7e205a41d0..0000000000
--- a/test/files/neg/t6963b.check
+++ /dev/null
@@ -1,13 +0,0 @@
-t6963b.scala:2: error: An Array will no longer match as Seq[_].
- def f1(x: Any) = x.isInstanceOf[Seq[_]]
- ^
-t6963b.scala:4: error: An Array will no longer match as Seq[_].
- case _: Seq[_] => true
- ^
-t6963b.scala:16: error: An Array will no longer match as Seq[_].
- case (Some(_: Seq[_]), Nil, _) => 1
- ^
-t6963b.scala:17: error: An Array will no longer match as Seq[_].
- case (None, List(_: List[_], _), _) => 2
- ^
-four errors found
diff --git a/test/files/neg/t6963b.flags b/test/files/neg/t6963b.flags
deleted file mode 100644
index 83caa2b147..0000000000
--- a/test/files/neg/t6963b.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xmigration:2.7 -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t6963b.scala b/test/files/neg/t6963b.scala
deleted file mode 100644
index 3cfa8f0dca..0000000000
--- a/test/files/neg/t6963b.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-object Test {
- def f1(x: Any) = x.isInstanceOf[Seq[_]]
- def f2(x: Any) = x match {
- case _: Seq[_] => true
- case _ => false
- }
-
- def f3(x: Any) = x match {
- case _: Array[_] => true
- case _ => false
- }
-
- def f4(x: Any) = x.isInstanceOf[Traversable[_]]
-
- def f5(x1: Any, x2: Any, x3: AnyRef) = (x1, x2, x3) match {
- case (Some(_: Seq[_]), Nil, _) => 1
- case (None, List(_: List[_], _), _) => 2
- case _ => 3
- }
-}
diff --git a/test/files/neg/t7171.check b/test/files/neg/t7171.check
index 8bdf08129b..ecd768afda 100644
--- a/test/files/neg/t7171.check
+++ b/test/files/neg/t7171.check
@@ -1,7 +1,6 @@
-t7171.scala:2: error: The outer reference in this type test cannot be checked at run time.
+t7171.scala:2: warning: The outer reference in this type test cannot be checked at run time.
final case class A()
^
-t7171.scala:9: error: The outer reference in this type test cannot be checked at run time.
- case _: A => true; case _ => false
- ^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t7171b.check b/test/files/neg/t7171b.check
index bd6b2bcfb4..bf695afea7 100644
--- a/test/files/neg/t7171b.check
+++ b/test/files/neg/t7171b.check
@@ -1,10 +1,6 @@
-t7171b.scala:2: error: The outer reference in this type test cannot be checked at run time.
+t7171b.scala:2: warning: The outer reference in this type test cannot be checked at run time.
final case class A()
^
-t7171b.scala:8: error: The outer reference in this type test cannot be checked at run time.
- case _: A => true; case _ => false
- ^
-t7171b.scala:13: error: The outer reference in this type test cannot be checked at run time.
- case _: A => true; case _ => false
- ^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
+one error found
diff --git a/test/files/neg/t877.check b/test/files/neg/t877.check
index 5f25bd439c..c3d4ab6584 100644
--- a/test/files/neg/t877.check
+++ b/test/files/neg/t877.check
@@ -1,7 +1,7 @@
t877.scala:3: error: Invalid literal number
trait Foo extends A(22A, Bug!) {}
^
-t877.scala:3: error: parents of traits may not have parameters
+t877.scala:3: error: ')' expected but eof found.
trait Foo extends A(22A, Bug!) {}
- ^
+ ^
two errors found
diff --git a/test/files/neg/unchecked-abstract.check b/test/files/neg/unchecked-abstract.check
index 6e811dc156..72019082ac 100644
--- a/test/files/neg/unchecked-abstract.check
+++ b/test/files/neg/unchecked-abstract.check
@@ -1,25 +1,27 @@
-unchecked-abstract.scala:16: error: abstract type H in type Contravariant[M.this.H] is unchecked since it is eliminated by erasure
+unchecked-abstract.scala:16: warning: abstract type H in type Contravariant[M.this.H] is unchecked since it is eliminated by erasure
/* warn */ println(x.isInstanceOf[Contravariant[H]])
^
-unchecked-abstract.scala:21: error: abstract type H in type Contravariant[M.this.H] is unchecked since it is eliminated by erasure
+unchecked-abstract.scala:21: warning: abstract type H in type Contravariant[M.this.H] is unchecked since it is eliminated by erasure
/* warn */ println(x.isInstanceOf[Contravariant[H]])
^
-unchecked-abstract.scala:27: error: abstract type T in type Invariant[M.this.T] is unchecked since it is eliminated by erasure
+unchecked-abstract.scala:27: warning: abstract type T in type Invariant[M.this.T] is unchecked since it is eliminated by erasure
/* warn */ println(x.isInstanceOf[Invariant[T]])
^
-unchecked-abstract.scala:28: error: abstract type L in type Invariant[M.this.L] is unchecked since it is eliminated by erasure
+unchecked-abstract.scala:28: warning: abstract type L in type Invariant[M.this.L] is unchecked since it is eliminated by erasure
/* warn */ println(x.isInstanceOf[Invariant[L]])
^
-unchecked-abstract.scala:31: error: abstract type H in type Invariant[M.this.H] is unchecked since it is eliminated by erasure
+unchecked-abstract.scala:31: warning: abstract type H in type Invariant[M.this.H] is unchecked since it is eliminated by erasure
/* warn */ println(x.isInstanceOf[Invariant[H]])
^
-unchecked-abstract.scala:33: error: abstract type L in type Invariant[M.this.L] is unchecked since it is eliminated by erasure
+unchecked-abstract.scala:33: warning: abstract type L in type Invariant[M.this.L] is unchecked since it is eliminated by erasure
/* warn */ println(x.isInstanceOf[Invariant[L]])
^
-unchecked-abstract.scala:36: error: abstract type H in type Invariant[M.this.H] is unchecked since it is eliminated by erasure
+unchecked-abstract.scala:36: warning: abstract type H in type Invariant[M.this.H] is unchecked since it is eliminated by erasure
/* warn */ println(x.isInstanceOf[Invariant[H]])
^
-unchecked-abstract.scala:37: error: abstract type T in type Invariant[M.this.T] is unchecked since it is eliminated by erasure
+unchecked-abstract.scala:37: warning: abstract type T in type Invariant[M.this.T] is unchecked since it is eliminated by erasure
/* warn */ println(x.isInstanceOf[Invariant[T]])
^
-8 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+8 warnings found
+one error found
diff --git a/test/files/neg/unchecked-impossible.check b/test/files/neg/unchecked-impossible.check
index 0ab371dbaa..d150a5a853 100644
--- a/test/files/neg/unchecked-impossible.check
+++ b/test/files/neg/unchecked-impossible.check
@@ -1,4 +1,10 @@
-unchecked-impossible.scala:5: error: fruitless type test: a value of type T2[Int,Int] cannot also be a Seq[A]
+unchecked-impossible.scala:5: warning: fruitless type test: a value of type T2[Int,Int] cannot also be a Seq[A]
case Seq(x) =>
^
+unchecked-impossible.scala:5: error: pattern type is incompatible with expected type;
+ found : Seq[A]
+ required: T2[Int,Int]
+ case Seq(x) =>
+ ^
+one warning found
one error found
diff --git a/test/files/neg/unchecked-knowable.check b/test/files/neg/unchecked-knowable.check
index d279427327..327a5f202d 100644
--- a/test/files/neg/unchecked-knowable.check
+++ b/test/files/neg/unchecked-knowable.check
@@ -1,7 +1,9 @@
-unchecked-knowable.scala:18: error: fruitless type test: a value of type Bippy cannot also be a A1
+unchecked-knowable.scala:18: warning: fruitless type test: a value of type Bippy cannot also be a A1
/* warn */ (new Bippy).isInstanceOf[A1]
^
-unchecked-knowable.scala:19: error: fruitless type test: a value of type Bippy cannot also be a B1
+unchecked-knowable.scala:19: warning: fruitless type test: a value of type Bippy cannot also be a B1
/* warn */ (new Bippy).isInstanceOf[B1]
^
-two errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
+one error found
diff --git a/test/files/neg/unchecked-refinement.check b/test/files/neg/unchecked-refinement.check
index d81517464f..e85a51f44d 100644
--- a/test/files/neg/unchecked-refinement.check
+++ b/test/files/neg/unchecked-refinement.check
@@ -1,13 +1,15 @@
-unchecked-refinement.scala:17: error: abstract type U in type pattern Foo[U,U,V] is unchecked since it is eliminated by erasure
+unchecked-refinement.scala:17: warning: abstract type U in type pattern Foo[U,U,V] is unchecked since it is eliminated by erasure
/* warn */ case _: Foo[U, U, V] if b => ()
^
-unchecked-refinement.scala:19: error: non-variable type argument Any in type pattern Foo[Any,U,V] is unchecked since it is eliminated by erasure
+unchecked-refinement.scala:19: warning: non-variable type argument Any in type pattern Foo[Any,U,V] is unchecked since it is eliminated by erasure
/* warn */ case _: Foo[Any, U, V] if b => ()
^
-unchecked-refinement.scala:23: error: a pattern match on a refinement type is unchecked
+unchecked-refinement.scala:23: warning: a pattern match on a refinement type is unchecked
/* nowarn - todo */ case x: AnyRef { def bippy: Int } if b => x.bippy // this could/should do an instance check and not warn
^
-unchecked-refinement.scala:24: error: a pattern match on a refinement type is unchecked
+unchecked-refinement.scala:24: warning: a pattern match on a refinement type is unchecked
/* nowarn - todo */ case x: AnyRef { def size: Int } if b => x.size // this could/should do a static conformance test and not warn
^
-four errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/unchecked-suppress.check b/test/files/neg/unchecked-suppress.check
index 2e23d21386..038105918e 100644
--- a/test/files/neg/unchecked-suppress.check
+++ b/test/files/neg/unchecked-suppress.check
@@ -1,10 +1,12 @@
-unchecked-suppress.scala:4: error: non-variable type argument Int in type pattern Set[Int] is unchecked since it is eliminated by erasure
+unchecked-suppress.scala:4: warning: non-variable type argument Int in type pattern Set[Int] is unchecked since it is eliminated by erasure
case xs: Set[Int] => xs.head // unchecked
^
-unchecked-suppress.scala:5: error: non-variable type argument String in type pattern Map[String @unchecked,String] is unchecked since it is eliminated by erasure
+unchecked-suppress.scala:5: warning: non-variable type argument String in type pattern Map[String @unchecked,String] is unchecked since it is eliminated by erasure
case xs: Map[String @unchecked, String] => xs.head // one unchecked, one okay
^
-unchecked-suppress.scala:7: error: non-variable type argument Int in type pattern (Int, Int) => Int is unchecked since it is eliminated by erasure
+unchecked-suppress.scala:7: warning: non-variable type argument Int in type pattern (Int, Int) => Int is unchecked since it is eliminated by erasure
case f: ((Int, Int) => Int) => // unchecked
^
-three errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/unchecked.check b/test/files/neg/unchecked.check
index 2883b716c9..570f02f219 100644
--- a/test/files/neg/unchecked.check
+++ b/test/files/neg/unchecked.check
@@ -1,19 +1,21 @@
-unchecked.scala:18: error: non-variable type argument String in type pattern Iterable[String] is unchecked since it is eliminated by erasure
+unchecked.scala:18: warning: non-variable type argument String in type pattern Iterable[String] is unchecked since it is eliminated by erasure
case xs: Iterable[String] => xs.head // unchecked
^
-unchecked.scala:22: error: non-variable type argument Any in type pattern Set[Any] is unchecked since it is eliminated by erasure
+unchecked.scala:22: warning: non-variable type argument Any in type pattern Set[Any] is unchecked since it is eliminated by erasure
case xs: Set[Any] => xs.head // unchecked
^
-unchecked.scala:26: error: non-variable type argument Any in type pattern Map[Any,Any] is unchecked since it is eliminated by erasure
+unchecked.scala:26: warning: non-variable type argument Any in type pattern Map[Any,Any] is unchecked since it is eliminated by erasure
case xs: Map[Any, Any] => xs.head // unchecked
^
-unchecked.scala:35: error: non-variable type argument List[Nothing] in type pattern Test.Contra[List[Nothing]] is unchecked since it is eliminated by erasure
+unchecked.scala:35: warning: non-variable type argument List[Nothing] in type pattern Test.Contra[List[Nothing]] is unchecked since it is eliminated by erasure
case xs: Contra[List[Nothing]] => xs.head // unchecked
^
-unchecked.scala:50: error: non-variable type argument String in type pattern Test.Exp[String] is unchecked since it is eliminated by erasure
+unchecked.scala:50: warning: non-variable type argument String in type pattern Test.Exp[String] is unchecked since it is eliminated by erasure
case ArrayApply(x: Exp[Array[T]], _, j: Exp[String]) => x // unchecked
^
-unchecked.scala:55: error: non-variable type argument Array[T] in type pattern Test.Exp[Array[T]] is unchecked since it is eliminated by erasure
+unchecked.scala:55: warning: non-variable type argument Array[T] in type pattern Test.Exp[Array[T]] is unchecked since it is eliminated by erasure
case ArrayApply(x: Exp[Array[T]], _, _) => x // unchecked
^
-6 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+6 warnings found
+one error found
diff --git a/test/files/neg/unchecked2.check b/test/files/neg/unchecked2.check
index 68fdfa82ac..a7b8391856 100644
--- a/test/files/neg/unchecked2.check
+++ b/test/files/neg/unchecked2.check
@@ -1,43 +1,45 @@
-unchecked2.scala:4: error: fruitless type test: a value of type Some[List[Int]] cannot also be a Option[List[String]] (but still might match its erasure)
+unchecked2.scala:4: warning: fruitless type test: a value of type Some[List[Int]] cannot also be a Option[List[String]] (but still might match its erasure)
/* warn */ Some(List(1)).isInstanceOf[Option[List[String]]]
^
-unchecked2.scala:5: error: non-variable type argument Option[_] in type Option[Option[_]] is unchecked since it is eliminated by erasure
+unchecked2.scala:5: warning: non-variable type argument Option[_] in type Option[Option[_]] is unchecked since it is eliminated by erasure
/* warn */ Some(123).isInstanceOf[Option[Option[_]]]
^
-unchecked2.scala:6: error: fruitless type test: a value of type Some[Int] cannot also be a Option[String] (but still might match its erasure)
+unchecked2.scala:6: warning: fruitless type test: a value of type Some[Int] cannot also be a Option[String] (but still might match its erasure)
/* warn */ Some(123).isInstanceOf[Option[String]]
^
-unchecked2.scala:7: error: fruitless type test: a value of type Some[Int] cannot also be a Option[List[String]] (but still might match its erasure)
+unchecked2.scala:7: warning: fruitless type test: a value of type Some[Int] cannot also be a Option[List[String]] (but still might match its erasure)
/* warn */ Some(123).isInstanceOf[Option[List[String]]]
^
-unchecked2.scala:8: error: fruitless type test: a value of type Some[Int] cannot also be a Option[List[Int => String]] (but still might match its erasure)
+unchecked2.scala:8: warning: fruitless type test: a value of type Some[Int] cannot also be a Option[List[Int => String]] (but still might match its erasure)
/* warn */ Some(123).isInstanceOf[Option[List[Int => String]]]
^
-unchecked2.scala:9: error: fruitless type test: a value of type Some[Int] cannot also be a Option[(String, Double)] (but still might match its erasure)
+unchecked2.scala:9: warning: fruitless type test: a value of type Some[Int] cannot also be a Option[(String, Double)] (but still might match its erasure)
/* warn */ Some(123).isInstanceOf[Option[(String, Double)]]
^
-unchecked2.scala:10: error: fruitless type test: a value of type Some[Int] cannot also be a Option[String => Double] (but still might match its erasure)
+unchecked2.scala:10: warning: fruitless type test: a value of type Some[Int] cannot also be a Option[String => Double] (but still might match its erasure)
/* warn */ Some(123).isInstanceOf[Option[String => Double]]
^
-unchecked2.scala:14: error: non-variable type argument List[String] in type Option[List[String]] is unchecked since it is eliminated by erasure
+unchecked2.scala:14: warning: non-variable type argument List[String] in type Option[List[String]] is unchecked since it is eliminated by erasure
/* warn */ (Some(List(1)): Any).isInstanceOf[Option[List[String]]]
^
-unchecked2.scala:15: error: non-variable type argument Int in type Option[Int] is unchecked since it is eliminated by erasure
+unchecked2.scala:15: warning: non-variable type argument Int in type Option[Int] is unchecked since it is eliminated by erasure
/* warn */ (Some(123): Any).isInstanceOf[Option[Int]]
^
-unchecked2.scala:16: error: non-variable type argument String in type Option[String] is unchecked since it is eliminated by erasure
+unchecked2.scala:16: warning: non-variable type argument String in type Option[String] is unchecked since it is eliminated by erasure
/* warn */ (Some(123): Any).isInstanceOf[Option[String]]
^
-unchecked2.scala:17: error: non-variable type argument List[String] in type Option[List[String]] is unchecked since it is eliminated by erasure
+unchecked2.scala:17: warning: non-variable type argument List[String] in type Option[List[String]] is unchecked since it is eliminated by erasure
/* warn */ (Some(123): Any).isInstanceOf[Option[List[String]]]
^
-unchecked2.scala:18: error: non-variable type argument List[Int => String] in type Option[List[Int => String]] is unchecked since it is eliminated by erasure
+unchecked2.scala:18: warning: non-variable type argument List[Int => String] in type Option[List[Int => String]] is unchecked since it is eliminated by erasure
/* warn */ (Some(123): Any).isInstanceOf[Option[List[Int => String]]]
^
-unchecked2.scala:19: error: non-variable type argument (String, Double) in type Option[(String, Double)] is unchecked since it is eliminated by erasure
+unchecked2.scala:19: warning: non-variable type argument (String, Double) in type Option[(String, Double)] is unchecked since it is eliminated by erasure
/* warn */ (Some(123): Any).isInstanceOf[Option[(String, Double)]]
^
-unchecked2.scala:20: error: non-variable type argument String => Double in type Option[String => Double] is unchecked since it is eliminated by erasure
+unchecked2.scala:20: warning: non-variable type argument String => Double in type Option[String => Double] is unchecked since it is eliminated by erasure
/* warn */ (Some(123): Any).isInstanceOf[Option[String => Double]]
^
-14 errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+14 warnings found
+one error found
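
The "fruitless type test" warnings above exist because isInstanceOf can only inspect the erased class at runtime: the static types rule the test out, yet it still answers true. A tiny sketch, separate from the patch:

  object FruitlessTestSketch extends App {
    val x = Some(List(1))
    // Statically a Some[List[Int]] can never be an Option[List[String]], but only
    // the erasure (Option) is checked at runtime, so this prints true.
    println(x.isInstanceOf[Option[List[String]]])
  }
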
diff --git a/test/files/neg/unchecked3.check b/test/files/neg/unchecked3.check
index f4f0c74257..a7582a8930 100644
--- a/test/files/neg/unchecked3.check
+++ b/test/files/neg/unchecked3.check
@@ -1,37 +1,42 @@
-unchecked3.scala:24: error: non-variable type argument Double in type pattern E1[Double] is unchecked since it is eliminated by erasure
+unchecked3.scala:24: warning: non-variable type argument Double in type pattern E1[Double] is unchecked since it is eliminated by erasure
/* warn */ def peerTypes2(x: B1[Int]) = x match { case _: E1[Double] => true }
^
-unchecked3.scala:25: error: non-variable type argument Double in type pattern F1[Double] is unchecked since it is eliminated by erasure
+unchecked3.scala:25: warning: non-variable type argument Double in type pattern F1[Double] is unchecked since it is eliminated by erasure
/* warn */ def peerTypes3(x: B1[_]) = x match { case _: F1[Double] => true }
^
-unchecked3.scala:28: error: non-variable type argument Int in type pattern A2[Int] is unchecked since it is eliminated by erasure
+unchecked3.scala:28: warning: non-variable type argument Int in type pattern A2[Int] is unchecked since it is eliminated by erasure
/* warn */ def twotypes1[T](x: B2[T, Int]) = x match { case _: A2[Int] => true }
^
-unchecked3.scala:32: error: non-variable type argument Int in type pattern B2[_,Int] is unchecked since it is eliminated by erasure
+unchecked3.scala:32: warning: non-variable type argument Int in type pattern B2[_,Int] is unchecked since it is eliminated by erasure
/* warn */ def twotypes5[T](x: A2[T]) = x match { case _: B2[_, Int] => true }
^
-unchecked3.scala:40: error: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure
+unchecked3.scala:40: warning: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure
/* warn */ case _: Array[List[String]] => ()
^
-unchecked3.scala:43: error: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure
+unchecked3.scala:43: warning: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure
/* warn */ case _: Array[Array[List[String]]] => ()
^
-unchecked3.scala:50: error: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure
+unchecked3.scala:50: warning: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure
/* warn */ case _: Array[List[String]] => ()
^
-unchecked3.scala:53: error: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure
+unchecked3.scala:53: warning: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure
/* warn */ case _: Array[Array[List[String]]] => ()
^
-unchecked3.scala:60: error: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure
+unchecked3.scala:60: warning: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure
/* warn */ case _: Array[List[String]] => ()
^
-unchecked3.scala:62: error: non-variable type argument Array[String] in type pattern Array[List[Array[String]]] is unchecked since it is eliminated by erasure
+unchecked3.scala:62: warning: non-variable type argument Array[String] in type pattern Array[List[Array[String]]] is unchecked since it is eliminated by erasure
/* warn */ case _: Array[List[Array[String]]] => ()
^
-unchecked3.scala:63: error: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure
+unchecked3.scala:63: warning: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure
/* warn */ case _: Array[Array[List[String]]] => ()
^
-unchecked3.scala:75: error: abstract type A in type pattern Set[Q.this.A] is unchecked since it is eliminated by erasure
+unchecked3.scala:75: warning: abstract type A in type pattern Set[Q.this.A] is unchecked since it is eliminated by erasure
/* warn */ case xs: Set[A] => xs.head
^
-12 errors found
+unchecked3.scala:62: warning: unreachable code
+ /* warn */ case _: Array[List[Array[String]]] => ()
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+13 warnings found
+one error found
diff --git a/test/files/neg/unit-returns-value.check b/test/files/neg/unit-returns-value.check
index ab458a350b..f30a506ebe 100644
--- a/test/files/neg/unit-returns-value.check
+++ b/test/files/neg/unit-returns-value.check
@@ -1,7 +1,15 @@
-unit-returns-value.scala:4: error: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+unit-returns-value.scala:4: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
if (b) return 5
^
-unit-returns-value.scala:4: error: enclosing method f has result type Unit: return value discarded
+unit-returns-value.scala:4: warning: enclosing method f has result type Unit: return value discarded
if (b) return 5
^
-two errors found
+unit-returns-value.scala:22: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ i1 // warn
+ ^
+unit-returns-value.scala:23: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ i2 // warn
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/unit-returns-value.scala b/test/files/neg/unit-returns-value.scala
index ecc981f217..fc5a37069f 100644
--- a/test/files/neg/unit-returns-value.scala
+++ b/test/files/neg/unit-returns-value.scala
@@ -3,9 +3,30 @@ object Test {
var b = false
if (b) return 5
}
-
+
// no warning
def g {
return println("hello")
}
}
+
+class UnusedValues {
+ var i1 = 2
+ val i2 = 2
+ lazy val i3 = 2
+ object i4 { }
+ def i5 = 2
+ final def i6 = 2
+
+ def x = {
+ i1 // warn
+ i2 // warn
+ i3 // no warn
+ i4 // no warn
+ i5 // no warn
+ i6 // could warn someday, if i6 returned 2.type instead of Int
+
+ 5
+ }
+}
+
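
The UnusedValues cases added above draw the line at purity: reading a val or var in statement position provably does nothing, while forcing a lazy val or calling a def may have side effects and therefore stays silent. A compact sketch of the same distinction, separate from the patch (names are illustrative):

  class UnusedValuesSketch {
    val v      = 2
    lazy val l = { println("forced"); 2 }
    def d      = { println("called"); 2 }

    def x: Int = {
      v   // warns: a pure expression does nothing in statement position
      l   // no warning: forcing the lazy val may have side effects
      d   // no warning: the call may have side effects
      5
    }
  }
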
diff --git a/test/files/neg/unreachablechar.check b/test/files/neg/unreachablechar.check
index 58ce1a7e91..121f12a0c7 100644
--- a/test/files/neg/unreachablechar.check
+++ b/test/files/neg/unreachablechar.check
@@ -1,4 +1,9 @@
-unreachablechar.scala:5: error: unreachable code
+unreachablechar.scala:4: warning: patterns after a variable pattern cannot match (SLS 8.1.1)
+ case _ => println("stuff");
+ ^
+unreachablechar.scala:5: warning: unreachable code due to variable pattern on line 4
case 'f' => println("not stuff?");
^
+error: No warnings can be incurred under -Xfatal-warnings.
+two warnings found
one error found
diff --git a/test/files/neg/unreachablechar.flags b/test/files/neg/unreachablechar.flags
index 809e9ff2f2..85d8eb2ba2 100644
--- a/test/files/neg/unreachablechar.flags
+++ b/test/files/neg/unreachablechar.flags
@@ -1 +1 @@
- -Xoldpatmat
+-Xfatal-warnings
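
Dropping -Xoldpatmat above hands the file to the new pattern matcher, which reports both sides of the problem: the variable pattern that swallows every input (SLS 8.1.1) and the case behind it that can never run. A minimal reproduction, separate from the patch:

  object UnreachableCharSketch extends App {
    'f' match {
      case _   => println("stuff")        // a variable pattern matches anything
      case 'f' => println("not stuff?")   // warns: unreachable code
    }
  }
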
diff --git a/test/files/neg/variances-refinement.check b/test/files/neg/variances-refinement.check
new file mode 100644
index 0000000000..2bed3ffa6b
--- /dev/null
+++ b/test/files/neg/variances-refinement.check
@@ -0,0 +1,22 @@
+variances-refinement.scala:17: error: contravariant type A occurs in covariant position in type ()AnyRef{def f0(x: A): A} of method fail1
+ def fail1() = { object O { def f0(x: A): A = ??? } ; O } // fail
+ ^
+variances-refinement.scala:18: error: covariant type B occurs in contravariant position in type ()AnyRef{def f0(x: B): A} of method fail2
+ def fail2() = { object O { def f0(x: B): A = ??? } ; O } // fail
+ ^
+variances-refinement.scala:19: error: covariant type B occurs in contravariant position in type ()AnyRef{def f0(x: B): B} of method fail3
+ def fail3() = { object O { def f0(x: B): B = ??? } ; O } // fail
+ ^
+variances-refinement.scala:20: error: covariant type B occurs in contravariant position in type ()AnyRef{def f0(x: B): C} of method fail4
+ def fail4() = { object O { def f0(x: B): C = ??? } ; O } // fail
+ ^
+variances-refinement.scala:21: error: contravariant type A occurs in covariant position in type ()AnyRef{def f0(x: C): A} of method fail5
+ def fail5() = { object O { def f0(x: C): A = ??? } ; O } // fail
+ ^
+variances-refinement.scala:23: error: contravariant type A occurs in covariant position in type ()O1.type forSome { val O1: AnyRef with O0; type O0 <: AnyRef{def f0(x: A): A; def f1(x: A): B; def f2(x: A): C} } of method fail6
+ def fail6() = { // fail
+ ^
+variances-refinement.scala:32: error: contravariant type A occurs in covariant position in type ()AnyRef{def f0(x: A): A; def f1(x: A): B; def f2(x: A): C} of method fail7
+ def fail7() = { // fail
+ ^
+7 errors found
diff --git a/test/files/neg/variances-refinement.scala b/test/files/neg/variances-refinement.scala
new file mode 100644
index 0000000000..6bfd336ce0
--- /dev/null
+++ b/test/files/neg/variances-refinement.scala
@@ -0,0 +1,40 @@
+trait Trait[-A, +B, C] {
+ def ok() = { // ok
+ object O {
+ private def f0(x: A): A = ???
+ def f1(x: A): B = ???
+ def f2(x: A): C = ???
+ private def f3(x: B): A = ???
+ private def f4(x: B): B = ???
+ private def f5(x: B): C = ???
+ private def f6(x: C): A = ???
+ def f7(x: C): B = ???
+ def f8(x: C): C = ???
+ }
+ O
+ }
+
+ def fail1() = { object O { def f0(x: A): A = ??? } ; O } // fail
+ def fail2() = { object O { def f0(x: B): A = ??? } ; O } // fail
+ def fail3() = { object O { def f0(x: B): B = ??? } ; O } // fail
+ def fail4() = { object O { def f0(x: B): C = ??? } ; O } // fail
+ def fail5() = { object O { def f0(x: C): A = ??? } ; O } // fail
+
+ def fail6() = { // fail
+ trait O0 {
+ def f0(x: A): A = ???
+ def f1(x: A): B = ???
+ def f2(x: A): C = ???
+ }
+ object O1 extends O0
+ O1
+ }
+ def fail7() = { // fail
+ trait O0 {
+ def f0(x: A): A = ???
+ def f1(x: A): B = ???
+ def f2(x: A): C = ???
+ }
+ new O0 { }
+ }
+}
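
Every failure in variances-refinement.scala reduces to the core variance rule: a contravariant parameter may only flow into an object (parameter positions) and a covariant one may only flow out (result positions); the refinement type inferred for each block simply re-exposes the offending member. A short sketch of the rule itself, separate from the patch:

  // If Sink were allowed to return an A, a Sink[Any] could stand in for a
  // Sink[String] and hand back an arbitrary Any where a String is expected.
  trait Sink[-A] {
    def put(x: A): Unit      // fine: contravariant A in a parameter position
    // def last(): A = ???   // rejected: contravariant type A occurs in covariant position
  }

  trait Source[+B] {
    def next(): B            // fine: covariant B in result position
    // def push(x: B): Unit  // rejected: covariant type B occurs in contravariant position
  }
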
diff --git a/test/files/neg/variances.check b/test/files/neg/variances.check
index 0643e533b7..7d965e94dc 100644
--- a/test/files/neg/variances.check
+++ b/test/files/neg/variances.check
@@ -13,7 +13,10 @@ variances.scala:21: error: covariant type A occurs in invariant position in supe
variances.scala:74: error: covariant type A occurs in contravariant position in type => test.Covariant.T[A]{val m: A => A} of value x
val x: T[A] {
^
+variances.scala:89: error: covariant type T occurs in invariant position in type T of type A
+ type A = T
+ ^
variances.scala:90: error: covariant type T occurs in contravariant position in type => test.TestAlias.B[C.this.A] of method foo
def foo: B[A]
^
-6 errors found
+7 errors found
diff --git a/test/files/neg/variances2.check b/test/files/neg/variances2.check
new file mode 100644
index 0000000000..433cc125ad
--- /dev/null
+++ b/test/files/neg/variances2.check
@@ -0,0 +1,229 @@
+variances2.scala:9: error: covariant type B occurs in contravariant position in type B of value x
+ def f1(x: B): Unit = ()
+ ^
+variances2.scala:12: error: covariant type E occurs in contravariant position in type E of value x
+ def f4(x: E): Unit = ()
+ ^
+variances2.scala:15: error: contravariant type A occurs in covariant position in type ()A of method f6
+ def f6(): A = ???
+ ^
+variances2.scala:18: error: contravariant type D occurs in covariant position in type ()D of method f9
+ def f9(): D = ???
+ ^
+variances2.scala:22: error: contravariant type A occurs in covariant position in type A => A of value f
+ def f12(f: A => A): Unit = ()
+ ^
+variances2.scala:23: error: contravariant type A occurs in covariant position in type A => B of value f
+ def f13(f: A => B): Unit = ()
+ ^
+variances2.scala:24: error: contravariant type A occurs in covariant position in type A => C of value f
+ def f14(f: A => C): Unit = ()
+ ^
+variances2.scala:25: error: contravariant type A occurs in covariant position in type A => D of value f
+ def f15(f: A => D): Unit = ()
+ ^
+variances2.scala:26: error: contravariant type A occurs in covariant position in type A => E of value f
+ def f16(f: A => E): Unit = ()
+ ^
+variances2.scala:27: error: contravariant type A occurs in covariant position in type A => F of value f
+ def f17(f: A => F): Unit = ()
+ ^
+variances2.scala:29: error: covariant type B occurs in contravariant position in type B => B of value f
+ def f19(f: B => B): Unit = ()
+ ^
+variances2.scala:32: error: covariant type E occurs in contravariant position in type B => E of value f
+ def f22(f: B => E): Unit = ()
+ ^
+variances2.scala:35: error: covariant type B occurs in contravariant position in type C => B of value f
+ def f25(f: C => B): Unit = ()
+ ^
+variances2.scala:38: error: covariant type E occurs in contravariant position in type C => E of value f
+ def f28(f: C => E): Unit = ()
+ ^
+variances2.scala:40: error: contravariant type D occurs in covariant position in type D => A of value f
+ def f30(f: D => A): Unit = ()
+ ^
+variances2.scala:41: error: contravariant type D occurs in covariant position in type D => B of value f
+ def f31(f: D => B): Unit = ()
+ ^
+variances2.scala:42: error: contravariant type D occurs in covariant position in type D => C of value f
+ def f32(f: D => C): Unit = ()
+ ^
+variances2.scala:43: error: contravariant type D occurs in covariant position in type D => D of value f
+ def f33(f: D => D): Unit = ()
+ ^
+variances2.scala:44: error: contravariant type D occurs in covariant position in type D => E of value f
+ def f34(f: D => E): Unit = ()
+ ^
+variances2.scala:45: error: contravariant type D occurs in covariant position in type D => F of value f
+ def f35(f: D => F): Unit = ()
+ ^
+variances2.scala:47: error: covariant type B occurs in contravariant position in type E => B of value f
+ def f37(f: E => B): Unit = ()
+ ^
+variances2.scala:50: error: covariant type E occurs in contravariant position in type E => E of value f
+ def f40(f: E => E): Unit = ()
+ ^
+variances2.scala:53: error: covariant type B occurs in contravariant position in type F => B of value f
+ def f43(f: F => B): Unit = ()
+ ^
+variances2.scala:56: error: covariant type E occurs in contravariant position in type F => E of value f
+ def f46(f: F => E): Unit = ()
+ ^
+variances2.scala:59: error: contravariant type A occurs in covariant position in type ()A => A of method f48
+ def f48(): A => A = null
+ ^
+variances2.scala:62: error: contravariant type D occurs in covariant position in type ()A => D of method f51
+ def f51(): A => D = null
+ ^
+variances2.scala:65: error: covariant type B occurs in contravariant position in type ()B => A of method f54
+ def f54(): B => A = null
+ ^
+variances2.scala:66: error: covariant type B occurs in contravariant position in type ()B => B of method f55
+ def f55(): B => B = null
+ ^
+variances2.scala:67: error: covariant type B occurs in contravariant position in type ()B => C of method f56
+ def f56(): B => C = null
+ ^
+variances2.scala:68: error: covariant type B occurs in contravariant position in type ()B => D of method f57
+ def f57(): B => D = null
+ ^
+variances2.scala:69: error: covariant type B occurs in contravariant position in type ()B => E of method f58
+ def f58(): B => E = null
+ ^
+variances2.scala:70: error: covariant type B occurs in contravariant position in type ()B => F of method f59
+ def f59(): B => F = null
+ ^
+variances2.scala:71: error: contravariant type A occurs in covariant position in type ()C => A of method f60
+ def f60(): C => A = null
+ ^
+variances2.scala:74: error: contravariant type D occurs in covariant position in type ()C => D of method f63
+ def f63(): C => D = null
+ ^
+variances2.scala:77: error: contravariant type A occurs in covariant position in type ()D => A of method f66
+ def f66(): D => A = null
+ ^
+variances2.scala:80: error: contravariant type D occurs in covariant position in type ()D => D of method f69
+ def f69(): D => D = null
+ ^
+variances2.scala:83: error: covariant type E occurs in contravariant position in type ()E => A of method f72
+ def f72(): E => A = null
+ ^
+variances2.scala:84: error: covariant type E occurs in contravariant position in type ()E => B of method f73
+ def f73(): E => B = null
+ ^
+variances2.scala:85: error: covariant type E occurs in contravariant position in type ()E => C of method f74
+ def f74(): E => C = null
+ ^
+variances2.scala:86: error: covariant type E occurs in contravariant position in type ()E => D of method f75
+ def f75(): E => D = null
+ ^
+variances2.scala:87: error: covariant type E occurs in contravariant position in type ()E => E of method f76
+ def f76(): E => E = null
+ ^
+variances2.scala:88: error: covariant type E occurs in contravariant position in type ()E => F of method f77
+ def f77(): E => F = null
+ ^
+variances2.scala:89: error: contravariant type A occurs in covariant position in type ()F => A of method f78
+ def f78(): F => A = null
+ ^
+variances2.scala:92: error: contravariant type D occurs in covariant position in type ()F => D of method f81
+ def f81(): F => D = null
+ ^
+variances2.scala:96: error: contravariant type A occurs in covariant position in type (x: A)A of method f84
+ def f84(x: A): A = ???
+ ^
+variances2.scala:99: error: contravariant type D occurs in covariant position in type (x: A)D of method f87
+ def f87(x: A): D = ???
+ ^
+variances2.scala:102: error: contravariant type A occurs in covariant position in type (x: B)A of method f90
+ def f90(x: B): A = ???
+ ^
+variances2.scala:102: error: covariant type B occurs in contravariant position in type B of value x
+ def f90(x: B): A = ???
+ ^
+variances2.scala:103: error: covariant type B occurs in contravariant position in type B of value x
+ def f91(x: B): B = ???
+ ^
+variances2.scala:104: error: covariant type B occurs in contravariant position in type B of value x
+ def f92(x: B): C = ???
+ ^
+variances2.scala:105: error: contravariant type D occurs in covariant position in type (x: B)D of method f93
+ def f93(x: B): D = ???
+ ^
+variances2.scala:105: error: covariant type B occurs in contravariant position in type B of value x
+ def f93(x: B): D = ???
+ ^
+variances2.scala:106: error: covariant type B occurs in contravariant position in type B of value x
+ def f94(x: B): E = ???
+ ^
+variances2.scala:107: error: covariant type B occurs in contravariant position in type B of value x
+ def f95(x: B): F = ???
+ ^
+variances2.scala:108: error: contravariant type A occurs in covariant position in type (x: C)A of method f96
+ def f96(x: C): A = ???
+ ^
+variances2.scala:111: error: contravariant type D occurs in covariant position in type (x: C)D of method f99
+ def f99(x: C): D = ???
+ ^
+variances2.scala:114: error: contravariant type A occurs in covariant position in type (x: D)A of method f102
+ def f102(x: D): A = ???
+ ^
+variances2.scala:117: error: contravariant type D occurs in covariant position in type (x: D)D of method f105
+ def f105(x: D): D = ???
+ ^
+variances2.scala:120: error: contravariant type A occurs in covariant position in type (x: E)A of method f108
+ def f108(x: E): A = ???
+ ^
+variances2.scala:120: error: covariant type E occurs in contravariant position in type E of value x
+ def f108(x: E): A = ???
+ ^
+variances2.scala:121: error: covariant type E occurs in contravariant position in type E of value x
+ def f109(x: E): B = ???
+ ^
+variances2.scala:122: error: covariant type E occurs in contravariant position in type E of value x
+ def f110(x: E): C = ???
+ ^
+variances2.scala:123: error: contravariant type D occurs in covariant position in type (x: E)D of method f111
+ def f111(x: E): D = ???
+ ^
+variances2.scala:123: error: covariant type E occurs in contravariant position in type E of value x
+ def f111(x: E): D = ???
+ ^
+variances2.scala:124: error: covariant type E occurs in contravariant position in type E of value x
+ def f112(x: E): E = ???
+ ^
+variances2.scala:125: error: covariant type E occurs in contravariant position in type E of value x
+ def f113(x: E): F = ???
+ ^
+variances2.scala:126: error: contravariant type A occurs in covariant position in type (x: F)A of method f114
+ def f114(x: F): A = ???
+ ^
+variances2.scala:129: error: contravariant type D occurs in covariant position in type (x: F)D of method f117
+ def f117(x: F): D = ???
+ ^
+variances2.scala:133: error: contravariant type A occurs in covariant position in supertype Cov[A] of object O1
+ object O1 extends Cov[A]
+ ^
+variances2.scala:136: error: contravariant type D occurs in covariant position in supertype Cov[D] of object O4
+ object O4 extends Cov[D]
+ ^
+variances2.scala:140: error: covariant type B occurs in contravariant position in supertype Con[B] of object O8
+ object O8 extends Con[B]
+ ^
+variances2.scala:143: error: covariant type E occurs in contravariant position in supertype Con[E] of object O11
+ object O11 extends Con[E]
+ ^
+variances2.scala:145: error: contravariant type A occurs in invariant position in supertype Inv[A] of object O13
+ object O13 extends Inv[A]
+ ^
+variances2.scala:146: error: covariant type B occurs in invariant position in supertype Inv[B] of object O14
+ object O14 extends Inv[B]
+ ^
+variances2.scala:148: error: contravariant type D occurs in invariant position in supertype Inv[D] of object O16
+ object O16 extends Inv[D]
+ ^
+variances2.scala:149: error: covariant type E occurs in invariant position in supertype Inv[E] of object O17
+ object O17 extends Inv[E]
+ ^
+76 errors found
diff --git a/test/files/neg/variances2.scala b/test/files/neg/variances2.scala
new file mode 100644
index 0000000000..d30345dd83
--- /dev/null
+++ b/test/files/neg/variances2.scala
@@ -0,0 +1,303 @@
+trait Cov[+A]
+trait Con[-A]
+trait Inv[A]
+
+trait Trait[-A, +B, C] {
+ // trait Inner[-D <: C, +E >: C, F] {
+ trait Inner[-D <: C, +E >: C, F] {
+ def f0(x: A): Unit = ()
+ def f1(x: B): Unit = ()
+ def f2(x: C): Unit = ()
+ def f3(x: D): Unit = ()
+ def f4(x: E): Unit = ()
+ def f5(x: F): Unit = ()
+
+ def f6(): A = ???
+ def f7(): B = ???
+ def f8(): C = ???
+ def f9(): D = ???
+ def f10(): E = ???
+ def f11(): F = ???
+
+ def f12(f: A => A): Unit = ()
+ def f13(f: A => B): Unit = ()
+ def f14(f: A => C): Unit = ()
+ def f15(f: A => D): Unit = ()
+ def f16(f: A => E): Unit = ()
+ def f17(f: A => F): Unit = ()
+ def f18(f: B => A): Unit = ()
+ def f19(f: B => B): Unit = ()
+ def f20(f: B => C): Unit = ()
+ def f21(f: B => D): Unit = ()
+ def f22(f: B => E): Unit = ()
+ def f23(f: B => F): Unit = ()
+ def f24(f: C => A): Unit = ()
+ def f25(f: C => B): Unit = ()
+ def f26(f: C => C): Unit = ()
+ def f27(f: C => D): Unit = ()
+ def f28(f: C => E): Unit = ()
+ def f29(f: C => F): Unit = ()
+ def f30(f: D => A): Unit = ()
+ def f31(f: D => B): Unit = ()
+ def f32(f: D => C): Unit = ()
+ def f33(f: D => D): Unit = ()
+ def f34(f: D => E): Unit = ()
+ def f35(f: D => F): Unit = ()
+ def f36(f: E => A): Unit = ()
+ def f37(f: E => B): Unit = ()
+ def f38(f: E => C): Unit = ()
+ def f39(f: E => D): Unit = ()
+ def f40(f: E => E): Unit = ()
+ def f41(f: E => F): Unit = ()
+ def f42(f: F => A): Unit = ()
+ def f43(f: F => B): Unit = ()
+ def f44(f: F => C): Unit = ()
+ def f45(f: F => D): Unit = ()
+ def f46(f: F => E): Unit = ()
+ def f47(f: F => F): Unit = ()
+
+ def f48(): A => A = null
+ def f49(): A => B = null
+ def f50(): A => C = null
+ def f51(): A => D = null
+ def f52(): A => E = null
+ def f53(): A => F = null
+ def f54(): B => A = null
+ def f55(): B => B = null
+ def f56(): B => C = null
+ def f57(): B => D = null
+ def f58(): B => E = null
+ def f59(): B => F = null
+ def f60(): C => A = null
+ def f61(): C => B = null
+ def f62(): C => C = null
+ def f63(): C => D = null
+ def f64(): C => E = null
+ def f65(): C => F = null
+ def f66(): D => A = null
+ def f67(): D => B = null
+ def f68(): D => C = null
+ def f69(): D => D = null
+ def f70(): D => E = null
+ def f71(): D => F = null
+ def f72(): E => A = null
+ def f73(): E => B = null
+ def f74(): E => C = null
+ def f75(): E => D = null
+ def f76(): E => E = null
+ def f77(): E => F = null
+ def f78(): F => A = null
+ def f79(): F => B = null
+ def f80(): F => C = null
+ def f81(): F => D = null
+ def f82(): F => E = null
+ def f83(): F => F = null
+
+ def f84(x: A): A = ???
+ def f85(x: A): B = ???
+ def f86(x: A): C = ???
+ def f87(x: A): D = ???
+ def f88(x: A): E = ???
+ def f89(x: A): F = ???
+ def f90(x: B): A = ???
+ def f91(x: B): B = ???
+ def f92(x: B): C = ???
+ def f93(x: B): D = ???
+ def f94(x: B): E = ???
+ def f95(x: B): F = ???
+ def f96(x: C): A = ???
+ def f97(x: C): B = ???
+ def f98(x: C): C = ???
+ def f99(x: C): D = ???
+ def f100(x: C): E = ???
+ def f101(x: C): F = ???
+ def f102(x: D): A = ???
+ def f103(x: D): B = ???
+ def f104(x: D): C = ???
+ def f105(x: D): D = ???
+ def f106(x: D): E = ???
+ def f107(x: D): F = ???
+ def f108(x: E): A = ???
+ def f109(x: E): B = ???
+ def f110(x: E): C = ???
+ def f111(x: E): D = ???
+ def f112(x: E): E = ???
+ def f113(x: E): F = ???
+ def f114(x: F): A = ???
+ def f115(x: F): B = ???
+ def f116(x: F): C = ???
+ def f117(x: F): D = ???
+ def f118(x: F): E = ???
+ def f119(x: F): F = ???
+
+ object O1 extends Cov[A]
+ object O2 extends Cov[B]
+ object O3 extends Cov[C]
+ object O4 extends Cov[D]
+ object O5 extends Cov[E]
+ object O6 extends Cov[F]
+ object O7 extends Con[A]
+ object O8 extends Con[B]
+ object O9 extends Con[C]
+ object O10 extends Con[D]
+ object O11 extends Con[E]
+ object O12 extends Con[F]
+ object O13 extends Inv[A]
+ object O14 extends Inv[B]
+ object O15 extends Inv[C]
+ object O16 extends Inv[D]
+ object O17 extends Inv[E]
+ object O18 extends Inv[F]
+ }
+}
+
+trait Trait2[-A, +B, C] {
+ // trait Inner[-D <: C, +E >: C, F] {
+ def method[D <: A, E >: B, F]() {
+ def f0(x: A): Unit = ()
+ def f1(x: B): Unit = ()
+ def f2(x: C): Unit = ()
+ def f3(x: D): Unit = ()
+ def f4(x: E): Unit = ()
+ def f5(x: F): Unit = ()
+
+ def f6(): A = ???
+ def f7(): B = ???
+ def f8(): C = ???
+ def f9(): D = ???
+ def f10(): E = ???
+ def f11(): F = ???
+
+ def f12(f: A => A): Unit = ()
+ def f13(f: A => B): Unit = ()
+ def f14(f: A => C): Unit = ()
+ def f15(f: A => D): Unit = ()
+ def f16(f: A => E): Unit = ()
+ def f17(f: A => F): Unit = ()
+ def f18(f: B => A): Unit = ()
+ def f19(f: B => B): Unit = ()
+ def f20(f: B => C): Unit = ()
+ def f21(f: B => D): Unit = ()
+ def f22(f: B => E): Unit = ()
+ def f23(f: B => F): Unit = ()
+ def f24(f: C => A): Unit = ()
+ def f25(f: C => B): Unit = ()
+ def f26(f: C => C): Unit = ()
+ def f27(f: C => D): Unit = ()
+ def f28(f: C => E): Unit = ()
+ def f29(f: C => F): Unit = ()
+ def f30(f: D => A): Unit = ()
+ def f31(f: D => B): Unit = ()
+ def f32(f: D => C): Unit = ()
+ def f33(f: D => D): Unit = ()
+ def f34(f: D => E): Unit = ()
+ def f35(f: D => F): Unit = ()
+ def f36(f: E => A): Unit = ()
+ def f37(f: E => B): Unit = ()
+ def f38(f: E => C): Unit = ()
+ def f39(f: E => D): Unit = ()
+ def f40(f: E => E): Unit = ()
+ def f41(f: E => F): Unit = ()
+ def f42(f: F => A): Unit = ()
+ def f43(f: F => B): Unit = ()
+ def f44(f: F => C): Unit = ()
+ def f45(f: F => D): Unit = ()
+ def f46(f: F => E): Unit = ()
+ def f47(f: F => F): Unit = ()
+
+ def f48(): A => A = null
+ def f49(): A => B = null
+ def f50(): A => C = null
+ def f51(): A => D = null
+ def f52(): A => E = null
+ def f53(): A => F = null
+ def f54(): B => A = null
+ def f55(): B => B = null
+ def f56(): B => C = null
+ def f57(): B => D = null
+ def f58(): B => E = null
+ def f59(): B => F = null
+ def f60(): C => A = null
+ def f61(): C => B = null
+ def f62(): C => C = null
+ def f63(): C => D = null
+ def f64(): C => E = null
+ def f65(): C => F = null
+ def f66(): D => A = null
+ def f67(): D => B = null
+ def f68(): D => C = null
+ def f69(): D => D = null
+ def f70(): D => E = null
+ def f71(): D => F = null
+ def f72(): E => A = null
+ def f73(): E => B = null
+ def f74(): E => C = null
+ def f75(): E => D = null
+ def f76(): E => E = null
+ def f77(): E => F = null
+ def f78(): F => A = null
+ def f79(): F => B = null
+ def f80(): F => C = null
+ def f81(): F => D = null
+ def f82(): F => E = null
+ def f83(): F => F = null
+
+ def f84(x: A): A = ???
+ def f85(x: A): B = ???
+ def f86(x: A): C = ???
+ def f87(x: A): D = ???
+ def f88(x: A): E = ???
+ def f89(x: A): F = ???
+ def f90(x: B): A = ???
+ def f91(x: B): B = ???
+ def f92(x: B): C = ???
+ def f93(x: B): D = ???
+ def f94(x: B): E = ???
+ def f95(x: B): F = ???
+ def f96(x: C): A = ???
+ def f97(x: C): B = ???
+ def f98(x: C): C = ???
+ def f99(x: C): D = ???
+ def f100(x: C): E = ???
+ def f101(x: C): F = ???
+ def f102(x: D): A = ???
+ def f103(x: D): B = ???
+ def f104(x: D): C = ???
+ def f105(x: D): D = ???
+ def f106(x: D): E = ???
+ def f107(x: D): F = ???
+ def f108(x: E): A = ???
+ def f109(x: E): B = ???
+ def f110(x: E): C = ???
+ def f111(x: E): D = ???
+ def f112(x: E): E = ???
+ def f113(x: E): F = ???
+ def f114(x: F): A = ???
+ def f115(x: F): B = ???
+ def f116(x: F): C = ???
+ def f117(x: F): D = ???
+ def f118(x: F): E = ???
+ def f119(x: F): F = ???
+
+ object O1 extends Cov[A]
+ object O2 extends Cov[B]
+ object O3 extends Cov[C]
+ object O4 extends Cov[D]
+ object O5 extends Cov[E]
+ object O6 extends Cov[F]
+ object O7 extends Con[A]
+ object O8 extends Con[B]
+ object O9 extends Con[C]
+ object O10 extends Con[D]
+ object O11 extends Con[E]
+ object O12 extends Con[F]
+ object O13 extends Inv[A]
+ object O14 extends Inv[B]
+ object O15 extends Inv[C]
+ object O16 extends Inv[D]
+ object O17 extends Inv[E]
+ object O18 extends Inv[F]
+
+ ()
+ }
+}
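
Most of the 76 errors in variances2.check follow from one further rule: Function1[-I, +O] flips the variance of the positions it encloses, so an argument type nested inside a method parameter is flipped twice and is effectively covariant again. A small sketch of the flipping, separate from the patch:

  trait FlipSketch[-A, +B] {
    def ok(f: B => A): Unit          // compiles: B is flipped into covariant position, A into contravariant
    def alsoOk(): A => B = null      // compiles: in result position the arrow's own variances apply directly

    // def bad(f: A => B): Unit      // rejected: contravariant type A occurs in covariant position
    // def alsoBad(): B => A = null  // rejected: covariant type B occurs in contravariant position
  }
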
diff --git a/test/files/neg/virtpatmat_reach_null.check b/test/files/neg/virtpatmat_reach_null.check
index 595c8ec889..e0c36c8c5b 100644
--- a/test/files/neg/virtpatmat_reach_null.check
+++ b/test/files/neg/virtpatmat_reach_null.check
@@ -1,4 +1,6 @@
-virtpatmat_reach_null.scala:13: error: unreachable code
+virtpatmat_reach_null.scala:13: warning: unreachable code
case _ => // unreachable
^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
one error found
diff --git a/test/files/neg/virtpatmat_reach_sealed_unsealed.check b/test/files/neg/virtpatmat_reach_sealed_unsealed.check
index 10638eff52..064a12bcaa 100644
--- a/test/files/neg/virtpatmat_reach_sealed_unsealed.check
+++ b/test/files/neg/virtpatmat_reach_sealed_unsealed.check
@@ -1,14 +1,16 @@
-virtpatmat_reach_sealed_unsealed.scala:16: error: match may not be exhaustive.
+virtpatmat_reach_sealed_unsealed.scala:16: warning: match may not be exhaustive.
It would fail on the following input: false
(true: Boolean) match { case true => } // not exhaustive, but reachable
^
-virtpatmat_reach_sealed_unsealed.scala:18: error: unreachable code
+virtpatmat_reach_sealed_unsealed.scala:18: warning: unreachable code
(true: Boolean) match { case true => case false => case _ => } // exhaustive, last case is unreachable
^
-virtpatmat_reach_sealed_unsealed.scala:19: error: unreachable code
+virtpatmat_reach_sealed_unsealed.scala:19: warning: unreachable code
(true: Boolean) match { case true => case false => case _: Boolean => } // exhaustive, last case is unreachable
^
-virtpatmat_reach_sealed_unsealed.scala:20: error: unreachable code
+virtpatmat_reach_sealed_unsealed.scala:20: warning: unreachable code
(true: Boolean) match { case true => case false => case _: Any => } // exhaustive, last case is unreachable
^
-four errors found
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/virtpatmat_unreach_select.check b/test/files/neg/virtpatmat_unreach_select.check
index 3771971020..4fc78cd412 100644
--- a/test/files/neg/virtpatmat_unreach_select.check
+++ b/test/files/neg/virtpatmat_unreach_select.check
@@ -1,4 +1,6 @@
-virtpatmat_unreach_select.scala:10: error: unreachable code
+virtpatmat_unreach_select.scala:10: warning: unreachable code
case WARNING.id => // unreachable
^
+error: No warnings can be incurred under -Xfatal-warnings.
+one warning found
one error found
diff --git a/test/files/neg/warn-inferred-any.check b/test/files/neg/warn-inferred-any.check
new file mode 100644
index 0000000000..4628033e55
--- /dev/null
+++ b/test/files/neg/warn-inferred-any.check
@@ -0,0 +1,12 @@
+warn-inferred-any.scala:8: warning: a type was inferred to be `Any`; this may indicate a programming error.
+ { List(1, 2, 3) contains "a" } // only this warns
+ ^
+warn-inferred-any.scala:16: warning: a type was inferred to be `AnyVal`; this may indicate a programming error.
+ { 1l to 5l contains 5 }
+ ^
+warn-inferred-any.scala:17: warning: a type was inferred to be `AnyVal`; this may indicate a programming error.
+ { 1l to 5l contains 5d }
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/warn-inferred-any.flags b/test/files/neg/warn-inferred-any.flags
new file mode 100644
index 0000000000..a3127d392a
--- /dev/null
+++ b/test/files/neg/warn-inferred-any.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Ywarn-infer-any
diff --git a/test/files/neg/warn-inferred-any.scala b/test/files/neg/warn-inferred-any.scala
new file mode 100644
index 0000000000..b853e6e5a8
--- /dev/null
+++ b/test/files/neg/warn-inferred-any.scala
@@ -0,0 +1,19 @@
+trait Foo[-A <: AnyRef, +B <: AnyRef] {
+ def run[U](x: A)(action: B => U): Boolean = ???
+
+ { run(_: A)(_: B => String) }
+}
+
+trait Xs[+A] {
+ { List(1, 2, 3) contains "a" } // only this warns
+ { List(1, 2, 3) contains 1 }
+ { identity(List(1, 2, 3) contains 1) }
+ { List("a") foreach println }
+}
+
+trait Ys[+A] {
+ { 1 to 5 contains 5l }
+ { 1l to 5l contains 5 }
+ { 1l to 5l contains 5d }
+ { 1l to 5l contains 5l }
+}
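
The three warnings above come from type-argument inference rather than from contains itself: contains is generic here with a type parameter bounded below by the element type, so comparing a List[Int] with a String pushes that parameter up to Any (and mixing Long ranges with Int or Double pushes it to AnyVal), which -Ywarn-infer-any reports. A sketch of the same effect with a stand-in method (containsLike is illustrative, not a library method):

  object InferredAnySketch {
    // Modelled on a lower-bounded signature such as contains[A1 >: A](elem: A1):
    // A1 is inferred as the least upper bound of the element type and the argument type.
    def containsLike[A, A1 >: A](xs: List[A], elem: A1): Boolean = xs.exists(_ == elem)

    def fine = containsLike(List(1, 2, 3), 4)    // A1 = Int, no warning
    def iffy = containsLike(List(1, 2, 3), "a")  // A1 = Any, -Ywarn-infer-any warns
  }
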
diff --git a/test/files/neg/warn-unused-imports.check b/test/files/neg/warn-unused-imports.check
new file mode 100644
index 0000000000..1b938f4fd7
--- /dev/null
+++ b/test/files/neg/warn-unused-imports.check
@@ -0,0 +1,44 @@
+warn-unused-imports.scala:7: warning: it is not recommended to define classes/objects inside of package objects.
+If possible, define class A in package p1 instead.
+ class A
+ ^
+warn-unused-imports.scala:13: warning: it is not recommended to define classes/objects inside of package objects.
+If possible, define class A in package p2 instead.
+ class A
+ ^
+warn-unused-imports.scala:57: warning: Unused import
+ import p1.A // warn
+ ^
+warn-unused-imports.scala:62: warning: Unused import
+ import p1.{ A, B } // warn on A
+ ^
+warn-unused-imports.scala:67: warning: Unused import
+ import p1.{ A, B } // warn on both
+ ^
+warn-unused-imports.scala:67: warning: Unused import
+ import p1.{ A, B } // warn on both
+ ^
+warn-unused-imports.scala:73: warning: Unused import
+ import c._ // warn
+ ^
+warn-unused-imports.scala:78: warning: Unused import
+ import p1._ // warn
+ ^
+warn-unused-imports.scala:85: warning: Unused import
+ import c._ // warn
+ ^
+warn-unused-imports.scala:91: warning: Unused import
+ import p1.c._ // warn
+ ^
+warn-unused-imports.scala:98: warning: Unused import
+ import p1._ // warn
+ ^
+warn-unused-imports.scala:118: warning: Unused import
+ import p1.A // warn
+ ^
+warn-unused-imports.scala:99: warning: local trait Warn is never used
+ trait Warn { // warn about unused local trait for good measure
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+13 warnings found
+one error found
diff --git a/test/files/neg/warn-unused-imports.flags b/test/files/neg/warn-unused-imports.flags
new file mode 100644
index 0000000000..954eaba352
--- /dev/null
+++ b/test/files/neg/warn-unused-imports.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xlint
diff --git a/test/files/neg/warn-unused-imports.scala b/test/files/neg/warn-unused-imports.scala
new file mode 100644
index 0000000000..b7a2f1c414
--- /dev/null
+++ b/test/files/neg/warn-unused-imports.scala
@@ -0,0 +1,125 @@
+class Bippo {
+ def length: Int = 123
+ class Tree
+}
+
+package object p1 {
+ class A
+ implicit class B(val s: String) { def bippy = s }
+ val c: Bippo = new Bippo
+ type D = String
+}
+package object p2 {
+ class A
+ implicit class B(val s: String) { def bippy = s }
+ val c: Bippo = new Bippo
+ type D = Int
+}
+
+trait NoWarn {
+ {
+ import p1._ // no warn
+ println("abc".bippy)
+ }
+
+ {
+ import p1._ // no warn
+ println(new A)
+ }
+
+ {
+ import p1.B // no warn
+ println("abc".bippy)
+ }
+
+ {
+ import p1._ // no warn
+ import c._ // no warn
+ println(length)
+ }
+
+ {
+ import p1._ // no warn
+ import c._ // no warn
+ val x: Tree = null
+ println(x)
+ }
+
+ {
+ import p1.D // no warn
+ val x: D = null
+ println(x)
+ }
+}
+
+trait Warn {
+ {
+ import p1.A // warn
+ println(123)
+ }
+
+ {
+ import p1.{ A, B } // warn on A
+ println("abc".bippy)
+ }
+
+ {
+ import p1.{ A, B } // warn on both
+ println(123)
+ }
+
+ {
+ import p1._ // no warn (technically this could warn, but not worth the effort to unroll unusedness transitively)
+ import c._ // warn
+ println(123)
+ }
+
+ {
+ import p1._ // warn
+ println(123)
+ }
+
+ {
+ class Tree
+ import p1._ // no warn
+ import c._ // warn
+ val x: Tree = null
+ println(x)
+ }
+
+ {
+ import p1.c._ // warn
+ println(123)
+ }
+}
+
+trait Nested {
+ {
+ import p1._ // warn
+ trait Warn { // warn about unused local trait for good measure
+ import p2._
+ println(new A)
+ println("abc".bippy)
+ }
+ println("")
+ }
+
+ {
+ import p1._ // no warn
+ trait NoWarn {
+ import p2.B // no warn
+ println("abc".bippy)
+ println(new A)
+ }
+ println(new NoWarn { })
+ }
+
+ {
+ import p1.A // warn
+ trait Warn {
+ import p2.A
+ println(new A)
+ }
+ println(new Warn { })
+ }
+}
diff --git a/test/files/neg/warn-unused-privates.check b/test/files/neg/warn-unused-privates.check
new file mode 100644
index 0000000000..9c41a33e8f
--- /dev/null
+++ b/test/files/neg/warn-unused-privates.check
@@ -0,0 +1,63 @@
+warn-unused-privates.scala:2: warning: private constructor in class Bippy is never used
+ private def this(c: Int) = this(c, c) // warn
+ ^
+warn-unused-privates.scala:4: warning: private method in class Bippy is never used
+ private def boop(x: Int) = x+a+b // warn
+ ^
+warn-unused-privates.scala:6: warning: private val in class Bippy is never used
+ final private val MILLIS2: Int = 1000 // warn
+ ^
+warn-unused-privates.scala:13: warning: private val in object Bippy is never used
+ private val HEY_INSTANCE: Int = 1000 // warn
+ ^
+warn-unused-privates.scala:35: warning: private val in class Boppy is never used
+ private val hummer = "def" // warn
+ ^
+warn-unused-privates.scala:42: warning: private var in trait Accessors is never used
+ private var v1: Int = 0 // warn
+ ^
+warn-unused-privates.scala:43: warning: private setter in trait Accessors is never used
+ private var v2: Int = 0 // warn, never set
+ ^
+warn-unused-privates.scala:44: warning: private var in trait Accessors is never used
+ private var v3: Int = 0 // warn, never got
+ ^
+warn-unused-privates.scala:56: warning: private default argument in trait DefaultArgs is never used
+ private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3
+ ^
+warn-unused-privates.scala:56: warning: private default argument in trait DefaultArgs is never used
+ private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3
+ ^
+warn-unused-privates.scala:67: warning: local var in method f0 is never used
+ var x = 1 // warn
+ ^
+warn-unused-privates.scala:74: warning: local val in method f1 is never used
+ val b = new Outer // warn
+ ^
+warn-unused-privates.scala:84: warning: private object in object Types is never used
+ private object Dongo { def f = this } // warn
+ ^
+warn-unused-privates.scala:94: warning: local object in method l1 is never used
+ object HiObject { def f = this } // warn
+ ^
+warn-unused-privates.scala:78: warning: local var x in method f2 is never set - it could be a val
+ var x = 100 // warn about it being a var
+ ^
+warn-unused-privates.scala:85: warning: private class Bar1 in object Types is never used
+ private class Bar1 // warn
+ ^
+warn-unused-privates.scala:87: warning: private type Alias1 in object Types is never used
+ private type Alias1 = String // warn
+ ^
+warn-unused-privates.scala:95: warning: local class Hi is never used
+ class Hi { // warn
+ ^
+warn-unused-privates.scala:99: warning: local class DingDongDoobie is never used
+ class DingDongDoobie // warn
+ ^
+warn-unused-privates.scala:102: warning: local type OtherThing is never used
+ type OtherThing = String // warn
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+20 warnings found
+one error found
diff --git a/test/files/neg/warn-unused-privates.flags b/test/files/neg/warn-unused-privates.flags
new file mode 100644
index 0000000000..7949c2afa2
--- /dev/null
+++ b/test/files/neg/warn-unused-privates.flags
@@ -0,0 +1 @@
+-Xlint -Xfatal-warnings
diff --git a/test/files/neg/warn-unused-privates.scala b/test/files/neg/warn-unused-privates.scala
new file mode 100644
index 0000000000..cb6e946a34
--- /dev/null
+++ b/test/files/neg/warn-unused-privates.scala
@@ -0,0 +1,105 @@
+class Bippy(a: Int, b: Int) {
+ private def this(c: Int) = this(c, c) // warn
+ private def bippy(x: Int): Int = bippy(x) // TODO: could warn
+ private def boop(x: Int) = x+a+b // warn
+ final private val MILLIS1 = 2000 // no warn, might have been inlined
+ final private val MILLIS2: Int = 1000 // warn
+ final private val HI_COMPANION: Int = 500 // no warn, accessed from companion
+ def hi() = Bippy.HI_INSTANCE
+}
+object Bippy {
+ def hi(x: Bippy) = x.HI_COMPANION
+ private val HI_INSTANCE: Int = 500 // no warn, accessed from instance
+ private val HEY_INSTANCE: Int = 1000 // warn
+}
+
+class A(val msg: String)
+class B1(msg: String) extends A(msg)
+class B2(msg0: String) extends A(msg0)
+class B3(msg0: String) extends A("msg")
+
+/*** Early defs warnings disabled primarily due to SI-6595.
+ * The test case is here to assure we aren't issuing false positives;
+ * the ones labeled "warn" don't warn.
+ ***/
+class Boppy extends {
+ private val hmm: String = "abc" // no warn, used in early defs
+ private val hom: String = "def" // no warn, used in body
+ private final val him = "ghi" // no warn, might have been (was) inlined
+ final val him2 = "ghi" // no warn, same
+ final val himinline = him
+ private val hum: String = "jkl" // warn
+ final val ding = hmm.length
+} with Mutable {
+ val dinger = hom
+ private val hummer = "def" // warn
+
+ private final val bum = "ghi" // no warn, might have been (was) inlined
+ final val bum2 = "ghi" // no warn, same
+}
+
+trait Accessors {
+ private var v1: Int = 0 // warn
+ private var v2: Int = 0 // warn, never set
+ private var v3: Int = 0 // warn, never got
+ private var v4: Int = 0 // no warn
+
+ def bippy(): Int = {
+ v3 = 5
+ v4 = 6
+ v2 + v4
+ }
+}
+
+trait DefaultArgs {
+ // warn about default getters for x2 and x3
+ private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3
+
+ def boppy() = bippy(5, 100, 200)
+}
+
+class Outer {
+ class Inner
+}
+
+trait Locals {
+ def f0 = {
+ var x = 1 // warn
+ var y = 2
+ y = 3
+ y + y
+ }
+ def f1 = {
+ val a = new Outer // no warn
+ val b = new Outer // warn
+ new a.Inner
+ }
+ def f2 = {
+ var x = 100 // warn about it being a var
+ x
+ }
+}
+
+object Types {
+ private object Dongo { def f = this } // warn
+ private class Bar1 // warn
+ private class Bar2 // no warn
+ private type Alias1 = String // warn
+ private type Alias2 = String // no warn
+ def bippo = (new Bar2).toString
+
+ def f(x: Alias2) = x.length
+
+ def l1() = {
+ object HiObject { def f = this } // warn
+ class Hi { // warn
+ def f1: Hi = new Hi
+ def f2(x: Hi) = x
+ }
+ class DingDongDoobie // warn
+ class Bippy // no warn
+ type Something = Bippy // no warn
+ type OtherThing = String // warn
+ (new Bippy): Something
+ }
+}
diff --git a/test/files/pos/CustomGlobal.scala b/test/files/pos/CustomGlobal.scala
index 30bf227950..a5668bd7c0 100644
--- a/test/files/pos/CustomGlobal.scala
+++ b/test/files/pos/CustomGlobal.scala
@@ -22,7 +22,7 @@ class CustomGlobal(currentSettings: Settings, reporter: Reporter) extends Global
override def newTyper(context: Context): Typer = new CustomTyper(context)
class CustomTyper(context : Context) extends Typer(context) {
- override def typed(tree: Tree, mode: Int, pt: Type): Tree = {
+ override def typed(tree: Tree, mode: Mode, pt: Type): Tree = {
if (tree.summaryString contains "Bippy")
println("I'm typing a Bippy! It's a " + tree.shortClass + ".")
diff --git a/test/files/pos/List1.scala b/test/files/pos/List1.scala
index 9d3a51f4e3..30ebf5e1e7 100644
--- a/test/files/pos/List1.scala
+++ b/test/files/pos/List1.scala
@@ -9,15 +9,15 @@ object lists {
def Nil[b] = new List[b] {
def isEmpty: Boolean = true;
- def head = error("head of Nil");
- def tail = error("tail of Nil");
+ def head = sys.error("head of Nil");
+ def tail = sys.error("tail of Nil");
}
def Cons[c](x: c, xs: List[c]): List[c] = new List[c] {
def isEmpty = false;
def head = x;
def tail = xs;
- }
+ }
def foo = {
val intnil = Nil[Int];
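
The error -> sys.error substitutions in List1.scala (and in the depmet_implicit files below) track a plain deprecation: Predef.error was deprecated in 2.9 in favour of sys.error, which does the same thing. A one-method sketch, separate from the patch:

  object SysErrorSketch {
    // sys.error just throws a RuntimeException with the given message,
    // exactly what the deprecated Predef.error used to do.
    def head[A](xs: List[A]): A =
      if (xs.isEmpty) sys.error("head of empty list") else xs.head
  }
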
diff --git a/test/files/pos/annotated-treecopy/Impls_Macros_1.scala b/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
index d92fbca380..cf58bc3dfd 100644
--- a/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
+++ b/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
@@ -21,7 +21,7 @@ object Macros {
// normalize argument name
var b1 = new Transformer {
override def transform(tree: Tree): Tree = tree match {
- case Ident(x) if (x==n) => Ident(newTermName("_arg"))
+ case Ident(x) if (x==n) => Ident(TermName("_arg"))
case tt @ TypeTree() if tt.original != null => TypeTree(tt.tpe) setOriginal transform(tt.original)
// without the fix to LazyTreeCopier.Annotated, we would need to uncomment the line below to make the macro work
// that's because the pattern match in the input expression gets expanded into Typed(<x>, TypeTree(<Int @unchecked>))
diff --git a/test/files/pos/annotations.scala b/test/files/pos/annotations.scala
index 706a715bad..4832ce4ecd 100644
--- a/test/files/pos/annotations.scala
+++ b/test/files/pos/annotations.scala
@@ -2,7 +2,7 @@ class ann(i: Int) extends scala.annotation.Annotation
class cfann(x: String) extends annotation.ClassfileAnnotation
// annotations on abstract types
-abstract class C1[@serializable @cloneable +T, U, V[_]]
+abstract class C1[@annotation.elidable(0) +T, U, V[_]]
abstract class C2[@deprecated
@ann(1) T <: Number,
V]
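
The @serializable and @cloneable annotations used above had been deprecated since 2.9, so the pos test swaps in another annotation that can legally sit on a type parameter; ordinary code migrates by extending the corresponding interface instead, sketched here separately from the patch:

  // Replacement for the deprecated @serializable: extend Serializable directly.
  class Payload(val value: Int) extends Serializable

  // Likewise for @cloneable.
  class Buffer extends java.lang.Cloneable
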
diff --git a/test/files/pos/annotations2.scala b/test/files/pos/annotations2.scala
new file mode 100644
index 0000000000..3bce7f8ac4
--- /dev/null
+++ b/test/files/pos/annotations2.scala
@@ -0,0 +1,31 @@
+
+class B[T](x: (T, T)) {
+ def this(xx: (T, Any, Any)) = this((xx._1, xx._1))
+}
+class BAnn[T](x: (T, T)) extends scala.annotation.StaticAnnotation {
+ def this(xx: (T, Any, Any)) = this((xx._1, xx._1))
+}
+class CAnn[T](x: (T, T)) extends scala.annotation.StaticAnnotation {
+ def this(xx: Class[T]) = this((xx.newInstance(), xx.newInstance()))
+}
+
+class A1 {
+ val b1 = new B((1, 2, 3))
+ val b2 = new B((1, 2))
+ val b3 = new B[Int]((1, 2, 3))
+ val b4 = new B[Int]((1, 2))
+}
+
+class A2 {
+ @BAnn((1, 2, 3)) val b1 = null
+ @BAnn((1, 2)) val b2 = null
+ @BAnn[Int]((1, 2, 3)) val b3 = null
+ @BAnn[Int]((1, 2)) val b4 = null
+}
+
+class A3 {
+ @CAnn(classOf[Int]) val b1 = null
+ @CAnn((1, 2)) val b2 = null
+ @CAnn[Int](classOf[Int]) val b3 = null
+ @CAnn[Int]((1, 2)) val b4 = null
+}
diff --git a/test/pending/run/t5418.check b/test/files/pos/attachments-typed-another-ident.check
index e69de29bb2..e69de29bb2 100644
--- a/test/pending/run/t5418.check
+++ b/test/files/pos/attachments-typed-another-ident.check
diff --git a/test/files/neg/t5692a.flags b/test/files/pos/attachments-typed-another-ident.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/neg/t5692a.flags
+++ b/test/files/pos/attachments-typed-another-ident.flags
diff --git a/test/files/pos/attachments-typed-another-ident/Impls_1.scala b/test/files/pos/attachments-typed-another-ident/Impls_1.scala
new file mode 100644
index 0000000000..c3f541075e
--- /dev/null
+++ b/test/files/pos/attachments-typed-another-ident/Impls_1.scala
@@ -0,0 +1,17 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object MyAttachment
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ val ident = Ident(TermName("bar")) updateAttachment MyAttachment
+ assert(ident.attachments.get[MyAttachment.type].isDefined, ident.attachments)
+ val typed = c.typeCheck(ident)
+ assert(typed.attachments.get[MyAttachment.type].isDefined, typed.attachments)
+ c.Expr[Int](typed)
+ }
+
+ def foo = macro impl
+}
diff --git a/test/files/pos/attachments-typed-another-ident/Macros_Test_2.scala b/test/files/pos/attachments-typed-another-ident/Macros_Test_2.scala
new file mode 100644
index 0000000000..022639bfe9
--- /dev/null
+++ b/test/files/pos/attachments-typed-another-ident/Macros_Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ def bar = 2
+ Macros.foo
+}
+
diff --git a/test/files/pos/attachments-typed-ident/Impls_1.scala b/test/files/pos/attachments-typed-ident/Impls_1.scala
index cc40893a93..c382cabc59 100644
--- a/test/files/pos/attachments-typed-ident/Impls_1.scala
+++ b/test/files/pos/attachments-typed-ident/Impls_1.scala
@@ -6,7 +6,7 @@ object MyAttachment
object Macros {
def impl(c: Context) = {
import c.universe._
- val ident = Ident(newTermName("bar")) updateAttachment MyAttachment
+ val ident = Ident(TermName("bar")) updateAttachment MyAttachment
assert(ident.attachments.get[MyAttachment.type].isDefined, ident.attachments)
val typed = c.typeCheck(ident)
assert(typed.attachments.get[MyAttachment.type].isDefined, typed.attachments)
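
The newTermName -> TermName rewrites here and in the new Impls_1.scala above follow the 2.11 reflection API, where TermName and TypeName are proper constructors and extractors and the older newTermName/newTypeName factories are deprecated. A minimal sketch against the runtime universe, separate from the patch:

  import scala.reflect.runtime.universe._

  object TermNameSketch extends App {
    val ident = Ident(TermName("bar"))   // 2.11 spelling of Ident(newTermName("bar"))
    println(showRaw(ident))              // prints something like Ident(TermName("bar"))
  }
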
diff --git a/test/files/pos/attributes.scala b/test/files/pos/attributes.scala
index ec735d0aae..60e00bff7d 100644
--- a/test/files/pos/attributes.scala
+++ b/test/files/pos/attributes.scala
@@ -1,3 +1,5 @@
+class serializable extends annotation.StaticAnnotation
+
@serializable class C1;
@serializable @volatile class C2;
@serializable @volatile class C3;
diff --git a/test/files/pos/chang/Test.scala b/test/files/pos/chang/Test.scala
index 9bb745e377..f74c6355b5 100644
--- a/test/files/pos/chang/Test.scala
+++ b/test/files/pos/chang/Test.scala
@@ -1,3 +1,3 @@
-object Test extends Application {
+object Test extends App {
new com.netgents.hello.Outer[String]
}
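
Application -> App is another deprecation cleanup: scala.Application ran the whole program inside the object's initializer, with no access to the command line, whereas App defers the body until main is invoked and exposes args. A sketch, separate from the patch:

  object HelloSketch extends App {
    // The body runs from main, so the arguments are available here.
    println("hello, " + args.mkString(" "))
  }
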
diff --git a/test/files/pos/cycle-jsoup.flags b/test/files/pos/cycle-jsoup.flags
new file mode 100644
index 0000000000..ca20f55172
--- /dev/null
+++ b/test/files/pos/cycle-jsoup.flags
@@ -0,0 +1 @@
+-Ybreak-cycles
diff --git a/test/files/pos/cycle-jsoup.scala b/test/files/pos/cycle-jsoup.scala
new file mode 100644
index 0000000000..879e693537
--- /dev/null
+++ b/test/files/pos/cycle-jsoup.scala
@@ -0,0 +1,5 @@
+object Test {
+ def main(args : Array[String]) {
+ org.jsoup.Jsoup.parse(null: java.net.URL, 3000)
+ }
+}
diff --git a/test/files/pos/cycle.flags b/test/files/pos/cycle.flags
new file mode 100644
index 0000000000..ca20f55172
--- /dev/null
+++ b/test/files/pos/cycle.flags
@@ -0,0 +1 @@
+-Ybreak-cycles
diff --git a/test/files/pos/cycle/J_1.java b/test/files/pos/cycle/J_1.java
new file mode 100644
index 0000000000..0cc218eebe
--- /dev/null
+++ b/test/files/pos/cycle/J_1.java
@@ -0,0 +1,16 @@
+package bar;
+
+public class J_1 {
+ public void f(C.D arg) {
+ }
+}
+
+class B extends J_1 {
+ public void g(C.D arg) {
+ }
+}
+
+class C extends B {
+ public class D {
+ }
+}
diff --git a/test/files/pos/cycle/X_2.scala b/test/files/pos/cycle/X_2.scala
new file mode 100644
index 0000000000..c1840f3b99
--- /dev/null
+++ b/test/files/pos/cycle/X_2.scala
@@ -0,0 +1,3 @@
+import bar.J_1._ //<--- illegal cyclic reference involving
+
+class X
diff --git a/test/files/pos/depmet_implicit_chaining_zw.scala b/test/files/pos/depmet_implicit_chaining_zw.scala
index 93da3b0f8e..ce5ea476d8 100644
--- a/test/files/pos/depmet_implicit_chaining_zw.scala
+++ b/test/files/pos/depmet_implicit_chaining_zw.scala
@@ -3,7 +3,7 @@ trait Succ[N]
trait ZipWith[N, S] {
type T
- val x: T = error("")
+ val x: T = sys.error("")
}
object ZipWith {
@@ -15,7 +15,7 @@ object ZipWith {
type T = Stream[S] => zWith.T // dependent types replace the associated types functionality
}
- // can't use implicitly[ZipWith[Succ[Succ[Zero]], Int => String => Boolean]],
+ // can't use implicitly[ZipWith[Succ[Succ[Zero]], Int => String => Boolean]],
// since that will chop of the {type T = ... } refinement in adapt (pt = ZipWith[Succ[Succ[Zero]], Int => String => Boolean])
// this works
// def zipWith(implicit zw: ZipWith[Succ[Succ[Zero]], Int => String => Boolean]): zw.T = zw.x
@@ -25,4 +25,4 @@ object ZipWith {
type _2 = Succ[Succ[Zero]]
val zw = ?[ZipWith[_2, Int => String => Boolean]].x // : Stream[Int] => Stream[String] => Stream[Boolean]
// val zw = implicitly[ZipWith[Succ[Succ[Zero]], Int => String => Boolean]{type T = Stream[Int] => Stream[String] => Stream[Boolean]}].x
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/depmet_implicit_norm_ret.scala b/test/files/pos/depmet_implicit_norm_ret.scala
index bafd2f7c51..0c587cf164 100644
--- a/test/files/pos/depmet_implicit_norm_ret.scala
+++ b/test/files/pos/depmet_implicit_norm_ret.scala
@@ -1,29 +1,29 @@
object Test{
def ?[S <: AnyRef](implicit w : S) : w.type = w
-
+
// fallback, lower priority (overloading rules apply: pick alternative in subclass lowest in subtyping lattice)
class ZipWithDefault {
implicit def ZeroZipWith[S] = new ZipWith[S] {
type T = Stream[S]
- }
+ }
}
-
+
object ZipWith extends ZipWithDefault {
// def apply[S: ZipWith](s : S) = ?[ZipWith[S]].zipWith(s) // TODO: bug return type should be inferred
def apply[S](s : S)(implicit zw: ZipWith[S]): zw.T = zw.zipWith(s)
implicit def SuccZipWith[S,R](implicit zWith : ZipWith[R]) = new ZipWith[S => R] {
type T = Stream[S] => zWith.T // dependent types replace the associated types functionality
- }
+ }
}
-
+
trait ZipWith[S] {
type T
- def zipWith : S => T = error("")
+ def zipWith : S => T = sys.error("")
}
-
+
// bug: inferred return type = (Stream[A]) => java.lang.Object with Test.ZipWith[B]{type T = Stream[B]}#T
// this seems incompatible with vvvvvvvvvvvvvvvvvvvvvv -- #3731
- def map[A,B](f : A => B) /* : Stream[A] => Stream[B]*/ = ZipWith(f)
- val tst: Stream[Int] = map{x: String => x.length}(Stream("a"))
-}
\ No newline at end of file
+ def map[A,B](f : A => B) /* : Stream[A] => Stream[B]*/ = ZipWith(f)
+ val tst: Stream[Int] = map{x: String => x.length}(Stream("a"))
+}
diff --git a/test/files/pos/depmet_implicit_oopsla_session_simpler.scala b/test/files/pos/depmet_implicit_oopsla_session_simpler.scala
index d2986ef56f..7c9af66611 100644
--- a/test/files/pos/depmet_implicit_oopsla_session_simpler.scala
+++ b/test/files/pos/depmet_implicit_oopsla_session_simpler.scala
@@ -5,7 +5,7 @@ object Sessions {
def run(dp: Dual): Unit
}
- sealed case class Stop extends Session {
+ sealed case class Stop() extends Session {
type Dual = Stop
def run(dp: Dual): Unit = {}
diff --git a/test/pending/pos/exhaust_2.scala b/test/files/pos/exhaust_2.scala
index 4f4e47c43b..4f4e47c43b 100644
--- a/test/pending/pos/exhaust_2.scala
+++ b/test/files/pos/exhaust_2.scala
diff --git a/test/files/pos/implicits-new.scala b/test/files/pos/implicits-new.scala
index ffc387132a..7b4f20c6c9 100644
--- a/test/files/pos/implicits-new.scala
+++ b/test/files/pos/implicits-new.scala
@@ -3,9 +3,9 @@ import scala.reflect.{ClassTag, classTag}
// #1435
object t1435 {
- implicit def a(s:String):String = error("")
- implicit def a(i:Int):String = error("")
- implicit def b(i:Int):String = error("")
+ implicit def a(s:String):String = sys.error("")
+ implicit def a(i:Int):String = sys.error("")
+ implicit def b(i:Int):String = sys.error("")
}
class C1435 {
@@ -89,4 +89,4 @@ package foo2709 {
// Problem with specs
object specsProblem {
println(implicitly[TypeTag[Class[_]]])
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/implicits-old.scala b/test/files/pos/implicits-old.scala
index 2c01dd0ba8..62ae6b835c 100644
--- a/test/files/pos/implicits-old.scala
+++ b/test/files/pos/implicits-old.scala
@@ -1,8 +1,8 @@
// #1435
object t1435 {
- implicit def a(s:String):String = error("")
- implicit def a(i:Int):String = error("")
- implicit def b(i:Int):String = error("")
+ implicit def a(s:String):String = sys.error("")
+ implicit def a(i:Int):String = sys.error("")
+ implicit def b(i:Int):String = sys.error("")
}
class C1435 {
@@ -45,7 +45,7 @@ object Test1625 {
implicit def byName[A](x: =>A) = new Wrapped(x)
implicit def byVal[A](x: A) = x
-
+
def main(args: Array[String]) = {
// val res:Wrapped = 7 // works
@@ -57,7 +57,7 @@ object Test1625 {
}
object Test2188 {
- implicit def toJavaList[A: ClassManifest](t:collection.Seq[A]):java.util.List[A] = java.util.Arrays.asList(t.toArray:_*)
+ implicit def toJavaList[A: ClassManifest](t:collection.Seq[A]):java.util.List[A] = java.util.Arrays.asList(t.toArray:_*)
val x: java.util.List[String] = List("foo")
}
@@ -67,21 +67,21 @@ object TestNumericWidening {
val x: java.lang.Long = y
}
-// #2709
-package foo2709 {
- class A
- class B
-
- package object bar {
- implicit def a2b(a: A): B = new B
- }
-
- package bar {
- object test {
- new A: B
- }
- }
-}
+// #2709
+package foo2709 {
+ class A
+ class B
+
+ package object bar {
+ implicit def a2b(a: A): B = new B
+ }
+
+ package bar {
+ object test {
+ new A: B
+ }
+ }
+}
// Problem with specs
object specsProblem {
diff --git a/test/files/pos/infer2-pos.scala b/test/files/pos/infer2-pos.scala
index 06d0f5814f..0ed9666f40 100644
--- a/test/files/pos/infer2-pos.scala
+++ b/test/files/pos/infer2-pos.scala
@@ -1,7 +1,7 @@
package test
class Lst[T]
case class cons[T](x: T, xs: Lst[T]) extends Lst[T]
-case class nil[T] extends Lst[T]
+case class nil[T]() extends Lst[T]
object test {
Console.println(cons(1, nil()))
}
diff --git a/test/files/neg/javaConversions-2.10-ambiguity.scala b/test/files/pos/javaConversions-2.10-ambiguity.scala
index e856846a29..c4aad6cbfc 100644
--- a/test/files/neg/javaConversions-2.10-ambiguity.scala
+++ b/test/files/pos/javaConversions-2.10-ambiguity.scala
@@ -5,6 +5,6 @@ import java.util.concurrent.{ConcurrentHashMap => CHM}
object Bar {
def assertType[T](t: T) = t
val a = new CHM[String, String]() += (("", ""))
- assertType[mutable.ConcurrentMap[String, String]](a)
+ assertType[concurrent.Map[String, String]](a)
}
// vim: set et:
diff --git a/test/files/pos/javaConversions-2.10-regression.scala b/test/files/pos/javaConversions-2.10-regression.scala
index e1b81015ba..7c7ff03b55 100644
--- a/test/files/pos/javaConversions-2.10-regression.scala
+++ b/test/files/pos/javaConversions-2.10-regression.scala
@@ -3,10 +3,10 @@ import JavaConversions._
import java.util.concurrent.{ConcurrentHashMap => CHM}
object Foo {
- def buildCache2_9_simple[K <: AnyRef, V <: AnyRef]: mutable.ConcurrentMap[K, V] =
- asScalaConcurrentMap(new CHM())
+ def buildCache2_9_simple[K <: AnyRef, V <: AnyRef]: concurrent.Map[K, V] =
+ mapAsScalaConcurrentMap(new CHM())
- def buildCache2_9_implicit[K <: AnyRef, V <: AnyRef]: mutable.ConcurrentMap[K, V] =
+ def buildCache2_9_implicit[K <: AnyRef, V <: AnyRef]: concurrent.Map[K, V] =
new CHM[K, V]()
}
diff --git a/test/files/pos/kinds.scala b/test/files/pos/kinds.scala
new file mode 100644
index 0000000000..6d6da0c8b6
--- /dev/null
+++ b/test/files/pos/kinds.scala
@@ -0,0 +1,13 @@
+trait IllKind1 {
+ def g(s: String): String = s
+ def f: String = ???
+ def f[C](c: C): String = g(f)
+}
+
+trait IllKind2 {
+ def b1: Char = ???
+ def b2: Byte = ???
+
+ def f1 = "abc" contains b1
+ def f2 = "abc" contains b2
+}
diff --git a/test/files/pos/liftcode_polymorphic.scala b/test/files/pos/liftcode_polymorphic.scala
index 8f537d278a..249f5a0569 100644
--- a/test/files/pos/liftcode_polymorphic.scala
+++ b/test/files/pos/liftcode_polymorphic.scala
@@ -1,6 +1,6 @@
import scala.reflect.runtime.universe._
-object Append extends Application {
+object Append extends App {
def append[A](l1: List[A], l2: List[A]):List[A] =
l1 match {
diff --git a/test/pending/pos/no-widen-locals.scala b/test/files/pos/no-widen-locals.scala
index 013e63f0a2..013e63f0a2 100644
--- a/test/pending/pos/no-widen-locals.scala
+++ b/test/files/pos/no-widen-locals.scala
diff --git a/test/files/pos/relax_implicit_divergence.scala b/test/files/pos/relax_implicit_divergence.scala
index 8525c84bab..f17d0239d8 100644
--- a/test/files/pos/relax_implicit_divergence.scala
+++ b/test/files/pos/relax_implicit_divergence.scala
@@ -1,7 +1,7 @@
class A(val options: Seq[String])
object Test {
- implicit def ss: Equiv[Seq[String]] = error("dummy")
- implicit def equivA(implicit seqEq: Equiv[Seq[String]]): Equiv[A] = error("dummy")
+ implicit def ss: Equiv[Seq[String]] = sys.error("dummy")
+ implicit def equivA(implicit seqEq: Equiv[Seq[String]]): Equiv[A] = sys.error("dummy")
implicitly[Equiv[A]]
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/sealed-final.flags b/test/files/pos/sealed-final.flags
new file mode 100644
index 0000000000..cfabf7a5b4
--- /dev/null
+++ b/test/files/pos/sealed-final.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Yinline-warnings -optimise
\ No newline at end of file
diff --git a/test/files/pos/sealed-final.scala b/test/files/pos/sealed-final.scala
new file mode 100644
index 0000000000..bdedb5c1f6
--- /dev/null
+++ b/test/files/pos/sealed-final.scala
@@ -0,0 +1,14 @@
+sealed abstract class Foo {
+ @inline def bar(x: Int) = x + 1
+}
+object Foo {
+ def mkFoo(): Foo = new Baz2
+}
+
+object Baz1 extends Foo
+final class Baz2 extends Foo
+
+object Test {
+ // bar should be inlined now
+ def f = Foo.mkFoo() bar 10
+}
diff --git a/test/files/pos/simple-exceptions.scala b/test/files/pos/simple-exceptions.scala
index 38f2fc8500..a9f16bf90b 100644
--- a/test/files/pos/simple-exceptions.scala
+++ b/test/files/pos/simple-exceptions.scala
@@ -8,7 +8,7 @@ object Test {
try {
try {
Console.println("hi!")
- error("xx")
+ sys.error("xx")
}
finally Console.println("ho!")
}
diff --git a/test/files/pos/spec-Function1.scala b/test/files/pos/spec-Function1.scala
index 5b6af67a74..2a3074fd14 100644
--- a/test/files/pos/spec-Function1.scala
+++ b/test/files/pos/spec-Function1.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/test/files/pos/spec-annotations.scala b/test/files/pos/spec-annotations.scala
index 48281e5df5..b23abf48e8 100644
--- a/test/files/pos/spec-annotations.scala
+++ b/test/files/pos/spec-annotations.scala
@@ -1,7 +1,7 @@
class ann(i: Int) extends scala.annotation.Annotation
// annotations on abstract types
-abstract class C1[@serializable @cloneable +T, U, V[_]]
+abstract class C1[@annotation.elidable(0) +T, U, V[_]]
abstract class C2[@deprecated
@ann(1) T <: Number,
V]
diff --git a/test/files/pos/spec-arrays.scala b/test/files/pos/spec-arrays.scala
index 84f6eef071..7ae2cb1efb 100644
--- a/test/files/pos/spec-arrays.scala
+++ b/test/files/pos/spec-arrays.scala
@@ -177,38 +177,11 @@ class ScalaSpec3Test extends Test {
}
}
-object TestJava extends scala.testing.Benchmark {
- def run() {
- (new JavaTest).run()
- }
-}
-
-object TestSpec extends scala.testing.Benchmark {
- def run() {
- (new ScalaSpecTest).run()
- }
-}
-
-object TestSpec2 extends scala.testing.Benchmark {
- def run() {
- (new ScalaSpec2Test).run()
- }
-}
-
-object TestGen extends scala.testing.Benchmark {
- def run() {
- (new ScalaGenTest).run()
- }
-}
-
-object TestWrap extends scala.testing.Benchmark {
- def run() {
- (new ScalaWrapTest).run()
- }
-}
-
-object TestSpec3 extends scala.testing.Benchmark {
- def run() {
- (new ScalaSpec3Test).run()
- }
+object TestRunner {
+ (new JavaTest).run()
+ (new ScalaSpecTest).run()
+ (new ScalaSpec2Test).run()
+ (new ScalaGenTest).run()
+ (new ScalaWrapTest).run()
+ (new ScalaSpec3Test).run()
}
diff --git a/test/files/pos/spec-asseenfrom.scala b/test/files/pos/spec-asseenfrom.scala
index cf20fc5ffa..ede5791709 100644
--- a/test/files/pos/spec-asseenfrom.scala
+++ b/test/files/pos/spec-asseenfrom.scala
@@ -1,8 +1,8 @@
-class Automaton[@specialized(Double) W,State] {
+class Automaton[@specialized(Double) W,State] {
- def finalWeight(s: State): W = error("todo");
+ def finalWeight(s: State): W = sys.error("todo");
- def allStates: Set[State] = error("toodo");
+ def allStates: Set[State] = sys.error("toodo");
/**
* Returns a map from states to its final weight. may expand all nodes.
diff --git a/test/files/pos/spec-cyclic.scala b/test/files/pos/spec-cyclic.scala
index b983caa6db..6cd7685370 100644
--- a/test/files/pos/spec-cyclic.scala
+++ b/test/files/pos/spec-cyclic.scala
@@ -6,25 +6,25 @@ trait MyPartialFunction[-A, +B] extends AnyRef with AbsFun[A, B]
trait ColMap[A, +B] extends MyPartialFunction[A, B] /*with Collection[(A, B)] */
-trait ColSorted[K,+A] extends ColRanged[K,A]
+trait ColSorted[K,+A] extends ColRanged[K,A]
-trait ColSortedMap[K,+E] extends ColMap[K,E] with ColSorted[K,Tuple2[K,E]]
+trait ColSortedMap[K,+E] extends ColMap[K,E] with ColSorted[K,Tuple2[K,E]]
trait MutMap[A, B] extends AnyRef
with ColMap[A, B]
-trait ColRanged[K, +A] //extends Iterable[A]
+trait ColRanged[K, +A] //extends Iterable[A]
trait JclRanged[K,A] extends ColRanged[K,A] //with MutableIterable[A] {
-trait JclMap[K,E] extends /*collection.jcl.MutableIterable[Tuple2[K,E]] with*/ MutMap[K,E]
+trait JclMap[K,E] extends /*collection.jcl.MutableIterable[Tuple2[K,E]] with*/ MutMap[K,E]
trait JclSorted[K,A] extends ColSorted[K,A] with JclRanged[K,A]
trait JclSortedMap[K,E] extends ColSortedMap[K,E] with JclMap[K,E] with JclSorted[K,Tuple2[K,E]]
class Foo[A, B] extends JclSortedMap[A, B] {
- def apply(x: A): B = error("NYI")
+ def apply(x: A): B = sys.error("NYI")
}
class Bar {
diff --git a/test/files/pos/spec-funs.scala b/test/files/pos/spec-funs.scala
index 611ec0ef62..b9acbe171a 100644
--- a/test/files/pos/spec-funs.scala
+++ b/test/files/pos/spec-funs.scala
@@ -54,10 +54,7 @@ final class ClosureTest {
}
}
-object TestInt extends scala.testing.Benchmark {
- def run() = (new IntTest).run()
-}
-
-object TestClosure extends scala.testing.Benchmark {
- def run() = (new ClosureTest).run()
+object TestRunner {
+ (new IntTest).run()
+ (new ClosureTest).run()
}
diff --git a/test/files/pos/spec-sealed.scala b/test/files/pos/spec-sealed.scala
index 5782930899..d7ecfaaabd 100644
--- a/test/files/pos/spec-sealed.scala
+++ b/test/files/pos/spec-sealed.scala
@@ -2,13 +2,13 @@ sealed abstract class MyList[@specialized +A] {
def head: A
def tail: MyList[A]
- def ::[@specialized B >: A](x: B): MyList[B] =
+ def ::[@specialized B >: A](x: B): MyList[B] =
new Cons[B](x, this)
}
case object MyNil extends MyList[Nothing] {
- def head = error("nil")
- def tail = error("nil")
+ def head = sys.error("nil")
+ def tail = sys.error("nil")
}
case class Cons[@specialized a](private val hd: a, tl: MyList[a]) extends MyList[a] {
@@ -19,7 +19,7 @@ case class Cons[@specialized a](private val hd: a, tl: MyList[a]) extends MyList
abstract class IntList extends MyList[Int]
object Main extends App {
- val xs = 1 :: 2 :: 3 :: MyNil
+ val xs = 1 :: 2 :: 3 :: MyNil
println(xs)
}
diff --git a/test/files/pos/spec-sparsearray-new.scala b/test/files/pos/spec-sparsearray-new.scala
index 7b3934c476..df31089fe2 100644
--- a/test/files/pos/spec-sparsearray-new.scala
+++ b/test/files/pos/spec-sparsearray-new.scala
@@ -4,7 +4,7 @@ import scala.collection.mutable.MapLike
class SparseArray[@specialized(Int) T:ClassTag] extends collection.mutable.Map[Int,T] with collection.mutable.MapLike[Int,T,SparseArray[T]] {
override def get(x: Int) = {
val ind = findOffset(x)
- if(ind < 0) None else Some(error("ignore"))
+ if(ind < 0) None else Some(sys.error("ignore"))
}
/**
@@ -13,13 +13,13 @@ class SparseArray[@specialized(Int) T:ClassTag] extends collection.mutable.Map[I
* negative and can be converted into an insertion point with -(rv+1).
*/
private def findOffset(i : Int) : Int = {
- error("impl doesn't matter")
+ sys.error("impl doesn't matter")
}
- override def apply(i : Int) : T = { error("ignore") }
- override def update(i : Int, value : T) = error("ignore")
+ override def apply(i : Int) : T = { sys.error("ignore") }
+ override def update(i : Int, value : T) = sys.error("ignore")
override def empty = new SparseArray[T]
- def -=(ind: Int) = error("ignore")
- def +=(kv: (Int,T)) = error("ignore")
- override final def iterator = error("ignore")
-}
\ No newline at end of file
+ def -=(ind: Int) = sys.error("ignore")
+ def +=(kv: (Int,T)) = sys.error("ignore")
+ override final def iterator = sys.error("ignore")
+}
diff --git a/test/files/pos/spec-sparsearray-old.scala b/test/files/pos/spec-sparsearray-old.scala
index ea7710a785..e10dabd542 100644
--- a/test/files/pos/spec-sparsearray-old.scala
+++ b/test/files/pos/spec-sparsearray-old.scala
@@ -3,7 +3,7 @@ import scala.collection.mutable.MapLike
class SparseArray[@specialized(Int) T:ClassManifest] extends collection.mutable.Map[Int,T] with collection.mutable.MapLike[Int,T,SparseArray[T]] {
override def get(x: Int) = {
val ind = findOffset(x)
- if(ind < 0) None else Some(error("ignore"))
+ if(ind < 0) None else Some(sys.error("ignore"))
}
/**
@@ -12,13 +12,13 @@ class SparseArray[@specialized(Int) T:ClassManifest] extends collection.mutable.
* negative and can be converted into an insertion point with -(rv+1).
*/
private def findOffset(i : Int) : Int = {
- error("impl doesn't matter")
+ sys.error("impl doesn't matter")
}
- override def apply(i : Int) : T = { error("ignore") }
- override def update(i : Int, value : T) = error("ignore")
+ override def apply(i : Int) : T = { sys.error("ignore") }
+ override def update(i : Int, value : T) = sys.error("ignore")
override def empty = new SparseArray[T]
- def -=(ind: Int) = error("ignore")
- def +=(kv: (Int,T)) = error("ignore")
- override final def iterator = error("ignore")
+ def -=(ind: Int) = sys.error("ignore")
+ def +=(kv: (Int,T)) = sys.error("ignore")
+ override final def iterator = sys.error("ignore")
}
diff --git a/test/files/pos/spec-traits.scala b/test/files/pos/spec-traits.scala
index c6cc2921b7..074f6c3d3c 100644
--- a/test/files/pos/spec-traits.scala
+++ b/test/files/pos/spec-traits.scala
@@ -11,19 +11,19 @@ class Lazy {
// issue 3307
class Bug3307 {
- def f[Z](block: String => Z) {
- block("abc")
+ def f[Z](block: String => Z) {
+ block("abc")
}
-
- ({ () =>
- f { implicit x => println(x) } })()
+
+ ({ () =>
+ f { implicit x => println(x) } })()
}
// issue 3301
trait T[X]
class Bug3301 {
- def t[A]: T[A] = error("stub")
+ def t[A]: T[A] = sys.error("stub")
() => {
type X = Int
diff --git a/test/files/pos/super.cmds b/test/files/pos/super.cmds
deleted file mode 100644
index 8f3f8a4172..0000000000
--- a/test/files/pos/super.cmds
+++ /dev/null
@@ -1,2 +0,0 @@
-javac Super_1.java
-scalac Super_2.scala
diff --git a/test/files/pos/t0031.scala b/test/files/pos/t0031.scala
index ec6eae9282..d4050c8184 100644
--- a/test/files/pos/t0031.scala
+++ b/test/files/pos/t0031.scala
@@ -4,17 +4,17 @@ object Main {
def ensure(postcondition: a => Boolean): a
}
- def require[a](precondition: => Boolean)(command: => a): Ensure[a] =
+ def require[a](precondition: => Boolean)(command: => a): Ensure[a] =
if (precondition)
new Ensure[a] {
def ensure(postcondition: a => Boolean): a = {
val result = command;
if (postcondition(result)) result
- else error("Assertion error")
+ else sys.error("Assertion error")
}
}
else
- error("Assertion error");
+ sys.error("Assertion error");
def arb[a](s: List[a]) =
require (! s.isEmpty) {
diff --git a/test/files/pos/t0227.scala b/test/files/pos/t0227.scala
index 8650350c4a..806b20d409 100644
--- a/test/files/pos/t0227.scala
+++ b/test/files/pos/t0227.scala
@@ -5,7 +5,7 @@ final class Settings {
abstract class Factory {
type libraryType <: Base
- final def apply(settings: Settings): libraryType = error("bla")
+ final def apply(settings: Settings): libraryType = sys.error("bla")
}
abstract class Base {
@@ -19,7 +19,7 @@ class SA(val settings: Settings) extends Base {
SD
) ::: settings.f(
SC
- )
+ )
}
object SC extends Factory {
diff --git a/test/files/pos/t0301.scala b/test/files/pos/t0301.scala
index cb68f38062..24b4776010 100644
--- a/test/files/pos/t0301.scala
+++ b/test/files/pos/t0301.scala
@@ -1,7 +1,7 @@
package fos
abstract class Expr
-case class Var extends Expr
+case class Var() extends Expr
object Analyzer {
def substitution(expr: Expr, cls: (Var,Var)): Expr =
diff --git a/test/files/pos/t0422.scala b/test/files/pos/t0422.scala
index cb3ba279d4..2adfa392d2 100644
--- a/test/files/pos/t0422.scala
+++ b/test/files/pos/t0422.scala
@@ -1,5 +1,4 @@
-import scala.util.regexp.WordExp;
-import scala.util.automata.WordBerrySethi;
+package scala.xml.dtd.impl
object BoolWordExp extends WordExp {
type _labelT = MyLabels;
diff --git a/test/files/pos/t0851.scala b/test/files/pos/t0851.scala
new file mode 100644
index 0000000000..fc7109dcd4
--- /dev/null
+++ b/test/files/pos/t0851.scala
@@ -0,0 +1,14 @@
+package test
+
+object test1 {
+ case class Foo[T,T2](f : (T,T2) => String) extends (((T,T2)) => String){
+ def apply(t : T) = (s:T2) => f(t,s)
+ def apply(p : (T,T2)) = f(p._1,p._2)
+ }
+ implicit def g[T](f : (T,String) => String) = Foo(f)
+ def main(args : Array[String]) : Unit = {
+ val f = (x:Int,s:String) => s + x
+ println(f(1))
+ ()
+ }
+}
diff --git a/test/files/pos/t0872.scala b/test/files/pos/t0872.scala
new file mode 100644
index 0000000000..8f4c1c4436
--- /dev/null
+++ b/test/files/pos/t0872.scala
@@ -0,0 +1,8 @@
+object Main {
+ def main(args : Array[String]) {
+ val fn = (a : Int, str : String) => "a: " + a + ", str: " + str
+ implicit def fx[T](f : (T,String) => String) = (x:T) => f(x,null)
+ println(fn(1))
+ ()
+ }
+}
diff --git a/test/files/pos/t1029.cmds b/test/files/pos/t1029.cmds
deleted file mode 100644
index 06b863dc03..0000000000
--- a/test/files/pos/t1029.cmds
+++ /dev/null
@@ -1,2 +0,0 @@
-scalac Test_1.scala
-scalac Test_2.scala
diff --git a/test/files/pos/t1107.scala b/test/files/pos/t1107a.scala
index 0bf40bb4cc..0bf40bb4cc 100644
--- a/test/files/pos/t1107.scala
+++ b/test/files/pos/t1107a.scala
diff --git a/test/files/pos/t1203.scala b/test/files/pos/t1203a.scala
index 062ef93fc6..062ef93fc6 100644
--- a/test/files/pos/t1203.scala
+++ b/test/files/pos/t1203a.scala
diff --git a/test/files/pos/t1230/S.scala b/test/files/pos/t1230/S.scala
index f8a691b6de..530dd4b853 100644
--- a/test/files/pos/t1230/S.scala
+++ b/test/files/pos/t1230/S.scala
@@ -1 +1 @@
-object S extends Application { (new J).foo = 5 }
+object S extends App { (new J).foo = 5 }
diff --git a/test/files/pos/t1231/S.scala b/test/files/pos/t1231/S.scala
index ee08866e04..f14aa2561b 100644
--- a/test/files/pos/t1231/S.scala
+++ b/test/files/pos/t1231/S.scala
@@ -1 +1 @@
-object S extends Application { println(J.j1) }
+object S extends App { println(J.j1) }
diff --git a/test/files/pos/t1385.scala b/test/files/pos/t1385.scala
index 59953bcc39..6fe7308281 100644
--- a/test/files/pos/t1385.scala
+++ b/test/files/pos/t1385.scala
@@ -1,3 +1,3 @@
-@serializable object Test {
- private def readResolve:AnyRef = this
+object Test extends Serializable {
+ private def readResolve: AnyRef = this
}
diff --git a/test/files/pos/t1439.flags b/test/files/pos/t1439.flags
index 1e70f5c5c7..bca57e4785 100644
--- a/test/files/pos/t1439.flags
+++ b/test/files/pos/t1439.flags
@@ -1 +1 @@
--unchecked -Xfatal-warnings -Xoldpatmat -language:higherKinds
+-unchecked -Xfatal-warnings -language:higherKinds
diff --git a/test/pending/pos/t1751/A1_2.scala b/test/files/pos/t1751/A1_2.scala
index 354d5eecd0..354d5eecd0 100644
--- a/test/pending/pos/t1751/A1_2.scala
+++ b/test/files/pos/t1751/A1_2.scala
diff --git a/test/pending/pos/t1751/A2_1.scala b/test/files/pos/t1751/A2_1.scala
index c768062e43..c768062e43 100644
--- a/test/pending/pos/t1751/A2_1.scala
+++ b/test/files/pos/t1751/A2_1.scala
diff --git a/test/pending/pos/t1751/SuiteClasses.java b/test/files/pos/t1751/SuiteClasses.java
index a415e4f572..a415e4f572 100644
--- a/test/pending/pos/t1751/SuiteClasses.java
+++ b/test/files/pos/t1751/SuiteClasses.java
diff --git a/test/pending/pos/t1782/Ann.java b/test/files/pos/t1782/Ann.java
index 0dcfbd2ed7..0dcfbd2ed7 100644
--- a/test/pending/pos/t1782/Ann.java
+++ b/test/files/pos/t1782/Ann.java
diff --git a/test/pending/pos/t1782/Days.java b/test/files/pos/t1782/Days.java
index 203a87b1c2..203a87b1c2 100644
--- a/test/pending/pos/t1782/Days.java
+++ b/test/files/pos/t1782/Days.java
diff --git a/test/pending/pos/t1782/ImplementedBy.java b/test/files/pos/t1782/ImplementedBy.java
index 6aa8b4fa9e..6aa8b4fa9e 100644
--- a/test/pending/pos/t1782/ImplementedBy.java
+++ b/test/files/pos/t1782/ImplementedBy.java
diff --git a/test/pending/pos/t1782/Test_1.scala b/test/files/pos/t1782/Test_1.scala
index 6467a74c29..6467a74c29 100644
--- a/test/pending/pos/t1782/Test_1.scala
+++ b/test/files/pos/t1782/Test_1.scala
diff --git a/test/files/pos/t1942.cmds b/test/files/pos/t1942.cmds
deleted file mode 100644
index c14311042a..0000000000
--- a/test/files/pos/t1942.cmds
+++ /dev/null
@@ -1,2 +0,0 @@
-scalac A_1.scala
-scalac Test_2.scala
diff --git a/test/files/pos/t2331.scala b/test/files/pos/t2331.scala
index 9a15b5c2a9..a7f80ac98e 100644
--- a/test/files/pos/t2331.scala
+++ b/test/files/pos/t2331.scala
@@ -4,8 +4,8 @@ trait C {
object Test {
val o /*: C --> no crash*/ = new C {
- def m[T]: Nothing /*: T --> no crash*/ = error("omitted")
+ def m[T]: Nothing /*: T --> no crash*/ = sys.error("omitted")
}
o.m[Nothing]
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/t2421.scala b/test/files/pos/t2421.scala
index 26e485c160..2544a1cb36 100644
--- a/test/files/pos/t2421.scala
+++ b/test/files/pos/t2421.scala
@@ -1,14 +1,14 @@
object Test {
abstract class <~<[-From, +To] extends (From => To)
- implicit def trivial[A]: A <~< A = error("")
+ implicit def trivial[A]: A <~< A = sys.error("")
trait Forcible[T]
- implicit val forcibleInt: (Int <~< Forcible[Int]) = error("")
+ implicit val forcibleInt: (Int <~< Forcible[Int]) = sys.error("")
- def headProxy[P <: Forcible[Int]](implicit w: Int <~< P): P = error("")
-
- headProxy
- // trivial[Int] should not be considered a valid implicit, since w would have type Int <~< Int,
+ def headProxy[P <: Forcible[Int]](implicit w: Int <~< P): P = sys.error("")
+
+ headProxy
+ // trivial[Int] should not be considered a valid implicit, since w would have type Int <~< Int,
// and headProxy's type parameter P cannot be instantiated to Int
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/t2421b_pos.scala b/test/files/pos/t2421b_pos.scala
index 8b848abb75..0df3461662 100644
--- a/test/files/pos/t2421b_pos.scala
+++ b/test/files/pos/t2421b_pos.scala
@@ -11,7 +11,7 @@ object Test {
f
}
-/* bug:
+/* bug:
error: ambiguous implicit values:
both method b in object Test1 of type [X <: Test1.B]Test1.F[X]
and method a in object Test1 of type => Test1.F[Test1.A]
diff --git a/test/files/pos/t2429.scala b/test/files/pos/t2429.scala
index 3ea3f9e2a5..550681b6a2 100755
--- a/test/files/pos/t2429.scala
+++ b/test/files/pos/t2429.scala
@@ -1,10 +1,10 @@
object Msg {
trait T
-
+
trait TSeq
-
+
object TSeq {
- implicit def fromSeq(s: Seq[T]): TSeq = error("stub")
+ implicit def fromSeq(s: Seq[T]): TSeq = sys.error("stub")
}
def render {
@@ -12,7 +12,7 @@ object Msg {
case (a, b) => {
a match {
case _ => b match {
- case _ => error("stub")
+ case _ => sys.error("stub")
}
}
}
@@ -20,6 +20,6 @@ object Msg {
}
}
object Oops {
- implicit def someImplicit(s: Seq[_]): String = error("stub")
+ implicit def someImplicit(s: Seq[_]): String = sys.error("stub")
def item: String = Nil map { case e: Any => e }
}
diff --git a/test/files/pos/t2464.cmds b/test/files/pos/t2464.cmds
deleted file mode 100644
index ca733ef23d..0000000000
--- a/test/files/pos/t2464.cmds
+++ /dev/null
@@ -1,3 +0,0 @@
-javac JavaOne.java
-scalac ScalaOne_1.scala
-scalac t2464_2.scala
diff --git a/test/files/pos/t2484.scala b/test/files/pos/t2484.scala
index 7d1b7cb03c..29f798edf9 100755
--- a/test/files/pos/t2484.scala
+++ b/test/files/pos/t2484.scala
@@ -1,7 +1,9 @@
+import concurrent.ExecutionContext.Implicits.global
+
class Admin extends javax.swing.JApplet {
val jScrollPane = new javax.swing.JScrollPane (null, 0, 0)
def t2484: Unit = {
- scala.concurrent.ops.spawn {jScrollPane.synchronized {
+ scala.concurrent.future {jScrollPane.synchronized {
def someFunction () = {}
//scala.concurrent.ops.spawn {someFunction ()}
jScrollPane.addComponentListener (new java.awt.event.ComponentAdapter {override def componentShown (e: java.awt.event.ComponentEvent) = {
diff --git a/test/files/pos/t2698.scala b/test/files/pos/t2698.scala
index 0e2662de61..7de50a13d6 100644
--- a/test/files/pos/t2698.scala
+++ b/test/files/pos/t2698.scala
@@ -1,5 +1,6 @@
+package scala.xml.dtd.impl
+
import scala.collection._
-import scala.util.regexp._
abstract class S2 {
val lang: WordExp
diff --git a/test/files/pos/t2726.cmds b/test/files/pos/t2726.cmds
deleted file mode 100644
index 5fcb18bfbb..0000000000
--- a/test/files/pos/t2726.cmds
+++ /dev/null
@@ -1,2 +0,0 @@
-scalac SQLBuilder_1.scala
-scalac test_2.scala
diff --git a/test/files/pos/t2797.scala b/test/files/pos/t2797.scala
index 4323664e91..cf579d8de4 100644
--- a/test/files/pos/t2797.scala
+++ b/test/files/pos/t2797.scala
@@ -1,9 +1,9 @@
class MyVector[A] {
- def map[B](f: A => B): MyVector[B] = error("")
+ def map[B](f: A => B): MyVector[B] = sys.error("")
}
object Test {
def unzip[B, C](_this: MyVector[(B, C)]): (MyVector[B], MyVector[C]) = {
(_this.map{ bc => bc._1 }, _this.map{ bc => bc._2 })
}
-}
\ No newline at end of file
+}
diff --git a/test/pending/pos/t294/Ann.java b/test/files/pos/t294/Ann.java
index 934ca46297..934ca46297 100644
--- a/test/pending/pos/t294/Ann.java
+++ b/test/files/pos/t294/Ann.java
diff --git a/test/pending/pos/t294/Ann2.java b/test/files/pos/t294/Ann2.java
index 025b79e794..025b79e794 100644
--- a/test/pending/pos/t294/Ann2.java
+++ b/test/files/pos/t294/Ann2.java
diff --git a/test/pending/pos/t294/Test_1.scala b/test/files/pos/t294/Test_1.scala
index ff1f34b10e..ff1f34b10e 100644
--- a/test/pending/pos/t294/Test_1.scala
+++ b/test/files/pos/t294/Test_1.scala
diff --git a/test/pending/pos/t294/Test_2.scala b/test/files/pos/t294/Test_2.scala
index 9fb1c6e175..9fb1c6e175 100644
--- a/test/pending/pos/t294/Test_2.scala
+++ b/test/files/pos/t294/Test_2.scala
diff --git a/test/files/pos/t3152.scala b/test/files/pos/t3152.scala
index a20428dbee..3d1dcbd6f0 100644
--- a/test/files/pos/t3152.scala
+++ b/test/files/pos/t3152.scala
@@ -1,13 +1,13 @@
trait Applicative[M[_]]
sealed trait MA[M[_], A] {
- def sequence[N[_], B](implicit a: A <:< N[B], n: Applicative[N]): N[M[B]] = error("stub")
- // def sequence3[N[_], B]()(implicit a: A <:< N[B], n: Applicative[N]): N[M[B]] = error("stub")
+ def sequence[N[_], B](implicit a: A <:< N[B], n: Applicative[N]): N[M[B]] = sys.error("stub")
+ // def sequence3[N[_], B]()(implicit a: A <:< N[B], n: Applicative[N]): N[M[B]] = sys.error("stub")
}
object test {
- implicit def ListMA[A](l: List[A]): MA[List, A] = error("stub")
- implicit val ao: Applicative[Option] = error("stub")
+ implicit def ListMA[A](l: List[A]): MA[List, A] = sys.error("stub")
+ implicit val ao: Applicative[Option] = sys.error("stub")
/* This compiles OK:
(Nil: List[Option[Int]]).sequence3(): Option[List[Int]]
@@ -17,4 +17,4 @@ object test {
// !!! No line number is reported with the error
(Nil: List[Option[Int]]).sequence: Option[List[Int]]
(List[Option[Int]]()).sequence: Option[List[Int]]
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/t3160.scala b/test/files/pos/t3160.scala
new file mode 100644
index 0000000000..3309ece160
--- /dev/null
+++ b/test/files/pos/t3160.scala
@@ -0,0 +1,6 @@
+import scala.collection.mutable._
+import scala.xml._
+
+class A {
+ def f(x: Node): Node = ???
+}
diff --git a/test/files/pos/t3252.scala b/test/files/pos/t3252.scala
index 4b8e862714..3ecc1e7cef 100644
--- a/test/files/pos/t3252.scala
+++ b/test/files/pos/t3252.scala
@@ -8,8 +8,8 @@ class A {
}
}
- private def g[T](block : => T) = error("")
+ private def g[T](block : => T) = sys.error("")
}
object B {
- def h(block : => Unit) : Nothing = error("")
-}
\ No newline at end of file
+ def h(block : => Unit) : Nothing = sys.error("")
+}
diff --git a/test/files/pos/t3349/Test.scala b/test/files/pos/t3349/Test.scala
index 8174e4c4f8..595beadc20 100644
--- a/test/files/pos/t3349/Test.scala
+++ b/test/files/pos/t3349/Test.scala
@@ -1,5 +1,5 @@
object Test {
val label = "name"
- val table: Table = error("")
+ val table: Table = sys.error("")
table.addColumn( label, label.getClass )
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/t3363-new.scala b/test/files/pos/t3363-new.scala
index e609f4d55f..fef2bf8a72 100644
--- a/test/files/pos/t3363-new.scala
+++ b/test/files/pos/t3363-new.scala
@@ -9,7 +9,7 @@ object TestCase {
//if you inherit from MapOps[T] instead of MapOps[F] then code compiles fine
implicit def map2ops[T,F](fs: Map[T,F]) = new MapOps[F] {
//if you remove this line, then code compiles
- lazy val m: TypeTag[T] = error("just something to make it compile")
+ lazy val m: TypeTag[T] = sys.error("just something to make it compile")
def is(xs: List[T]) = List(xs)
}
@@ -17,4 +17,4 @@ object TestCase {
println(Map(1 -> "2") is List(2))
}
- }
\ No newline at end of file
+ }
diff --git a/test/files/pos/t3363-old.scala b/test/files/pos/t3363-old.scala
index bae54084ea..c08cf2a6b6 100644
--- a/test/files/pos/t3363-old.scala
+++ b/test/files/pos/t3363-old.scala
@@ -7,7 +7,7 @@ object TestCase {
//if you inherit from MapOps[T] instead of MapOps[F] then code compiles fine
implicit def map2ops[T,F](fs: Map[T,F]) = new MapOps[F] {
//if you remove this line, then code compiles
- lazy val m: Manifest[T] = error("just something to make it compile")
+ lazy val m: Manifest[T] = sys.error("just something to make it compile")
def is(xs: List[T]) = List(xs)
}
diff --git a/test/files/pos/t342.scala b/test/files/pos/t342.scala
deleted file mode 100644
index 752b24d2ba..0000000000
--- a/test/files/pos/t342.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-object Main extends App {
-
- object Foo extends Enumeration(0, "Bar") { // 2
- val Bar = Value
- }
- import Foo._;
- Console.println(Bar)
-}
diff --git a/test/files/pos/t344.scala b/test/files/pos/t344.scala
index 8a6ad9120d..449a763af7 100644
--- a/test/files/pos/t344.scala
+++ b/test/files/pos/t344.scala
@@ -1,7 +1,7 @@
object Bug {
class A;
- case class A1 extends A;
- case class A2 extends A;
+ case class A1() extends A;
+ case class A2() extends A;
def f: A =
if (true)
A1()
diff --git a/test/files/pos/t3440.scala b/test/files/pos/t3440.scala
index 46bba1b207..0e7ca6b70f 100644
--- a/test/files/pos/t3440.scala
+++ b/test/files/pos/t3440.scala
@@ -4,15 +4,15 @@ object test {
}
case object Int8 extends SampleFormat1 {
- def readerFactory = error("")
+ def readerFactory = sys.error("")
}
case object Int16 extends SampleFormat1 {
- def readerFactory = error("")
+ def readerFactory = sys.error("")
}
-
+
(new {}: Any) match {
case 8 => Int8
case 16 => Int16
- case _ => error("")
+ case _ => sys.error("")
}
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/t3477.scala b/test/files/pos/t3477.scala
index 660aa55736..6a94baa6c8 100644
--- a/test/files/pos/t3477.scala
+++ b/test/files/pos/t3477.scala
@@ -1,7 +1,7 @@
class J3 {
- def f[K, K1 >: K, V](x: Map[K1, V]): Map[K, V] = error("")
+ def f[K, K1 >: K, V](x: Map[K1, V]): Map[K, V] = sys.error("")
}
object Test {
(new J3).f(Map[Int, Int]())
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/t3731.scala b/test/files/pos/t3731.scala
index 75938540c0..7a3cbec0f4 100644
--- a/test/files/pos/t3731.scala
+++ b/test/files/pos/t3731.scala
@@ -1,8 +1,8 @@
object Test{
trait ZW[S]{type T}
- def ZipWith[S, M <: ZW[S]]: M#T = error("ZW")
+ def ZipWith[S, M <: ZW[S]]: M#T = sys.error("ZW")
- // meh must be parameterised to force an asSeenFrom that
+ // meh must be parameterised to force an asSeenFrom that
// duplicates the refinement in the TR's pre without updating its sym
def meh[A] = ZipWith[A, ZW[A]{type T=Stream[A]}]
diff --git a/test/files/pos/t3864/tuples_1.scala b/test/files/pos/t3864/tuples_1.scala
index 1d19af6e41..5e97f8452b 100644
--- a/test/files/pos/t3864/tuples_1.scala
+++ b/test/files/pos/t3864/tuples_1.scala
@@ -1,11 +1,11 @@
-trait PimpedType[X] {
+trait EnrichedType[X] {
val value: X
}
trait Tuples {
-
-trait Tuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] extends PimpedType[Tuple15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]] {
+
+trait Tuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] extends EnrichedType[Tuple15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]] {
def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15)}
def toIndexedSeq[Z](implicit ev: value.type <:< Tuple15[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15)}
def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15))
@@ -13,8 +13,8 @@ trait Tuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] extends PimpedType[T
implicit def ToTuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)): Tuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] = new { val value = t } with Tuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]
-
-trait Tuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] extends PimpedType[Tuple16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]] {
+
+trait Tuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] extends EnrichedType[Tuple16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]] {
def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16)}
def toIndexedSeq[Z](implicit ev: value.type <:< Tuple16[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16)}
def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16))
@@ -22,8 +22,8 @@ trait Tuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] extends PimpedTyp
implicit def ToTuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)): Tuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] = new { val value = t } with Tuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]
-
-trait Tuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] extends PimpedType[Tuple17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]] {
+
+trait Tuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] extends EnrichedType[Tuple17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]] {
def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17)}
def toIndexedSeq[Z](implicit ev: value.type <:< Tuple17[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17)}
def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _, _17: (Q => QQ) = identity[Q] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16), _17(value._17))
@@ -31,8 +31,8 @@ trait Tuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] extends Pimped
implicit def ToTuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)): Tuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] = new { val value = t } with Tuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]
-
-trait Tuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] extends PimpedType[Tuple18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]] {
+
+trait Tuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] extends EnrichedType[Tuple18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]] {
def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18)}
def toIndexedSeq[Z](implicit ev: value.type <:< Tuple18[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18)}
def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _, _17: (Q => QQ) = identity[Q] _, _18: (R => RR) = identity[R] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16), _17(value._17), _18(value._18))
@@ -40,8 +40,8 @@ trait Tuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] extends Pim
implicit def ToTuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)): Tuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] = new { val value = t } with Tuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]
-
-trait Tuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] extends PimpedType[Tuple19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]] {
+
+trait Tuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] extends EnrichedType[Tuple19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]] {
def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19)}
def toIndexedSeq[Z](implicit ev: value.type <:< Tuple19[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19)}
def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _, _17: (Q => QQ) = identity[Q] _, _18: (R => RR) = identity[R] _, _19: (S => SS) = identity[S] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16), _17(value._17), _18(value._18), _19(value._19))
@@ -49,8 +49,8 @@ trait Tuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] extends
implicit def ToTuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)): Tuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] = new { val value = t } with Tuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]
-
-trait Tuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] extends PimpedType[Tuple20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]] {
+
+trait Tuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] extends EnrichedType[Tuple20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]] {
def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20)}
def toIndexedSeq[Z](implicit ev: value.type <:< Tuple20[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20)}
def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _, _17: (Q => QQ) = identity[Q] _, _18: (R => RR) = identity[R] _, _19: (S => SS) = identity[S] _, _20: (T => TT) = identity[T] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16), _17(value._17), _18(value._18), _19(value._19), _20(value._20))
@@ -58,8 +58,8 @@ trait Tuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] exten
implicit def ToTuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)): Tuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] = new { val value = t } with Tuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]
-
-trait Tuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] extends PimpedType[Tuple21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]] {
+
+trait Tuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] extends EnrichedType[Tuple21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]] {
def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21)}
def toIndexedSeq[Z](implicit ev: value.type <:< Tuple21[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21)}
def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT, UU](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _, _17: (Q => QQ) = identity[Q] _, _18: (R => RR) = identity[R] _, _19: (S => SS) = identity[S] _, _20: (T => TT) = identity[T] _, _21: (U => UU) = identity[U] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT, UU) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16), _17(value._17), _18(value._18), _19(value._19), _20(value._20), _21(value._21))
@@ -67,12 +67,12 @@ trait Tuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] ex
implicit def ToTuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)): Tuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] = new { val value = t } with Tuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]
-
-trait Tuple22W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V] extends PimpedType[Tuple22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]] {
+
+trait Tuple22W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V] extends EnrichedType[Tuple22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]] {
def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22)}
def toIndexedSeq[Z](implicit ev: value.type <:< Tuple22[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22)}
def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT, UU, VV](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _, _17: (Q => QQ) = identity[Q] _, _18: (R => RR) = identity[R] _, _19: (S => SS) = identity[S] _, _20: (T => TT) = identity[T] _, _21: (U => UU) = identity[U] _, _22: (V => VV) = identity[V] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT, UU, VV) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16), _17(value._17), _18(value._18), _19(value._19), _20(value._20), _21(value._21), _22(value._22))
}
implicit def ToTuple22W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)): Tuple22W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V] = new { val value = t } with Tuple22W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/t3883.scala b/test/files/pos/t3883.scala
index adde0526b2..1b62c0c6d6 100644
--- a/test/files/pos/t3883.scala
+++ b/test/files/pos/t3883.scala
@@ -1,14 +1,14 @@
// need to test both orders
object A1 {
- implicit def i: Equiv[Boolean] = error("")
- implicit def div[T, A](implicit f: T => A, eq: Equiv[A]): Equiv[T] = error("")
+ implicit def i: Equiv[Boolean] = sys.error("")
+ implicit def div[T, A](implicit f: T => A, eq: Equiv[A]): Equiv[T] = sys.error("")
implicitly[Equiv[Boolean]]
}
object A2 {
- implicit def div[T, A](implicit f: T => A, eq: Equiv[A]): Equiv[T] = error("")
- implicit def i: Equiv[Boolean] = error("")
+ implicit def div[T, A](implicit f: T => A, eq: Equiv[A]): Equiv[T] = sys.error("")
+ implicit def i: Equiv[Boolean] = sys.error("")
implicitly[Equiv[Boolean]]
}
diff --git a/test/files/pos/t3927.scala b/test/files/pos/t3927.scala
index eb4c4b3be5..f5869c55d5 100644
--- a/test/files/pos/t3927.scala
+++ b/test/files/pos/t3927.scala
@@ -1,6 +1,6 @@
object A {
def x {
- implicit lazy val e: Equiv[Int] = error("")
+ implicit lazy val e: Equiv[Int] = sys.error("")
implicitly[Equiv[Int]]
}
-}
+}
diff --git a/test/files/pos/t422.scala b/test/files/pos/t422.scala
deleted file mode 100644
index cb3ba279d4..0000000000
--- a/test/files/pos/t422.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-import scala.util.regexp.WordExp;
-import scala.util.automata.WordBerrySethi;
-
-object BoolWordExp extends WordExp {
- type _labelT = MyLabels;
- type _regexpT = RegExp;
- abstract class MyLabels extends Label ;
- case class MyLabel(c:Char) extends MyLabels;
-}
-
-object MyTranslator extends WordBerrySethi {
- override val lang = BoolWordExp;
- import lang._;
- override protected def seenLabel( r:RegExp, i:Int, label: _labelT ): Unit = {
- super.seenLabel(r,i,label)
- }
-}
diff --git a/test/files/pos/t4649.flags b/test/files/pos/t4649.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t4649.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
\ No newline at end of file
diff --git a/test/pending/pos/t4649.scala b/test/files/pos/t4649.scala
index 0d6caa8d7a..0d6caa8d7a 100644
--- a/test/pending/pos/t4649.scala
+++ b/test/files/pos/t4649.scala
diff --git a/test/files/pos/t4744.flags b/test/files/pos/t4744.flags
new file mode 100644
index 0000000000..ca20f55172
--- /dev/null
+++ b/test/files/pos/t4744.flags
@@ -0,0 +1 @@
+-Ybreak-cycles
diff --git a/test/files/pos/t4744/Bar.scala b/test/files/pos/t4744/Bar.scala
new file mode 100644
index 0000000000..1fb6d78973
--- /dev/null
+++ b/test/files/pos/t4744/Bar.scala
@@ -0,0 +1 @@
+class Bar { val quux = new Foo[java.lang.Integer]() }
diff --git a/test/files/pos/t4744/Foo.java b/test/files/pos/t4744/Foo.java
new file mode 100644
index 0000000000..6c764d0470
--- /dev/null
+++ b/test/files/pos/t4744/Foo.java
@@ -0,0 +1 @@
+public class Foo<T extends Comparable<? super T>> {}
diff --git a/test/pending/pos/t4786.scala b/test/files/pos/t4786.scala
index f0579142b8..f0579142b8 100644
--- a/test/pending/pos/t4786.scala
+++ b/test/files/pos/t4786.scala
diff --git a/test/pending/pos/t4859.scala b/test/files/pos/t4859.scala
index ec5abd966d..284a39b7ab 100644
--- a/test/pending/pos/t4859.scala
+++ b/test/files/pos/t4859.scala
@@ -1,5 +1,7 @@
object O {
+ // error: C is not a legal prefix for a constructor
C().CC()
+ // but this works.
D().DD()
}
diff --git a/test/files/pos/t5223.scala b/test/files/pos/t5223.scala
index 0b2528e367..d81daa9907 100644
--- a/test/files/pos/t5223.scala
+++ b/test/files/pos/t5223.scala
@@ -2,5 +2,5 @@ import scala.reflect.runtime.universe._
object Foo extends App {
reify{def printf(format: String, args: Any*): String = null }
- reify{def printf(format: String, args: Any*): String = ("abc": @cloneable)}
+ reify{def printf(format: String, args: Any*): String = ("abc": @deprecated)}
}
\ No newline at end of file
diff --git a/test/pending/pos/t5399a.scala b/test/files/pos/t5399a.scala
index 4ebd85ad03..4ebd85ad03 100644
--- a/test/pending/pos/t5399a.scala
+++ b/test/files/pos/t5399a.scala
diff --git a/test/pending/pos/t5606.scala b/test/files/pos/t5606.scala
index 2545271e32..2545271e32 100644
--- a/test/pending/pos/t5606.scala
+++ b/test/files/pos/t5606.scala
diff --git a/test/pending/pos/t5639/Bar.scala b/test/files/pos/t5639/Bar.scala
index f577500acd..f577500acd 100644
--- a/test/pending/pos/t5639/Bar.scala
+++ b/test/files/pos/t5639/Bar.scala
diff --git a/test/pending/pos/t5639/Foo.scala b/test/files/pos/t5639/Foo.scala
index 6602150661..6602150661 100644
--- a/test/pending/pos/t5639/Foo.scala
+++ b/test/files/pos/t5639/Foo.scala
diff --git a/test/files/pos/t5644/BoxesRunTime.java b/test/files/pos/t5644/BoxesRunTime.java
index 241bf79ac8..74c4c6b4b9 100644
--- a/test/files/pos/t5644/BoxesRunTime.java
+++ b/test/files/pos/t5644/BoxesRunTime.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/test/files/neg/t5692a.check b/test/files/pos/t5692a.check
index 7fbfb5dba7..7fbfb5dba7 100644
--- a/test/files/neg/t5692a.check
+++ b/test/files/pos/t5692a.check
diff --git a/test/files/neg/t5692b.flags b/test/files/pos/t5692a.flags
index cd66464f2f..cd66464f2f 100644
--- a/test/files/neg/t5692b.flags
+++ b/test/files/pos/t5692a.flags
diff --git a/test/files/neg/t5692a/Macros_1.scala b/test/files/pos/t5692a/Macros_1.scala
index 06b5a3de36..06b5a3de36 100644
--- a/test/files/neg/t5692a/Macros_1.scala
+++ b/test/files/pos/t5692a/Macros_1.scala
diff --git a/test/files/neg/t5692a/Test_2.scala b/test/files/pos/t5692a/Test_2.scala
index 08d510cc6f..08d510cc6f 100644
--- a/test/files/neg/t5692a/Test_2.scala
+++ b/test/files/pos/t5692a/Test_2.scala
diff --git a/test/files/neg/t5692b.check b/test/files/pos/t5692b.check
index 16796826b4..16796826b4 100644
--- a/test/files/neg/t5692b.check
+++ b/test/files/pos/t5692b.check
diff --git a/test/files/pos/t5692b.flags b/test/files/pos/t5692b.flags
new file mode 100644
index 0000000000..cd66464f2f
--- /dev/null
+++ b/test/files/pos/t5692b.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/t5692b/Macros_1.scala b/test/files/pos/t5692b/Macros_1.scala
index b28d19f903..b28d19f903 100644
--- a/test/files/neg/t5692b/Macros_1.scala
+++ b/test/files/pos/t5692b/Macros_1.scala
diff --git a/test/files/neg/t5692b/Test_2.scala b/test/files/pos/t5692b/Test_2.scala
index 08d510cc6f..08d510cc6f 100644
--- a/test/files/neg/t5692b/Test_2.scala
+++ b/test/files/pos/t5692b/Test_2.scala
diff --git a/test/files/pos/t5809.scala b/test/files/pos/t5809.scala
index 133e13c4ed..6101f546b3 100644
--- a/test/files/pos/t5809.scala
+++ b/test/files/pos/t5809.scala
@@ -1,5 +1,6 @@
package object foo {
- implicit class PimpedInt(foo: Int) {
+ implicit class EnrichedInt(foo: Int) {
def bar = ???
+ def bippy = foo
}
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/t5858.scala b/test/files/pos/t5858.scala
new file mode 100644
index 0000000000..f2b0f58d76
--- /dev/null
+++ b/test/files/pos/t5858.scala
@@ -0,0 +1,3 @@
+object Test {
+ new xml.Elem(null, null, xml.Null, xml.TopScope, Nil: _*) // was ambiguous
+}
diff --git a/test/files/pos/t5877.scala b/test/files/pos/t5877.scala
index c7827df99f..939013cd01 100644
--- a/test/files/pos/t5877.scala
+++ b/test/files/pos/t5877.scala
@@ -7,8 +7,8 @@ package foo {
}
package object foo {
- // Crasher: No synthetics for method PimpedFoo2: synthetics contains
- implicit class PimpedFoo2(value: Foo) {
+ // Crasher: No synthetics for method EnrichedFoo2: synthetics contains
+ implicit class EnrichedFoo2(value: Foo) {
def huzzah = ""
}
}
diff --git a/test/files/pos/t5877b.scala b/test/files/pos/t5877b.scala
index 6b8cbd473e..43a2ea2f06 100644
--- a/test/files/pos/t5877b.scala
+++ b/test/files/pos/t5877b.scala
@@ -7,7 +7,7 @@ object Test {
}
object `package` {
- implicit class PimpedFoo2(value: Foo) {
+ implicit class EnrichedFoo2(value: Foo) {
def huzzah = ""
}
}
diff --git a/test/files/pos/t6301.scala b/test/files/pos/t6301.scala
new file mode 100644
index 0000000000..fa81bbfa77
--- /dev/null
+++ b/test/files/pos/t6301.scala
@@ -0,0 +1,9 @@
+trait LoadedOver[@specialized(Int) A] {
+ def foo(x: Any): A
+ def foo(xs: String): A
+}
+
+object Test {
+ def loaded: AnyRef with LoadedOver[Int] = sys.error("")
+ loaded.foo("")
+}
diff --git a/test/files/pos/t6355pos.scala b/test/files/pos/t6355pos.scala
new file mode 100644
index 0000000000..c0e740dd68
--- /dev/null
+++ b/test/files/pos/t6355pos.scala
@@ -0,0 +1,16 @@
+import scala.language.dynamics
+
+class A extends Dynamic {
+ def applyDynamic[T1](method: String)(x1: T1): Any = 1
+ def applyDynamic[T1, T2](method: String)(x: T1, y: T2): Any = 2
+ def applyDynamic[T1, T2, T3](method: String)(x: T1, y: T2, z: T3): Any = 3
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val x = new A
+ println(x[Int](5))
+ println(x[Int, String](5, "a"))
+ println(x[Int, String, Int](5, "a", 5))
+ }
+}
diff --git a/test/files/pos/t640.scala b/test/files/pos/t640.scala
index 55f61df8af..45608bc3d4 100644
--- a/test/files/pos/t640.scala
+++ b/test/files/pos/t640.scala
@@ -1,2 +1,2 @@
-@serializable class A
-@serializable class B extends A
+class A extends Serializable
+class B extends A with Serializable
diff --git a/test/files/pos/t6447.scala b/test/files/pos/t6447.scala
new file mode 100644
index 0000000000..1c0c0f2a31
--- /dev/null
+++ b/test/files/pos/t6447.scala
@@ -0,0 +1,18 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.Context
+
+class X { type T }
+
+object X {
+ // this works
+ def foo(x: X): x.T = macro fooImpl
+ def fooImpl(c: Context)(x: c.Expr[X]): c.Expr[x.value.T] = ???
+
+ // this doesn't
+ def bar(x: X, y: X): (x.T, y.T) = macro barImpl
+ def barImpl(c: Context)(x: c.Expr[X], y: c.Expr[X]): c.Expr[(x.value.T, y.value.T)] = ???
+
+ // neither does this
+ def baz(x: X)(xs: List[x.T]): Unit = macro bazImpl
+ def bazImpl(c: Context)(x: c.Expr[X])(xs: c.Expr[List[x.value.T]]): c.Expr[Unit] = ???
+}
diff --git a/test/files/pos/t6664.scala b/test/files/pos/t6664.scala
new file mode 100644
index 0000000000..7eb85f619d
--- /dev/null
+++ b/test/files/pos/t6664.scala
@@ -0,0 +1,4 @@
+final case class A(i: Int, s: String) {
+ protected def copy(s2: String): A = A(i, s2)
+ protected def copy(i2: Int): A = A(i2, s)
+}
diff --git a/test/files/pos/t6664b.scala b/test/files/pos/t6664b.scala
new file mode 100644
index 0000000000..a622866838
--- /dev/null
+++ b/test/files/pos/t6664b.scala
@@ -0,0 +1,5 @@
+object T {
+ def A(s: String): A = new A(3, s)
+ def A(i: Int): A = A(i, "abc")
+ case class A(i: Int, s: String)
+}
diff --git a/test/files/pos/t6745.scala b/test/files/pos/t6745.scala
new file mode 100644
index 0000000000..2ab8e6d39a
--- /dev/null
+++ b/test/files/pos/t6745.scala
@@ -0,0 +1,4 @@
+class Bar(val i: Int) {
+ self: Any with AnyRef =>
+ def this() = this(0)
+}
diff --git a/test/files/pos/t6966.scala b/test/files/pos/t6966.scala
new file mode 100644
index 0000000000..23adc6d0d2
--- /dev/null
+++ b/test/files/pos/t6966.scala
@@ -0,0 +1,17 @@
+import Ordering.{Byte, comparatorToOrdering}
+trait Format[T]
+trait InputCache[T]
+object CacheIvy {
+ implicit def basicInputCache[I](implicit fmt: Format[I], eqv: Equiv[I]): InputCache[I] = null
+ implicit def arrEquiv[T](implicit t: Equiv[T]): Equiv[Array[T]] = null
+ implicit def hNilCache: InputCache[HNil] = null
+ implicit def ByteArrayFormat: Format[Array[Byte]] = null
+ type :+:[H, T <: HList] = HCons[H,T]
+ implicit def hConsCache[H, T <: HList](implicit head: InputCache[H], tail: InputCache[T]): InputCache[H :+: T] = null
+ hConsCache[Array[Byte], HNil]
+}
+
+sealed trait HList
+sealed trait HNil extends HList
+object HNil extends HNil
+final class HCons[H, T <: HList](head : H, tail : T) extends HList
\ No newline at end of file
diff --git a/test/files/pos/t715.cmds b/test/files/pos/t715.cmds
deleted file mode 100644
index 2836967fca..0000000000
--- a/test/files/pos/t715.cmds
+++ /dev/null
@@ -1,2 +0,0 @@
-scalac meredith_1.scala
-scalac runner_2.scala
diff --git a/test/files/pos/t715/meredith_1.scala b/test/files/pos/t715/meredith_1.scala
index 8261b9881a..c28afb4a9b 100644
--- a/test/files/pos/t715/meredith_1.scala
+++ b/test/files/pos/t715/meredith_1.scala
@@ -3,7 +3,7 @@ package com.sap.dspace.model.othello;
import scala.xml._
trait XMLRenderer {
- type T <: Any {def getClass() : java.lang.Class[_]}
+ type T <: Any {def getClass(): java.lang.Class[_]}
val valueTypes =
List(
classOf[java.lang.Boolean],
@@ -14,21 +14,21 @@ trait XMLRenderer {
)
def value2XML(
- value : Object,
- field : java.lang.reflect.Field,
- pojo : T
- ) : Node = {
+ value: Object,
+ field: java.lang.reflect.Field,
+ pojo: T
+ ): Node = {
value match {
- case null => Text( "null" )
+ case null => Text("null")
case vUnmatched =>
if (value.isInstanceOf[java.lang.Boolean])
- Text( value.asInstanceOf[java.lang.Boolean].toString )
+ Text(value.asInstanceOf[java.lang.Boolean].toString)
else if (value.isInstanceOf[java.lang.Integer])
- Text( value.asInstanceOf[java.lang.Integer].toString )
+ Text(value.asInstanceOf[java.lang.Integer].toString)
else if (value.isInstanceOf[java.lang.Float])
- Text( value.asInstanceOf[java.lang.Float].toString )
+ Text(value.asInstanceOf[java.lang.Float].toString)
// else if (value.isInstanceOf[T])
- // pojo2XML( value.asInstanceOf[T] )
+ // pojo2XML(value.asInstanceOf[T])
else
<unmatchedType>
<theType>
@@ -42,16 +42,16 @@ trait XMLRenderer {
}
def field2XML(
- field : java.lang.reflect.Field,
- pojo : T
- ) : Elem = {
+ field: java.lang.reflect.Field,
+ pojo: T
+ ): Elem = {
- val accessible = field.isAccessible;
- field.setAccessible( true );
+ val accessible = field.isAccessible
+ field.setAccessible(true)
// BUGBUG lgm need to disambiguate on type and possibly make
// recursive call to pojo2XML
- val fldValXML = value2XML( field.get( pojo ), field, pojo );
- field.setAccessible( accessible );
+ val fldValXML = value2XML(field.get( pojo ), field, pojo)
+ field.setAccessible( accessible )
Elem(
null,
@@ -62,37 +62,37 @@ trait XMLRenderer {
)
}
- def pojo2XML( pojo : T ) : Elem = {
+ def pojo2XML(pojo: T): Elem = {
val progeny =
for (field <- pojo.getClass.getDeclaredFields)
- yield field2XML( field, pojo );
+ yield field2XML(field, pojo)
Elem(
null,
pojo.getClass.getName,
null,
TopScope,
- progeny.asInstanceOf[Array[scala.xml.Node]] : _*
+ progeny.asInstanceOf[Array[scala.xml.Node]]: _*
)
}
}
-case class POJO2XMLRenderer( recurse : Boolean )
+case class POJO2XMLRenderer(recurse: Boolean)
extends XMLRenderer {
type T = java.io.Serializable
override def value2XML(
- value : Object,
- field : java.lang.reflect.Field,
- pojo : java.io.Serializable
- ) : Node = {
- if (recurse) super.value2XML( value, field, pojo )
- else Text( value + "" )
+ value: Object,
+ field: java.lang.reflect.Field,
+ pojo: java.io.Serializable
+ ): Node = {
+ if (recurse) super.value2XML(value, field, pojo)
+ else Text(value + "")
}
}
-object thePOJO2XMLRenderer extends POJO2XMLRenderer( true ) {
+object thePOJO2XMLRenderer extends POJO2XMLRenderer(true) {
}
-object Test extends Application {
+object Test extends App {
println(com.sap.dspace.model.othello.thePOJO2XMLRenderer)
}
diff --git a/test/files/pos/t715/runner_2.scala b/test/files/pos/t715/runner_2.scala
index 1e4f40d654..d54805629a 100644
--- a/test/files/pos/t715/runner_2.scala
+++ b/test/files/pos/t715/runner_2.scala
@@ -1,3 +1,3 @@
-object Test extends Application {
+object Test extends App {
println(com.sap.dspace.model.othello.thePOJO2XMLRenderer)
}
diff --git a/test/files/pos/t911.scala b/test/files/pos/t911.scala
index 224b14cda3..cfa4f49dc1 100644
--- a/test/files/pos/t911.scala
+++ b/test/files/pos/t911.scala
@@ -1,6 +1,6 @@
object Test {
-def foo : Any = {
- case class Foo {}
- Foo;
-}
+ def foo: Any = {
+ case class Foo() {}
+ Foo;
+ }
}
diff --git a/test/files/pos/tcpoly_boundedmonad.scala b/test/files/pos/tcpoly_boundedmonad.scala
index 24a911769b..8c605dc7b6 100644
--- a/test/files/pos/tcpoly_boundedmonad.scala
+++ b/test/files/pos/tcpoly_boundedmonad.scala
@@ -1,19 +1,19 @@
trait Monad[T <: Bound[T], MyType[x <: Bound[x]], Bound[_]] {
- def map[S <: Bound[S]](f: T => S): MyType[S]
+ def map[S <: Bound[S]](f: T => S): MyType[S]
- def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
+ def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]]
- (f: T => Result[S]): Result[S]
+ (f: T => Result[S]): Result[S]
def filter(p: T => Boolean): MyType[T]
}
class Set[T <: Ordered[T]] extends Monad[T, Set, Ordered] {
- def map[S <: Ordered[S]](f: T => S): Set[S] = error("TODO")
-
- def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
+ def map[S <: Ordered[S]](f: T => S): Set[S] = sys.error("TODO")
+
+ def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]]
- (f: T => Result[S]): Result[S] = error("TODO")
-
- def filter(p: T => Boolean): Set[T] = error("TODO")
+ (f: T => Result[S]): Result[S] = sys.error("TODO")
+
+ def filter(p: T => Boolean): Set[T] = sys.error("TODO")
}
diff --git a/test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala b/test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala
index 97594d506d..f719972a17 100644
--- a/test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala
+++ b/test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala
@@ -2,15 +2,15 @@ import scala.collection.generic.GenericTraversableTemplate
import scala.collection.Iterable
class IterableOps[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2])) {
- def unzip: (CC[A1], CC[A2]) = error("foo")
+ def unzip: (CC[A1], CC[A2]) = sys.error("foo")
}
object Test {
- implicit def tupleOfIterableWrapper[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2]))
+ implicit def tupleOfIterableWrapper[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2]))
= new IterableOps[CC, A1, A2](tuple)
-
+
val t = (List(1, 2, 3), List(6, 5, 4))
tupleOfIterableWrapper(t) unzip
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/tcpoly_infer_implicit_tuple_wrapper.scala b/test/files/pos/tcpoly_infer_implicit_tuple_wrapper.scala
index 3073b298de..19243505b4 100644
--- a/test/files/pos/tcpoly_infer_implicit_tuple_wrapper.scala
+++ b/test/files/pos/tcpoly_infer_implicit_tuple_wrapper.scala
@@ -2,7 +2,7 @@ import scala.collection.generic.GenericTraversableTemplate
import scala.collection.Iterable
class IterableOps[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2])) {
- def unzip: (CC[A1], CC[A2]) = error("foo")
+ def unzip: (CC[A1], CC[A2]) = sys.error("foo")
}
object Test {
@@ -15,4 +15,4 @@ object Test {
tupleOfIterableWrapper(t) unzip
t unzip
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/tcpoly_overloaded.scala b/test/files/pos/tcpoly_overloaded.scala
index 4240074d85..4f6334685b 100644
--- a/test/files/pos/tcpoly_overloaded.scala
+++ b/test/files/pos/tcpoly_overloaded.scala
@@ -1,10 +1,10 @@
trait Monad[T <: Bound[T], MyType[x <: Bound[x]], Bound[_]] {
- def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
+ def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]]
- (f: T => Result[S]): Result[S]
- def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
+ (f: T => Result[S]): Result[S]
+ def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]]
- (f: T => Result[S], foo: String): Result[S]
+ (f: T => Result[S], foo: String): Result[S]
def flatMap[S <: Bound[S]]
(f: T => MyType[S], foo: Int): MyType[S]
}
@@ -12,14 +12,14 @@ trait Monad[T <: Bound[T], MyType[x <: Bound[x]], Bound[_]] {
trait Test {
def moo: MList[Int]
class MList[T](el: T) extends Monad[T, List, Any] {
- def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
+ def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]]
- (f: T => Result[S]): Result[S] = error("foo")
- def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
+ (f: T => Result[S]): Result[S] = sys.error("foo")
+ def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_],
Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]]
- (f: T => Result[S], foo: String): Result[S] = error("foo")
+ (f: T => Result[S], foo: String): Result[S] = sys.error("foo")
def flatMap[S]
- (f: T => List[S], foo: Int): List[S] = error("foo")
+ (f: T => List[S], foo: Int): List[S] = sys.error("foo")
}
val l: MList[String] = moo.flatMap[String, List, Any, MList]((x: Int) => new MList("String"))
}
diff --git a/test/files/pos/tcpoly_subst.scala b/test/files/pos/tcpoly_subst.scala
index f8ddb9a715..88cc4d0610 100644
--- a/test/files/pos/tcpoly_subst.scala
+++ b/test/files/pos/tcpoly_subst.scala
@@ -1,4 +1,4 @@
object test {
- def make[m[x], b]: m[b] = error("foo")
+ def make[m[x], b]: m[b] = sys.error("foo")
val lst: List[Int] = make[List, Int]
}
diff --git a/test/files/pos/tcpoly_variance_pos.scala b/test/files/pos/tcpoly_variance_pos.scala
index b641716d50..b63abce202 100644
--- a/test/files/pos/tcpoly_variance_pos.scala
+++ b/test/files/pos/tcpoly_variance_pos.scala
@@ -1,7 +1,7 @@
class A[m[+x]] {
- def str: m[Object] = error("foo")
+ def str: m[Object] = sys.error("foo")
}
class B[m[+x]] extends A[m] {
- override def str: m[String] = error("foo")
+ override def str: m[String] = sys.error("foo")
}
diff --git a/test/files/pos/tcpoly_wildcards.scala b/test/files/pos/tcpoly_wildcards.scala
index d3bb86b591..f6d1b666d0 100644
--- a/test/files/pos/tcpoly_wildcards.scala
+++ b/test/files/pos/tcpoly_wildcards.scala
@@ -1,3 +1,3 @@
trait test[b[_,_]] {
- def moo[a[_, _]] = error("a")
+ def moo[a[_, _]] = sys.error("a")
}
diff --git a/test/files/pos/ticket2251.scala b/test/files/pos/ticket2251.scala
index b3afee4ea9..c220e85350 100644
--- a/test/files/pos/ticket2251.scala
+++ b/test/files/pos/ticket2251.scala
@@ -22,4 +22,18 @@ lub of List(D, C) is B[_2] forSome { type _2 >: D with C{} <: B[_1] forSome { ty
// should be: B[X] forSome {type X <: B[X]} -- can this be done automatically? for now, just detect f-bounded polymorphism and fall back to more coarse approximation
val data: List[A] = List(new C, new D)
+
+ val data2 = List(new C, new D)
+
+ val data3: List[B[X] forSome { type X <: B[_ <: A] }] = List(new C, new D)
+
+ // Not yet --
+ // val data4: List[B[X] forSome { type X <: B[X] }] = List(new C, new D)
+ // <console>:7: error: type mismatch;
+ // found : List[B[_ >: D with C <: B[_ >: D with C <: A]]]
+ // required: List[B[X] forSome { type X <: B[X] }]
+ // val data4: List[B[X] forSome { type X <: B[X] }] = List(new C, new D)
+
+ // works
+ val data5 = List[B[X] forSome { type X <: B[X] }](new C, new D)
}
diff --git a/test/files/pos/typealias_dubious.scala b/test/files/pos/typealias_dubious.scala
index 587453a037..cdba1a64d0 100644
--- a/test/files/pos/typealias_dubious.scala
+++ b/test/files/pos/typealias_dubious.scala
@@ -1,15 +1,15 @@
class MailBox {
- //class Message
+ //class Message
type Message = AnyRef
-}
-
+}
+
abstract class Actor {
private val in = new MailBox
- def send(msg: in.Message) = error("foo")
+ def send(msg: in.Message) = sys.error("foo")
- def unstable: Actor = error("foo")
+ def unstable: Actor = sys.error("foo")
- def dubiousSend(msg: MailBox#Message) =
+ def dubiousSend(msg: MailBox#Message) =
unstable.send(msg) // in.Message becomes unstable.Message, but that's ok since Message is a concrete type member
-}
+}
diff --git a/test/files/pos/variances-flip.scala b/test/files/pos/variances-flip.scala
new file mode 100644
index 0000000000..c3ea7b571d
--- /dev/null
+++ b/test/files/pos/variances-flip.scala
@@ -0,0 +1,7 @@
+trait Foo[-A, +B, -C, +D] {
+ private[this] def b: B = ???
+ private[this] def d: D = ???
+
+ def f(p1: B => A, p2: D => C) = g(p1(b), p2(d))
+ def g(x: A, y: C) = ((b, d))
+}
diff --git a/test/files/pos/variances-local.scala b/test/files/pos/variances-local.scala
new file mode 100644
index 0000000000..35e395095c
--- /dev/null
+++ b/test/files/pos/variances-local.scala
@@ -0,0 +1,7 @@
+class Foo1[+T] {
+ private[this] type MyType = T
+}
+
+class Foo2[+T] {
+ protected[this] type MyType = T
+}
diff --git a/test/files/pos/virtpatmat_binding_opt.scala b/test/files/pos/virtpatmat_binding_opt.scala
index 962e3d7dbe..8ec931fe78 100644
--- a/test/files/pos/virtpatmat_binding_opt.scala
+++ b/test/files/pos/virtpatmat_binding_opt.scala
@@ -4,8 +4,8 @@ class Test {
case that: Test2 =>
println(that)
this
- case _ => error("meh")
+ case _ => sys.error("meh")
}
}
-class Test2 extends Test
\ No newline at end of file
+class Test2 extends Test
diff --git a/test/files/presentation/callcc-interpreter.check b/test/files/presentation/callcc-interpreter.check
index 3a08e2a2ea..af0154fe60 100644
--- a/test/files/presentation/callcc-interpreter.check
+++ b/test/files/presentation/callcc-interpreter.check
@@ -3,7 +3,7 @@ reload: CallccInterpreter.scala
askTypeCompletion at CallccInterpreter.scala(51,38)
================================================================================
[response] aksTypeCompletion at (51,38)
-retrieved 64 members
+retrieved 63 members
[accessible: true] `class AddcallccInterpreter.Add`
[accessible: true] `class AppcallccInterpreter.App`
[accessible: true] `class CcccallccInterpreter.Ccc`
@@ -50,7 +50,6 @@ retrieved 64 members
[accessible: true] `method wait()Unit`
[accessible: true] `method wait(x$1: Long)Unit`
[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method x=> callccInterpreter.type`
[accessible: true] `method →[B](y: B)(callccInterpreter.type, B)`
[accessible: true] `object WrongcallccInterpreter.Wrong.type`
[accessible: true] `trait TermcallccInterpreter.Term`
@@ -60,7 +59,8 @@ retrieved 64 members
[accessible: true] `type NamecallccInterpreter.Name`
[accessible: true] `value __leftOfArrowcallccInterpreter.type`
[accessible: true] `value __resultOfEnsuringcallccInterpreter.type`
-[accessible: true] `value selfAny`
+[accessible: true] `value __stringToFormatcallccInterpreter.type`
+[accessible: true] `value __thingToAddcallccInterpreter.type`
[accessible: true] `value term0callccInterpreter.App`
[accessible: true] `value term1callccInterpreter.App`
[accessible: true] `value term2callccInterpreter.Add`
diff --git a/test/files/presentation/doc/doc.scala b/test/files/presentation/doc/doc.scala
index 371b825026..7a2eb9a588 100755
--- a/test/files/presentation/doc/doc.scala
+++ b/test/files/presentation/doc/doc.scala
@@ -37,12 +37,24 @@ object Test extends InteractiveTest {
prepre + docComment(nTags) + prepost + post
}
-
-
override lazy val compiler = {
prepareSettings(settings)
- new Global(settings, compilerReporter) with MemberLookupBase with CommentFactoryBase {
+ new Global(settings, compilerReporter) with MemberLookupBase with CommentFactoryBase with doc.ScaladocGlobalTrait {
+ outer =>
+
val global: this.type = this
+
+ override lazy val analyzer = new {
+ val global: outer.type = outer
+ } with doc.ScaladocAnalyzer with InteractiveAnalyzer {
+ override def newTyper(context: Context): InteractiveTyper with ScaladocTyper =
+ new Typer(context) with InteractiveTyper with ScaladocTyper
+ }
+
+ override lazy val loaders = new scala.tools.nsc.symtab.SymbolLoaders {
+ val global: outer.type = outer
+ }
+
def chooseLink(links: List[LinkTo]): LinkTo = links.head
def internalLink(sym: Symbol, site: Symbol) = None
def toString(link: LinkTo) = link.toString
@@ -119,7 +131,7 @@ object Test extends InteractiveTest {
case s: Seq[_] => s exists (existsText(_, text))
case p: Product => p.productIterator exists (existsText(_, text))
}
- val (derived, base) = compiler.ask { () =>
+ val (derived, base) = compiler.ask { () =>
val derived = definitions.RootPackage.info.decl(newTermName("p")).info.decl(newTypeName("Derived"))
(derived, derived.ancestors(0))
}
diff --git a/test/files/presentation/ide-bug-1000349.check b/test/files/presentation/ide-bug-1000349.check
index 44a3207d75..0040300083 100644
--- a/test/files/presentation/ide-bug-1000349.check
+++ b/test/files/presentation/ide-bug-1000349.check
@@ -3,7 +3,7 @@ reload: CompletionOnEmptyArgMethod.scala
askTypeCompletion at CompletionOnEmptyArgMethod.scala(2,17)
================================================================================
[response] aksTypeCompletion at (2,17)
-retrieved 37 members
+retrieved 36 members
[accessible: true] `method !=(x$1: Any)Boolean`
[accessible: true] `method !=(x$1: AnyRef)Boolean`
[accessible: true] `method ##()Int`
@@ -32,9 +32,9 @@ retrieved 37 members
[accessible: true] `method wait()Unit`
[accessible: true] `method wait(x$1: Long)Unit`
[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method x=> Foo`
[accessible: true] `method →[B](y: B)(Foo, B)`
[accessible: true] `value __leftOfArrowFoo`
[accessible: true] `value __resultOfEnsuringFoo`
-[accessible: true] `value selfAny`
+[accessible: true] `value __stringToFormatFoo`
+[accessible: true] `value __thingToAddFoo`
================================================================================
diff --git a/test/files/presentation/ide-bug-1000475.check b/test/files/presentation/ide-bug-1000475.check
index 34c3b557d8..7866e4af15 100644
--- a/test/files/presentation/ide-bug-1000475.check
+++ b/test/files/presentation/ide-bug-1000475.check
@@ -3,7 +3,7 @@ reload: Foo.scala
askTypeCompletion at Foo.scala(3,7)
================================================================================
[response] aksTypeCompletion at (3,7)
-retrieved 36 members
+retrieved 35 members
[accessible: true] `method !=(x$1: Any)Boolean`
[accessible: true] `method !=(x$1: AnyRef)Boolean`
[accessible: true] `method ##()Int`
@@ -29,11 +29,11 @@ retrieved 36 members
[accessible: true] `method wait()Unit`
[accessible: true] `method wait(x$1: Long)Unit`
[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method x=> Object`
[accessible: true] `method →[B](y: B)(Object, B)`
[accessible: true] `value __leftOfArrowObject`
[accessible: true] `value __resultOfEnsuringObject`
-[accessible: true] `value selfAny`
+[accessible: true] `value __stringToFormatObject`
+[accessible: true] `value __thingToAddObject`
[accessible: false] `method clone()Object`
[accessible: false] `method finalize()Unit`
================================================================================
@@ -41,7 +41,7 @@ retrieved 36 members
askTypeCompletion at Foo.scala(6,10)
================================================================================
[response] aksTypeCompletion at (6,10)
-retrieved 36 members
+retrieved 35 members
[accessible: true] `method !=(x$1: Any)Boolean`
[accessible: true] `method !=(x$1: AnyRef)Boolean`
[accessible: true] `method ##()Int`
@@ -67,11 +67,11 @@ retrieved 36 members
[accessible: true] `method wait()Unit`
[accessible: true] `method wait(x$1: Long)Unit`
[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method x=> Object`
[accessible: true] `method →[B](y: B)(Object, B)`
[accessible: true] `value __leftOfArrowObject`
[accessible: true] `value __resultOfEnsuringObject`
-[accessible: true] `value selfAny`
+[accessible: true] `value __stringToFormatObject`
+[accessible: true] `value __thingToAddObject`
[accessible: false] `method clone()Object`
[accessible: false] `method finalize()Unit`
================================================================================
@@ -79,7 +79,7 @@ retrieved 36 members
askTypeCompletion at Foo.scala(7,7)
================================================================================
[response] aksTypeCompletion at (7,7)
-retrieved 36 members
+retrieved 35 members
[accessible: true] `method !=(x$1: Any)Boolean`
[accessible: true] `method !=(x$1: AnyRef)Boolean`
[accessible: true] `method ##()Int`
@@ -105,11 +105,11 @@ retrieved 36 members
[accessible: true] `method wait()Unit`
[accessible: true] `method wait(x$1: Long)Unit`
[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method x=> Object`
[accessible: true] `method →[B](y: B)(Object, B)`
[accessible: true] `value __leftOfArrowObject`
[accessible: true] `value __resultOfEnsuringObject`
-[accessible: true] `value selfAny`
+[accessible: true] `value __stringToFormatObject`
+[accessible: true] `value __thingToAddObject`
[accessible: false] `method clone()Object`
[accessible: false] `method finalize()Unit`
================================================================================
diff --git a/test/files/presentation/ide-bug-1000531.check b/test/files/presentation/ide-bug-1000531.check
index 4be98a6b21..18ecd4b536 100644
--- a/test/files/presentation/ide-bug-1000531.check
+++ b/test/files/presentation/ide-bug-1000531.check
@@ -3,7 +3,7 @@ reload: CrashOnLoad.scala
askTypeCompletion at CrashOnLoad.scala(6,12)
================================================================================
[response] aksTypeCompletion at (6,12)
-retrieved 126 members
+retrieved 124 members
[accessible: true] `class GroupedIteratorIterator[B]#GroupedIterator`
[accessible: true] `method !=(x$1: Any)Boolean`
[accessible: true] `method !=(x$1: AnyRef)Boolean`
@@ -12,14 +12,13 @@ retrieved 126 members
[accessible: true] `method ++[B >: B](that: => scala.collection.GenTraversableOnce[B])Iterator[B]`
[accessible: true] `method ->[B](y: B)(java.util.Iterator[B], B)`
[accessible: true] `method /:[B](z: B)(op: (B, B) => B)B`
-[accessible: true] `method /:\[A1 >: B](z: A1)(op: (A1, A1) => A1)A1`
[accessible: true] `method :\[B](z: B)(op: (B, B) => B)B`
[accessible: true] `method ==(x$1: Any)Boolean`
[accessible: true] `method ==(x$1: AnyRef)Boolean`
[accessible: true] `method addString(b: StringBuilder)StringBuilder`
[accessible: true] `method addString(b: StringBuilder, sep: String)StringBuilder`
[accessible: true] `method addString(b: StringBuilder, start: String, sep: String, end: String)StringBuilder`
-[accessible: true] `method aggregate[B](z: B)(seqop: (B, B) => B, combop: (B, B) => B)B`
+[accessible: true] `method aggregate[B](z: => B)(seqop: (B, B) => B, combop: (B, B) => B)B`
[accessible: true] `method asInstanceOf[T0]=> T0`
[accessible: true] `method buffered=> scala.collection.BufferedIterator[B]`
[accessible: true] `method collectFirst[B](pf: PartialFunction[B,B])Option[B]`
@@ -115,14 +114,14 @@ retrieved 126 members
[accessible: true] `method wait(x$1: Long)Unit`
[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
[accessible: true] `method withFilter(p: B => Boolean)Iterator[B]`
-[accessible: true] `method x=> java.util.Iterator[B]`
[accessible: true] `method zipAll[B, A1 >: B, B1 >: B](that: Iterator[B], thisElem: A1, thatElem: B1)Iterator[(A1, B1)]`
[accessible: true] `method zipWithIndex=> Iterator[(B, Int)]`
[accessible: true] `method zip[B](that: Iterator[B])Iterator[(B, B)]`
[accessible: true] `method →[B](y: B)(java.util.Iterator[B], B)`
[accessible: true] `value __leftOfArrowjava.util.Iterator[B]`
[accessible: true] `value __resultOfEnsuringjava.util.Iterator[B]`
-[accessible: true] `value selfAny`
+[accessible: true] `value __stringToFormatjava.util.Iterator[B]`
+[accessible: true] `value __thingToAddjava.util.Iterator[B]`
[accessible: false] `method clone()Object`
[accessible: false] `method finalize()Unit`
[accessible: false] `method reversed=> List[B]`
diff --git a/test/files/presentation/implicit-member.check b/test/files/presentation/implicit-member.check
index 05d6f61699..6a23facc78 100644
--- a/test/files/presentation/implicit-member.check
+++ b/test/files/presentation/implicit-member.check
@@ -3,7 +3,7 @@ reload: ImplicitMember.scala
askTypeCompletion at ImplicitMember.scala(7,7)
================================================================================
[response] aksTypeCompletion at (7,7)
-retrieved 39 members
+retrieved 38 members
[accessible: true] `class AppliedImplicitImplicit.AppliedImplicit`
[accessible: true] `method !=(x$1: Any)Boolean`
[accessible: true] `method !=(x$1: AnyRef)Boolean`
@@ -33,10 +33,10 @@ retrieved 39 members
[accessible: true] `method wait()Unit`
[accessible: true] `method wait(x$1: Long)Unit`
[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method x=> Implicit.type`
[accessible: true] `method →[B](y: B)(Implicit.type, B)`
[accessible: true] `value __leftOfArrowImplicit.type`
[accessible: true] `value __resultOfEnsuringImplicit.type`
-[accessible: true] `value selfAny`
+[accessible: true] `value __stringToFormatImplicit.type`
+[accessible: true] `value __thingToAddImplicit.type`
[accessible: true] `value xImplicit.type`
================================================================================
diff --git a/test/files/presentation/ping-pong.check b/test/files/presentation/ping-pong.check
index b666d51de5..c7a5d0b5d1 100644
--- a/test/files/presentation/ping-pong.check
+++ b/test/files/presentation/ping-pong.check
@@ -3,7 +3,7 @@ reload: PingPong.scala
askTypeCompletion at PingPong.scala(10,23)
================================================================================
[response] aksTypeCompletion at (10,23)
-retrieved 40 members
+retrieved 39 members
[accessible: true] `method !=(x$1: Any)Boolean`
[accessible: true] `method !=(x$1: AnyRef)Boolean`
[accessible: true] `method ##()Int`
@@ -30,12 +30,12 @@ retrieved 40 members
[accessible: true] `method wait()Unit`
[accessible: true] `method wait(x$1: Long)Unit`
[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method x=> Pong`
[accessible: true] `method →[B](y: B)(Pong, B)`
[accessible: true] `value __leftOfArrowPong`
[accessible: true] `value __resultOfEnsuringPong`
+[accessible: true] `value __stringToFormatPong`
+[accessible: true] `value __thingToAddPong`
[accessible: true] `value nameString`
-[accessible: true] `value selfAny`
[accessible: false] `method clone()Object`
[accessible: false] `method finalize()Unit`
[accessible: false] `value pingPing`
@@ -44,7 +44,7 @@ retrieved 40 members
askTypeCompletion at PingPong.scala(19,20)
================================================================================
[response] aksTypeCompletion at (19,20)
-retrieved 40 members
+retrieved 39 members
[accessible: true] `method !=(x$1: Any)Boolean`
[accessible: true] `method !=(x$1: AnyRef)Boolean`
[accessible: true] `method ##()Int`
@@ -73,12 +73,12 @@ retrieved 40 members
[accessible: true] `method wait()Unit`
[accessible: true] `method wait(x$1: Long)Unit`
[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method x=> Ping`
[accessible: true] `method →[B](y: B)(Ping, B)`
[accessible: true] `value __leftOfArrowPing`
[accessible: true] `value __resultOfEnsuringPing`
+[accessible: true] `value __stringToFormatPing`
+[accessible: true] `value __thingToAddPing`
[accessible: true] `value pongPong`
-[accessible: true] `value selfAny`
[accessible: false] `method clone()Object`
[accessible: false] `method finalize()Unit`
================================================================================
diff --git a/test/files/presentation/t5708.check b/test/files/presentation/t5708.check
index c6d4762635..4fc7a56426 100644
--- a/test/files/presentation/t5708.check
+++ b/test/files/presentation/t5708.check
@@ -3,7 +3,7 @@ reload: Completions.scala
askTypeCompletion at Completions.scala(17,9)
================================================================================
[response] aksTypeCompletion at (17,9)
-retrieved 44 members
+retrieved 43 members
[accessible: true] `lazy value fooInt`
[accessible: true] `method !=(x$1: Any)Boolean`
[accessible: true] `method !=(x$1: AnyRef)Boolean`
@@ -31,13 +31,13 @@ retrieved 44 members
[accessible: true] `method wait()Unit`
[accessible: true] `method wait(x$1: Long)Unit`
[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method x=> test.Compat.type`
[accessible: true] `method →[B](y: B)(test.Compat.type, B)`
[accessible: true] `value CONST_STRINGString("constant")`
[accessible: true] `value __leftOfArrowtest.Compat.type`
[accessible: true] `value __resultOfEnsuringtest.Compat.type`
+[accessible: true] `value __stringToFormattest.Compat.type`
+[accessible: true] `value __thingToAddtest.Compat.type`
[accessible: true] `value pkgPrivateVString`
-[accessible: true] `value selfAny`
[accessible: false] `method clone()Object`
[accessible: false] `method finalize()Unit`
[accessible: false] `method privateM=> String`
diff --git a/test/files/presentation/visibility.check b/test/files/presentation/visibility.check
index 3026e58f7e..e9b349ac06 100644
--- a/test/files/presentation/visibility.check
+++ b/test/files/presentation/visibility.check
@@ -3,7 +3,7 @@ reload: Completions.scala
askTypeCompletion at Completions.scala(14,12)
================================================================================
[response] aksTypeCompletion at (14,12)
-retrieved 42 members
+retrieved 41 members
[accessible: true] `method !=(x$1: Any)Boolean`
[accessible: true] `method !=(x$1: AnyRef)Boolean`
[accessible: true] `method ##()Int`
@@ -36,18 +36,18 @@ retrieved 42 members
[accessible: true] `method wait()Unit`
[accessible: true] `method wait(x$1: Long)Unit`
[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method x=> accessibility.Foo`
[accessible: true] `method →[B](y: B)(accessibility.Foo, B)`
[accessible: true] `value __leftOfArrowaccessibility.Foo`
[accessible: true] `value __resultOfEnsuringaccessibility.Foo`
-[accessible: true] `value selfAny`
+[accessible: true] `value __stringToFormataccessibility.Foo`
+[accessible: true] `value __thingToAddaccessibility.Foo`
[accessible: false] `method secretPrivateThis()Unit`
================================================================================
askTypeCompletion at Completions.scala(16,11)
================================================================================
[response] aksTypeCompletion at (16,11)
-retrieved 42 members
+retrieved 41 members
[accessible: true] `method !=(x$1: Any)Boolean`
[accessible: true] `method !=(x$1: AnyRef)Boolean`
[accessible: true] `method ##()Int`
@@ -81,17 +81,17 @@ retrieved 42 members
[accessible: true] `method wait()Unit`
[accessible: true] `method wait(x$1: Long)Unit`
[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method x=> accessibility.Foo`
[accessible: true] `method →[B](y: B)(accessibility.Foo, B)`
[accessible: true] `value __leftOfArrowaccessibility.Foo`
[accessible: true] `value __resultOfEnsuringaccessibility.Foo`
-[accessible: true] `value selfAny`
+[accessible: true] `value __stringToFormataccessibility.Foo`
+[accessible: true] `value __thingToAddaccessibility.Foo`
================================================================================
askTypeCompletion at Completions.scala(22,11)
================================================================================
[response] aksTypeCompletion at (22,11)
-retrieved 42 members
+retrieved 41 members
[accessible: true] `method !=(x$1: Any)Boolean`
[accessible: true] `method !=(x$1: AnyRef)Boolean`
[accessible: true] `method ##()Int`
@@ -124,18 +124,18 @@ retrieved 42 members
[accessible: true] `method wait()Unit`
[accessible: true] `method wait(x$1: Long)Unit`
[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method x=> accessibility.AccessibilityChecks`
[accessible: true] `method →[B](y: B)(accessibility.AccessibilityChecks, B)`
[accessible: true] `value __leftOfArrowaccessibility.AccessibilityChecks`
[accessible: true] `value __resultOfEnsuringaccessibility.AccessibilityChecks`
-[accessible: true] `value selfAny`
+[accessible: true] `value __stringToFormataccessibility.AccessibilityChecks`
+[accessible: true] `value __thingToAddaccessibility.AccessibilityChecks`
[accessible: false] `method secretPrivate()Unit`
================================================================================
askTypeCompletion at Completions.scala(28,10)
================================================================================
[response] aksTypeCompletion at (28,10)
-retrieved 42 members
+retrieved 41 members
[accessible: true] `method !=(x$1: Any)Boolean`
[accessible: true] `method !=(x$1: AnyRef)Boolean`
[accessible: true] `method ##()Int`
@@ -164,11 +164,11 @@ retrieved 42 members
[accessible: true] `method wait()Unit`
[accessible: true] `method wait(x$1: Long)Unit`
[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method x=> accessibility.Foo`
[accessible: true] `method →[B](y: B)(accessibility.Foo, B)`
[accessible: true] `value __leftOfArrowaccessibility.Foo`
[accessible: true] `value __resultOfEnsuringaccessibility.Foo`
-[accessible: true] `value selfAny`
+[accessible: true] `value __stringToFormataccessibility.Foo`
+[accessible: true] `value __thingToAddaccessibility.Foo`
[accessible: false] `method clone()Object`
[accessible: false] `method finalize()Unit`
[accessible: false] `method secretPrivate()Unit`
@@ -179,7 +179,7 @@ retrieved 42 members
askTypeCompletion at Completions.scala(37,8)
================================================================================
[response] aksTypeCompletion at (37,8)
-retrieved 42 members
+retrieved 41 members
[accessible: true] `method !=(x$1: Any)Boolean`
[accessible: true] `method !=(x$1: AnyRef)Boolean`
[accessible: true] `method ##()Int`
@@ -207,11 +207,11 @@ retrieved 42 members
[accessible: true] `method wait()Unit`
[accessible: true] `method wait(x$1: Long)Unit`
[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
-[accessible: true] `method x=> accessibility.Foo`
[accessible: true] `method →[B](y: B)(accessibility.Foo, B)`
[accessible: true] `value __leftOfArrowaccessibility.Foo`
[accessible: true] `value __resultOfEnsuringaccessibility.Foo`
-[accessible: true] `value selfAny`
+[accessible: true] `value __stringToFormataccessibility.Foo`
+[accessible: true] `value __thingToAddaccessibility.Foo`
[accessible: false] `method clone()Object`
[accessible: false] `method finalize()Unit`
[accessible: false] `method secretPrivate()Unit`
diff --git a/test/files/run/Course-2002-07.scala b/test/files/run/Course-2002-07.scala
index 7848ae3e8e..055ff74d81 100644
--- a/test/files/run/Course-2002-07.scala
+++ b/test/files/run/Course-2002-07.scala
@@ -16,13 +16,13 @@ object M0 {
def isNumber: Boolean = true;
def isSum: Boolean = false;
def numValue: Int = n;
- def leftOp: Expr = error("Number.leftOp");
- def rightOp: Expr = error("Number.rightOp");
+ def leftOp: Expr = sys.error("Number.leftOp");
+ def rightOp: Expr = sys.error("Number.rightOp");
}
class Sum(e1: Expr, e2: Expr) extends Expr {
def isNumber: Boolean = false;
def isSum: Boolean = true;
- def numValue: Int = error("Sum.numValue");
+ def numValue: Int = sys.error("Sum.numValue");
def leftOp: Expr = e1;
def rightOp: Expr = e2;
}
@@ -30,7 +30,7 @@ object M0 {
class Prod(e1: Expr, e2: Expr) extends Expr {
def isNumber: Boolean = false;
def isSum: Boolean = false;
- def numValue: Int = error("Prod.numValue");
+ def numValue: Int = sys.error("Prod.numValue");
def leftOp: Expr = e1;
def rightOp: Expr = e2;
}
@@ -38,15 +38,15 @@ object M0 {
class Var(x: String) extends Expr {
def isNumber: Boolean = false;
def isSum: Boolean = false;
- def numValue: Int = error("Var.numValue");
- def leftOp: Expr = error("Var.leftOp");
- def rightOp: Expr = error("Var.rightOp");
+ def numValue: Int = sys.error("Var.numValue");
+ def leftOp: Expr = sys.error("Var.leftOp");
+ def rightOp: Expr = sys.error("Var.rightOp");
}
def eval(e: Expr): Int = {
if (e.isNumber) e.numValue
else if (e.isSum) eval(e.leftOp) + eval(e.rightOp)
- else error("unknown expression")
+ else sys.error("unknown expression")
}
def test = {
@@ -375,7 +375,7 @@ object M9 {
object MA {
def lookup[k,v](xs: List[Pair[k,v]], k: k): v = xs match {
- case List() => error("no value for " + k)
+ case List() => sys.error("no value for " + k)
case Pair(k1,v1) :: xs1 => if (k1 == k) v1 else lookup(xs1, k)
}
@@ -410,7 +410,7 @@ object MA {
def eval(e: Expr): Int = e match {
case Number(n) => n
- case Var(_) => error("cannot evaluate variable")
+ case Var(_) => sys.error("cannot evaluate variable")
case Sum(e1, e2) => eval(e1) + eval(e2)
case Prod(e1, e2) => eval(e1) * eval(e2)
}
@@ -453,7 +453,7 @@ object Utils {
if (y == 1) x else if (y % 2 == 0) power0(x*x,y/2) else x*power0(x, y-1);
def power(x: Int, y: Int): Int = (x,y) match {
- case Pair(0,0) => error("power(0,0)")
+ case Pair(0,0) => sys.error("power(0,0)")
case Pair(0,_) => 0
case Pair(1,_) => 1
case Pair(_,0) => 1
@@ -463,7 +463,7 @@ object Utils {
}
def lookup(entries: List[(String,Int)], key: String): Int = entries match {
- case List() => error("no value for " + key)
+ case List() => sys.error("no value for " + key)
case Pair(k,v) :: _ if (k == key) => v
case _ :: rest => lookup(rest, key)
}
diff --git a/test/files/run/Course-2002-08.scala b/test/files/run/Course-2002-08.scala
index 85a83e0146..38b8363661 100644
--- a/test/files/run/Course-2002-08.scala
+++ b/test/files/run/Course-2002-08.scala
@@ -33,7 +33,7 @@ object M1 {
if (0 < amount && amount <= balance) {
balance = balance - amount;
balance
- } else error("insufficient funds");
+ } else sys.error("insufficient funds");
}
def test0 = {
@@ -520,7 +520,7 @@ abstract class CircuitSimulator() extends BasicCircuitSimulator() {
val w1 = new Wire();
val w2 = new Wire();
val w3 = new Wire();
-
+
andGate(in, ctrl(1), w3);
andGate(in, ctrl(1), w2);
andGate(in, ctrlN(1), w1);
diff --git a/test/files/run/Course-2002-09.scala b/test/files/run/Course-2002-09.scala
index 384a91efd8..87f91111d8 100644
--- a/test/files/run/Course-2002-09.scala
+++ b/test/files/run/Course-2002-09.scala
@@ -8,8 +8,8 @@ trait Constraint {
}
object NoConstraint extends Constraint {
- def newValue: Unit = error("NoConstraint.newValue");
- def dropValue: Unit = error("NoConstraint.dropValue");
+ def newValue: Unit = sys.error("NoConstraint.newValue");
+ def dropValue: Unit = sys.error("NoConstraint.dropValue");
}
class Adder(a1: Quantity,a2: Quantity,sum: Quantity) extends Constraint {
@@ -47,7 +47,7 @@ class Multiplier(m1: Quantity, m2: Quantity, prod: Quantity)
class Squarer(square: Quantity, root: Quantity) extends Constraint {
def newValue: Unit = Pair(square.getValue, root.getValue) match {
- case Pair(Some(x), _ )if (x < 0) => error("Square of negative number")
+ case Pair(Some(x), _ )if (x < 0) => sys.error("Square of negative number")
case Pair(Some(x), _ ) => root.setValue(Math.sqrt(x), this)
case Pair(_ , Some(x)) => square.setValue(x*x, this)
case _ =>
@@ -72,8 +72,8 @@ class Eq(a: Quantity, b: Quantity) extends Constraint {
}
class Constant(q: Quantity, v: Double) extends Constraint {
- def newValue: Unit = error("Constant.newValue");
- def dropValue: Unit = error("Constant.dropValue");
+ def newValue: Unit = sys.error("Constant.newValue");
+ def dropValue: Unit = sys.error("Constant.dropValue");
q connect this;
q.setValue(v, this);
}
@@ -100,7 +100,7 @@ class Quantity() {
def setValue(v: Double, setter: Constraint) = value match {
case Some(v1) =>
- if (v != v1) error("Error! contradiction: " + v + " and " + v1);
+ if (v != v1) sys.error("Error! contradiction: " + v + " and " + v1);
case None =>
informant = setter; value = Some(v);
for (c <- constraints; if !(c == informant)) {
diff --git a/test/files/run/Course-2002-13.scala b/test/files/run/Course-2002-13.scala
index c266af8c32..4bd3614fb0 100644
--- a/test/files/run/Course-2002-13.scala
+++ b/test/files/run/Course-2002-13.scala
@@ -42,7 +42,7 @@ object Terms {
}
case class Binding(name: String, term: Term) {
- term match { case Var(n) if (name == n) => error("bad binding") case _ => () }
+ term match { case Var(n) if (name == n) => sys.error("bad binding") case _ => () }
override def toString() = name + " = " + term;
}
@@ -168,7 +168,7 @@ class Parser(s: String) {
var token: String = it.next;
- def syntaxError(msg: String): Unit = error(msg + ", but " + token + " found");
+ def syntaxError(msg: String): Unit = sys.error(msg + ", but " + token + " found");
def rep[a](p: => a): List[a] = {
val t = p;
diff --git a/test/files/run/all-overridden.check b/test/files/run/all-overridden.check
new file mode 100644
index 0000000000..1b620b1176
--- /dev/null
+++ b/test/files/run/all-overridden.check
@@ -0,0 +1 @@
+method g
diff --git a/test/files/run/all-overridden.scala b/test/files/run/all-overridden.scala
new file mode 100644
index 0000000000..1b798ef748
--- /dev/null
+++ b/test/files/run/all-overridden.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+
+object Test {
+ trait Foo { def f: Int = 5 ; def g: Int }
+ trait Bar extends Foo { def f: Int ; def g: Int = 5 }
+
+ def main(args: Array[String]): Unit = {
+ // We should see g, but not f or $init$.
+ typeOf[Bar].declarations.toList.flatMap(_.allOverriddenSymbols) foreach println
+ }
+}
diff --git a/test/files/run/analyzerPlugins.check b/test/files/run/analyzerPlugins.check
index 7d8d181306..297bd36bae 100644
--- a/test/files/run/analyzerPlugins.check
+++ b/test/files/run/analyzerPlugins.check
@@ -19,27 +19,27 @@ canAdaptAnnotations(Trees$Typed, Any) [1]
canAdaptAnnotations(Trees$Typed, Int) [1]
lub(List(Int @testAnn, Int)) [1]
pluginsPt(?, Trees$Annotated) [7]
-pluginsPt(?, Trees$Apply) [8]
+pluginsPt(?, Trees$Apply) [9]
pluginsPt(?, Trees$ApplyImplicitView) [2]
pluginsPt(?, Trees$Assign) [7]
-pluginsPt(?, Trees$Block) [7]
+pluginsPt(?, Trees$Block) [4]
pluginsPt(?, Trees$ClassDef) [2]
pluginsPt(?, Trees$DefDef) [14]
-pluginsPt(?, Trees$Ident) [49]
+pluginsPt(?, Trees$Ident) [51]
pluginsPt(?, Trees$If) [2]
-pluginsPt(?, Trees$Literal) [20]
+pluginsPt(?, Trees$Literal) [16]
pluginsPt(?, Trees$New) [5]
pluginsPt(?, Trees$PackageDef) [1]
pluginsPt(?, Trees$Return) [1]
-pluginsPt(?, Trees$Select) [51]
+pluginsPt(?, Trees$Select) [52]
pluginsPt(?, Trees$Super) [2]
pluginsPt(?, Trees$This) [20]
-pluginsPt(?, Trees$TypeApply) [3]
+pluginsPt(?, Trees$TypeApply) [4]
pluginsPt(?, Trees$TypeBoundsTree) [2]
pluginsPt(?, Trees$TypeDef) [1]
-pluginsPt(?, Trees$TypeTree) [37]
+pluginsPt(?, Trees$TypeTree) [39]
pluginsPt(?, Trees$Typed) [1]
-pluginsPt(?, Trees$ValDef) [23]
+pluginsPt(?, Trees$ValDef) [21]
pluginsPt(Any, Trees$Literal) [2]
pluginsPt(Any, Trees$Typed) [1]
pluginsPt(Array[Any], Trees$ArrayValue) [1]
@@ -53,7 +53,7 @@ pluginsPt(Int @testAnn, Trees$Literal) [1]
pluginsPt(Int, Trees$Apply) [1]
pluginsPt(Int, Trees$Ident) [2]
pluginsPt(Int, Trees$If) [1]
-pluginsPt(Int, Trees$Literal) [6]
+pluginsPt(Int, Trees$Literal) [5]
pluginsPt(Int, Trees$Select) [3]
pluginsPt(List, Trees$Apply) [1]
pluginsPt(List[Any], Trees$Select) [1]
@@ -65,7 +65,7 @@ pluginsPt(String, Trees$Literal) [1]
pluginsPt(String, Trees$Select) [1]
pluginsPt(String, Trees$Typed) [1]
pluginsPt(Unit, Trees$Assign) [1]
-pluginsPt(scala.annotation.Annotation, Trees$Apply) [5]
+pluginsPt(testAnn, Trees$Apply) [5]
pluginsTypeSig(<none>, Trees$Template) [2]
pluginsTypeSig(class A, Trees$ClassDef) [1]
pluginsTypeSig(class testAnn, Trees$ClassDef) [1]
@@ -82,8 +82,8 @@ pluginsTypeSig(value lub1, Trees$ValDef) [2]
pluginsTypeSig(value lub2, Trees$ValDef) [2]
pluginsTypeSig(value param, Trees$ValDef) [2]
pluginsTypeSig(value str, Trees$ValDef) [1]
-pluginsTypeSig(value x, Trees$ValDef) [5]
-pluginsTypeSig(value y, Trees$ValDef) [5]
+pluginsTypeSig(value x, Trees$ValDef) [4]
+pluginsTypeSig(value y, Trees$ValDef) [4]
pluginsTypeSig(variable count, Trees$ValDef) [3]
pluginsTypeSigAccessor(value annotField) [1]
pluginsTypeSigAccessor(value inferField) [1]
@@ -98,6 +98,7 @@ pluginsTyped(()String, Trees$Ident) [1]
pluginsTyped(()String, Trees$TypeApply) [1]
pluginsTyped(()scala.annotation.Annotation, Trees$Select) [1]
pluginsTyped(()testAnn, Trees$Select) [10]
+pluginsTyped(()type, Trees$TypeApply) [1]
pluginsTyped((str: String)A <and> (param: Double)A, Trees$Select) [1]
pluginsTyped((x$1: Any)Boolean <and> (x: Double)Boolean <and> (x: Float)Boolean <and> (x: Long)Boolean <and> (x: Int)Boolean <and> (x: Char)Boolean <and> (x: Short)Boolean <and> (x: Byte)Boolean, Trees$Select) [1]
pluginsTyped((x$1: Int)Unit, Trees$Select) [1]
@@ -110,7 +111,7 @@ pluginsTyped(<notype>, Trees$ClassDef) [2]
pluginsTyped(<notype>, Trees$DefDef) [14]
pluginsTyped(<notype>, Trees$PackageDef) [1]
pluginsTyped(<notype>, Trees$TypeDef) [1]
-pluginsTyped(<notype>, Trees$ValDef) [23]
+pluginsTyped(<notype>, Trees$ValDef) [21]
pluginsTyped(<root>, Trees$Ident) [1]
pluginsTyped(=> Boolean @testAnn, Trees$Select) [1]
pluginsTyped(=> Double, Trees$Select) [4]
@@ -124,7 +125,7 @@ pluginsTyped(A, Trees$TypeTree) [4]
pluginsTyped(A.super.type, Trees$Super) [1]
pluginsTyped(A.this.type, Trees$This) [11]
pluginsTyped(Any, Trees$TypeTree) [1]
-pluginsTyped(AnyRef, Trees$Select) [2]
+pluginsTyped(AnyRef, Trees$Select) [4]
pluginsTyped(Array[Any], Trees$ArrayValue) [1]
pluginsTyped(Boolean @testAnn, Trees$Select) [1]
pluginsTyped(Boolean @testAnn, Trees$TypeTree) [4]
@@ -137,12 +138,12 @@ pluginsTyped(Int @testAnn, Trees$TypeTree) [2]
pluginsTyped(Int @testAnn, Trees$Typed) [2]
pluginsTyped(Int(0), Trees$Literal) [3]
pluginsTyped(Int(1) @testAnn, Trees$Typed) [1]
-pluginsTyped(Int(1), Trees$Literal) [9]
+pluginsTyped(Int(1), Trees$Literal) [8]
pluginsTyped(Int(2), Trees$Literal) [1]
pluginsTyped(Int, Trees$Apply) [1]
pluginsTyped(Int, Trees$Ident) [2]
pluginsTyped(Int, Trees$If) [2]
-pluginsTyped(Int, Trees$Select) [17]
+pluginsTyped(Int, Trees$Select) [15]
pluginsTyped(Int, Trees$TypeTree) [13]
pluginsTyped(List, Trees$Apply) [1]
pluginsTyped(List, Trees$Select) [1]
@@ -160,26 +161,27 @@ pluginsTyped(String("huhu"), Trees$Literal) [1]
pluginsTyped(String("str") @testAnn, Trees$Typed) [1]
pluginsTyped(String("str"), Trees$Literal) [1]
pluginsTyped(String("str"), Trees$Typed) [1]
-pluginsTyped(String("two"), Trees$Literal) [3]
+pluginsTyped(String("two"), Trees$Literal) [2]
pluginsTyped(String, Trees$Apply) [2]
pluginsTyped(String, Trees$Block) [2]
pluginsTyped(String, Trees$Ident) [1]
pluginsTyped(String, Trees$Select) [9]
-pluginsTyped(String, Trees$TypeTree) [8]
+pluginsTyped(String, Trees$TypeTree) [7]
pluginsTyped(Unit, Trees$Apply) [2]
pluginsTyped(Unit, Trees$Assign) [8]
-pluginsTyped(Unit, Trees$Block) [7]
+pluginsTyped(Unit, Trees$Block) [4]
pluginsTyped(Unit, Trees$If) [1]
-pluginsTyped(Unit, Trees$Literal) [8]
+pluginsTyped(Unit, Trees$Literal) [5]
pluginsTyped(Unit, Trees$TypeTree) [1]
pluginsTyped([A](xs: A*)List[A], Trees$Select) [1]
pluginsTyped([T <: Int]=> Int, Trees$Select) [1]
-pluginsTyped([T0 >: ? <: ?]()T0, Trees$Select) [1]
+pluginsTyped([T0]()T0, Trees$Select) [2]
pluginsTyped([T](xs: Array[T])scala.collection.mutable.WrappedArray[T], Trees$Select) [1]
-pluginsTyped(annotation.type, Trees$Select) [2]
+pluginsTyped(annotation.type, Trees$Select) [4]
pluginsTyped(math.type, Trees$Select) [9]
pluginsTyped(scala.annotation.Annotation, Trees$Apply) [1]
pluginsTyped(scala.annotation.TypeConstraint, Trees$Select) [4]
+pluginsTyped(scala.annotation.TypeConstraint, Trees$TypeTree) [2]
pluginsTyped(scala.collection.immutable.List.type, Trees$Select) [2]
pluginsTyped(scala.collection.immutable.StringOps, Trees$ApplyImplicitView) [2]
pluginsTyped(scala.collection.mutable.WrappedArray[Any], Trees$Apply) [1]
@@ -192,5 +194,7 @@ pluginsTyped(testAnn, Trees$New) [5]
pluginsTyped(testAnn, Trees$This) [1]
pluginsTyped(testAnn, Trees$TypeTree) [2]
pluginsTyped(testAnn.super.type, Trees$Super) [1]
+pluginsTyped(type, Trees$Apply) [1]
pluginsTyped(type, Trees$Select) [1]
+pluginsTyped(type, Trees$TypeTree) [1]
pluginsTypedReturn(return f, String) [1]
diff --git a/test/files/run/analyzerPlugins.scala b/test/files/run/analyzerPlugins.scala
index daef83fa30..b20a734fe6 100644
--- a/test/files/run/analyzerPlugins.scala
+++ b/test/files/run/analyzerPlugins.scala
@@ -77,12 +77,12 @@ object Test extends DirectTest {
object analyzerPlugin extends AnalyzerPlugin {
def treeClass(t: Tree) = t.getClass.toString.split('.').last
- override def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type = {
+ override def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Mode): Type = {
output += s"pluginsPt($pt, ${treeClass(tree)})"
pt
}
- override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
+ override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type = {
output += s"pluginsTyped($tpe, ${treeClass(tree)})"
tpe
}
@@ -98,7 +98,7 @@ object Test extends DirectTest {
}
- override def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
+ override def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Boolean = {
output += s"canAdaptAnnotations(${treeClass(tree)}, $pt)"
false
}
diff --git a/test/files/run/array-addition.check b/test/files/run/array-addition.check
new file mode 100644
index 0000000000..7bfbd9c711
--- /dev/null
+++ b/test/files/run/array-addition.check
@@ -0,0 +1,4 @@
+Array(1, 2, 3, 4)
+Array(1, 2, 3, 4)
+Array(1)
+Array(1)
diff --git a/test/files/run/array-addition.scala b/test/files/run/array-addition.scala
new file mode 100644
index 0000000000..8def48e85c
--- /dev/null
+++ b/test/files/run/array-addition.scala
@@ -0,0 +1,11 @@
+object Test {
+ def prettyPrintArray(x: Array[_]) = println("Array(" + x.mkString(", ") + ")")
+
+ def main(args: Array[String]): Unit = {
+ prettyPrintArray(Array(1,2,3) :+ 4)
+ prettyPrintArray(1 +: Array(2,3,4))
+ prettyPrintArray(Array() :+ 1)
+ prettyPrintArray(1 +: Array())
+ }
+}
+
diff --git a/test/files/run/array-charSeq.scala b/test/files/run/array-charSeq.scala
index f7d0586f03..53796bb9d5 100644
--- a/test/files/run/array-charSeq.scala
+++ b/test/files/run/array-charSeq.scala
@@ -6,6 +6,7 @@ object Test {
def check(chars: CharSequence) {
println("\n[check '" + chars + "'] len = " + chars.length)
chars match {
+ case x: Predef.ArrayCharSequence => assert(x.__arrayOfChars eq arr, ((x.__arrayOfChars, arr)))
case x: runtime.ArrayCharSequence => assert(x.xs eq arr, ((x.xs, arr)))
case x => assert(false, x)
}
diff --git a/test/files/run/arrays.scala b/test/files/run/arrays.scala
index ecebc78a6f..c8bf80ea60 100644
--- a/test/files/run/arrays.scala
+++ b/test/files/run/arrays.scala
@@ -107,7 +107,7 @@ object Test {
val s1 = if (test1) "ok" else "KO";
val s2 = actual.toString();
val s3 = expected.toString();
- error(s0 + " - " + s1 + ": " + s2 + " != " + s3);
+ sys.error(s0 + " - " + s1 + ": " + s2 + " != " + s3);
}
checks += 1
}
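Several of the test updates in this patch replace the long-deprecated `Predef.error` with `sys.error`. As a minimal sketch (not part of the patch; the object name is illustrative only), the replacement behaves the same way: it throws a `RuntimeException` carrying the message.

    // Minimal sketch, not part of the patch: sys.error throws a RuntimeException,
    // which is what the deprecated Predef.error used to do.
    object SysErrorSketch {
      def fail(msg: String): Nothing = sys.error(msg)

      def main(args: Array[String]): Unit = {
        try fail("boom")
        catch { case e: RuntimeException => println("caught: " + e.getMessage) }
      }
    }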
diff --git a/test/files/run/bitsets.scala b/test/files/run/bitsets.scala
index 27395683b4..bdeb1fd811 100644
--- a/test/files/run/bitsets.scala
+++ b/test/files/run/bitsets.scala
@@ -85,8 +85,8 @@ object TestImmutable {
import scala.collection.immutable.BitSet
val is0 = BitSet()
- val is1 = BitSet.fromArray(Array())
- val is2 = BitSet.fromArray(Array(4))
+ val is1 = BitSet.fromBitMask(Array())
+ val is2 = BitSet.fromBitMask(Array(4))
val is3 = BitSet.empty
Console.println("is0 = " + is0)
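The bitsets test now builds its immutable sets with `BitSet.fromBitMask`, which takes an `Array[Long]` of mask words rather than element values. A small sketch of that constructor (standard library only; the object name is illustrative):

    import scala.collection.immutable.BitSet

    // Sketch: each Long is one 64-bit word of the mask, so 4L (binary 100)
    // produces the set containing element 2.
    object BitMaskSketch extends App {
      val bs = BitSet.fromBitMask(Array(4L))
      println(bs)                     // BitSet(2)
      println(bs.toBitMask.toList)    // List(4)
    }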
diff --git a/test/files/run/caseclasses.scala b/test/files/run/caseclasses.scala
index 5aafea59e3..668c984f3d 100644
--- a/test/files/run/caseclasses.scala
+++ b/test/files/run/caseclasses.scala
@@ -1,6 +1,6 @@
case class Foo(x: Int)(y: Int)
-case class Bar
+case class Bar()
abstract class Base
abstract case class Abs(x: Int) extends Base
diff --git a/test/files/run/collection-stacks.check b/test/files/run/collection-stacks.check
new file mode 100644
index 0000000000..aa25cd1fa6
--- /dev/null
+++ b/test/files/run/collection-stacks.check
@@ -0,0 +1,14 @@
+3-2-1: true
+3-2-1: true
+apply
+3: true
+3: true
+1: true
+1: true
+top
+3: true
+3: true
+pop
+2-1: true
+3: true
+2-1: true
diff --git a/test/files/run/collection-stacks.scala b/test/files/run/collection-stacks.scala
new file mode 100644
index 0000000000..be9fbbf1ae
--- /dev/null
+++ b/test/files/run/collection-stacks.scala
@@ -0,0 +1,38 @@
+import scala.collection.{ immutable, mutable }
+
+object Test extends App {
+ def mutableStack[T](xs: T*): mutable.Stack[T] = {
+ val s = new mutable.Stack[T]
+ s.pushAll(xs)
+ s
+ }
+
+ def immutableStack[T](xs: T*): immutable.Stack[T] = {
+ immutable.Stack.empty[T] pushAll xs
+ }
+
+ def check[T](expected: T, got: T) {
+ println(got + ": " + (expected == got))
+ }
+
+ // check #957
+ check("3-2-1", immutableStack(1, 2, 3).iterator.mkString("-"))
+ check("3-2-1", mutableStack(1, 2, 3).iterator.mkString("-"))
+
+ println("apply")
+ check(3, immutableStack(1, 2, 3).apply(0))
+ check(3, mutableStack(1, 2, 3).apply(0))
+ check(1, immutableStack(1, 2, 3).apply(2))
+ check(1, mutableStack(1, 2, 3).apply(2))
+
+ println("top")
+ check(3, immutableStack(1, 2, 3).top)
+ check(3, mutableStack(1, 2, 3).top)
+
+ println("pop")
+ check("2-1", immutableStack(1, 2, 3).pop.mkString("-"))
+ check(3, mutableStack(1, 2, 3).pop())
+ check("2-1", { val s = mutableStack(1, 2, 3); s.pop(); s.toList.mkString("-") })
+}
+
+// vim: set ts=2 sw=2 et:
diff --git a/test/files/run/compiler-asSeenFrom.scala b/test/files/run/compiler-asSeenFrom.scala
index 19feb45101..0d4b504d3c 100644
--- a/test/files/run/compiler-asSeenFrom.scala
+++ b/test/files/run/compiler-asSeenFrom.scala
@@ -47,10 +47,10 @@ package ll {
for (p <- typeRefPrefixes ; c <- classes filter (isPossibleEnclosure(p.typeSymbol, _)) ; a <- targs) yield
typeRef(p, c, List(a))
)
-
+
val wfmt = "%-" + 25 + "s"
def to_s(x: Any): String = wfmt.format(x.toString.replaceAll("""\bll\.""", ""))
-
+
def fmt(args: Any*): String = {
(args map to_s mkString " ").replaceAll("""\s+$""", "")
}
@@ -61,7 +61,7 @@ package ll {
}
def permuteAsSeenFrom(targs: List[Type]) = (
- for {
+ for {
tp <- typeRefs(targs filterNot (_ eq NoType))
prefix <- asSeenPrefixes
if tp.prefix != prefix
@@ -72,11 +72,11 @@ package ll {
}
yield ((site, tp, prefix, seen))
)
-
+
def block(label: Any)(lines: List[String]): List[String] = {
val first = "" + label + " {"
val last = "}"
-
+
first +: lines.map(" " + _) :+ last
}
@@ -84,7 +84,7 @@ package ll {
permuteAsSeenFrom(targs).groupBy(_._1).toList.sortBy(_._1.toString) flatMap {
case (site, xs) =>
block(fmt(site)) {
- fmt("type", "seen from prefix", "is") ::
+ fmt("type", "seen from prefix", "is") ::
fmt("----", "----------------", "--") :: {
xs.groupBy(_._2).toList.sortBy(_._1.toString) flatMap {
case (tp, ys) =>
@@ -95,7 +95,7 @@ package ll {
}
}
}
-
+
def pretty(xs: List[_]) = if (xs.isEmpty) "" else xs.mkString("\n ", "\n ", "\n")
def signaturesIn(info: Type): List[String] = (
@@ -103,11 +103,11 @@ package ll {
filterNot (s => s.isType || s.owner == ObjectClass || s.owner == AnyClass || s.isConstructor)
map (_.defString)
)
-
+
def check(source: String, unit: global.CompilationUnit) = {
import syms._
- afterTyper {
+ exitingTyper {
val typeArgs = List[Type](IntClass.tpe, ListClass[Int]) ++ tparams.map(_.tpe)
permute(typeArgs) foreach println
}
diff --git a/test/files/run/constant-type.check b/test/files/run/constant-type.check
index dfd8be5297..4eededb8ba 100644
--- a/test/files/run/constant-type.check
+++ b/test/files/run/constant-type.check
@@ -13,16 +13,16 @@ scala> :power
scala> val s = transformedType(StringClass.toType).asInstanceOf[Type]
s: $r.intp.global.Type = String
-scala> { println(afterPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))) }
+scala> { println(exitingPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))) }
Class[String](classOf[java.lang.String])
-scala> { afterPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))) }
+scala> { exitingPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))) }
Class(classOf[java.lang.String])
-scala> { ConstantType(Constant(s)); println(afterPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))); }
+scala> { ConstantType(Constant(s)); println(exitingPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))); }
Class[String](classOf[java.lang.String])
-scala> { ConstantType(Constant(s)); afterPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))); }
+scala> { ConstantType(Constant(s)); exitingPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))); }
Class(classOf[java.lang.String])
scala>
diff --git a/test/files/run/constant-type.scala b/test/files/run/constant-type.scala
index 84539e2895..373746af4a 100644
--- a/test/files/run/constant-type.scala
+++ b/test/files/run/constant-type.scala
@@ -9,9 +9,9 @@ object Test extends ReplTest {
def code = """
:power
val s = transformedType(StringClass.toType).asInstanceOf[Type]
-{ println(afterPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))) }
-{ afterPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))) }
-{ ConstantType(Constant(s)); println(afterPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))); }
-{ ConstantType(Constant(s)); afterPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))); }
+{ println(exitingPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))) }
+{ exitingPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))) }
+{ ConstantType(Constant(s)); println(exitingPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))); }
+{ ConstantType(Constant(s)); exitingPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))); }
"""
}
diff --git a/test/files/run/constrained-types.check b/test/files/run/constrained-types.check
index 85c4f41872..8b53e2391d 100644
--- a/test/files/run/constrained-types.check
+++ b/test/files/run/constrained-types.check
@@ -37,7 +37,7 @@ scala> object Stuff {
val x = "hello"
val y : Int @Annot(x) = 10
}
-defined module Stuff
+defined object Stuff
scala>
@@ -127,7 +127,7 @@ defined class rep
scala>
scala> object A { val x = "hello" : String @ rep }
-defined module A
+defined object A
warning: previously defined class A is not a companion to object A.
Companions must be defined together; you may wish to use :paste mode for this.
diff --git a/test/files/run/ctries-new/iterator.scala b/test/files/run/ctries-new/iterator.scala
index b953a40e00..bb1175e61b 100644
--- a/test/files/run/ctries-new/iterator.scala
+++ b/test/files/run/ctries-new/iterator.scala
@@ -1,144 +1,134 @@
-
-
-
-
import collection._
import collection.concurrent.TrieMap
-
-
object IteratorSpec extends Spec {
-
+
def test() {
"work for an empty trie" in {
val ct = new TrieMap
val it = ct.iterator
-
+
it.hasNext shouldEqual (false)
evaluating { it.next() }.shouldProduce [NoSuchElementException]
}
-
+
def nonEmptyIteratorCheck(sz: Int) {
val ct = new TrieMap[Wrap, Int]
for (i <- 0 until sz) ct.put(new Wrap(i), i)
-
+
val it = ct.iterator
val tracker = mutable.Map[Wrap, Int]()
for (i <- 0 until sz) {
assert(it.hasNext == true)
tracker += it.next
}
-
+
it.hasNext shouldEqual (false)
evaluating { it.next() }.shouldProduce [NoSuchElementException]
tracker.size shouldEqual (sz)
tracker shouldEqual (ct)
}
-
+
"work for a 1 element trie" in {
nonEmptyIteratorCheck(1)
}
-
+
"work for a 2 element trie" in {
nonEmptyIteratorCheck(2)
}
-
+
"work for a 3 element trie" in {
nonEmptyIteratorCheck(3)
}
-
+
"work for a 5 element trie" in {
nonEmptyIteratorCheck(5)
}
-
+
"work for a 10 element trie" in {
nonEmptyIteratorCheck(10)
}
-
+
"work for a 20 element trie" in {
nonEmptyIteratorCheck(20)
}
-
+
"work for a 50 element trie" in {
nonEmptyIteratorCheck(50)
}
-
+
"work for a 100 element trie" in {
nonEmptyIteratorCheck(100)
}
-
+
"work for a 1k element trie" in {
nonEmptyIteratorCheck(1000)
}
-
+
"work for a 5k element trie" in {
nonEmptyIteratorCheck(5000)
}
-
+
"work for a 75k element trie" in {
nonEmptyIteratorCheck(75000)
}
-
+
"work for a 250k element trie" in {
nonEmptyIteratorCheck(500000)
}
-
+
def nonEmptyCollideCheck(sz: Int) {
val ct = new TrieMap[DumbHash, Int]
for (i <- 0 until sz) ct.put(new DumbHash(i), i)
-
+
val it = ct.iterator
val tracker = mutable.Map[DumbHash, Int]()
for (i <- 0 until sz) {
assert(it.hasNext == true)
tracker += it.next
}
-
+
it.hasNext shouldEqual (false)
evaluating { it.next() }.shouldProduce [NoSuchElementException]
tracker.size shouldEqual (sz)
tracker shouldEqual (ct)
}
-
+
"work for colliding hashcodes, 2 element trie" in {
nonEmptyCollideCheck(2)
}
-
+
"work for colliding hashcodes, 3 element trie" in {
nonEmptyCollideCheck(3)
}
-
+
"work for colliding hashcodes, 5 element trie" in {
nonEmptyCollideCheck(5)
}
-
+
"work for colliding hashcodes, 10 element trie" in {
nonEmptyCollideCheck(10)
}
-
+
"work for colliding hashcodes, 100 element trie" in {
nonEmptyCollideCheck(100)
}
-
+
"work for colliding hashcodes, 500 element trie" in {
nonEmptyCollideCheck(500)
}
-
+
"work for colliding hashcodes, 5k element trie" in {
nonEmptyCollideCheck(5000)
}
-
+
def assertEqual(a: Map[Wrap, Int], b: Map[Wrap, Int]) {
if (a != b) {
println(a.size + " vs " + b.size)
- // println(a)
- // println(b)
- // println(a.toSeq.sortBy((x: (Wrap, Int)) => x._1.i))
- // println(b.toSeq.sortBy((x: (Wrap, Int)) => x._1.i))
}
assert(a == b)
}
-
+
"be consistent when taken with concurrent modifications" in {
val sz = 25000
val W = 15
@@ -146,40 +136,40 @@ object IteratorSpec extends Spec {
val checks = 5
val ct = new TrieMap[Wrap, Int]
for (i <- 0 until sz) ct.put(new Wrap(i), i)
-
+
class Modifier extends Thread {
override def run() {
for (i <- 0 until sz) ct.putIfAbsent(new Wrap(i), i) match {
case Some(_) => ct.remove(new Wrap(i))
- case None =>
+ case None =>
}
}
}
-
+
def consistentIteration(ct: TrieMap[Wrap, Int], checks: Int) {
class Iter extends Thread {
override def run() {
val snap = ct.readOnlySnapshot()
val initial = mutable.Map[Wrap, Int]()
for (kv <- snap) initial += kv
-
+
for (i <- 0 until checks) {
assertEqual(snap.iterator.toMap, initial)
}
}
}
-
+
val iter = new Iter
iter.start()
iter.join()
}
-
+
val threads = for (_ <- 0 until W) yield new Modifier
threads.foreach(_.start())
for (_ <- 0 until S) consistentIteration(ct, checks)
threads.foreach(_.join())
}
-
+
"be consistent with a concurrent removal with a well defined order" in {
val sz = 150000
val sgroupsize = 10
@@ -187,17 +177,16 @@ object IteratorSpec extends Spec {
val removerslowdown = 50
val ct = new TrieMap[Wrap, Int]
for (i <- 0 until sz) ct.put(new Wrap(i), i)
-
+
class Remover extends Thread {
override def run() {
for (i <- 0 until sz) {
assert(ct.remove(new Wrap(i)) == Some(i))
for (i <- 0 until removerslowdown) ct.get(new Wrap(i)) // slow down, mate
}
- //println("done removing")
}
}
-
+
def consistentIteration(it: Iterator[(Wrap, Int)]) = {
class Iter extends Thread {
override def run() {
@@ -210,7 +199,7 @@ object IteratorSpec extends Spec {
}
new Iter
}
-
+
val remover = new Remover
remover.start()
for (_ <- 0 until sgroupnum) {
@@ -218,27 +207,25 @@ object IteratorSpec extends Spec {
iters.foreach(_.start())
iters.foreach(_.join())
}
- //println("done with iterators")
remover.join()
}
-
+
"be consistent with a concurrent insertion with a well defined order" in {
val sz = 150000
val sgroupsize = 10
val sgroupnum = 10
val inserterslowdown = 50
val ct = new TrieMap[Wrap, Int]
-
+
class Inserter extends Thread {
override def run() {
for (i <- 0 until sz) {
assert(ct.put(new Wrap(i), i) == None)
for (i <- 0 until inserterslowdown) ct.get(new Wrap(i)) // slow down, mate
}
- //println("done inserting")
}
}
-
+
def consistentIteration(it: Iterator[(Wrap, Int)]) = {
class Iter extends Thread {
override def run() {
@@ -251,7 +238,7 @@ object IteratorSpec extends Spec {
}
new Iter
}
-
+
val inserter = new Inserter
inserter.start()
for (_ <- 0 until sgroupnum) {
@@ -259,31 +246,30 @@ object IteratorSpec extends Spec {
iters.foreach(_.start())
iters.foreach(_.join())
}
- //println("done with iterators")
inserter.join()
}
-
+
"work on a yet unevaluated snapshot" in {
val sz = 50000
val ct = new TrieMap[Wrap, Int]
for (i <- 0 until sz) ct.update(new Wrap(i), i)
-
+
val snap = ct.snapshot()
val it = snap.iterator
-
+
while (it.hasNext) it.next()
}
-
+
"be duplicated" in {
val sz = 50
val ct = collection.parallel.mutable.ParTrieMap((0 until sz) zip (0 until sz): _*)
val it = ct.splitter
for (_ <- 0 until (sz / 2)) it.next()
val dupit = it.dup
-
+
it.toList shouldEqual dupit.toList
}
-
+
}
-
+
}
diff --git a/test/files/run/deeps.check b/test/files/run/deeps.check
new file mode 100644
index 0000000000..a68e474f62
--- /dev/null
+++ b/test/files/run/deeps.check
@@ -0,0 +1,87 @@
+testEquals1
+false
+false
+true
+
+testEquals2
+false
+false
+true
+
+testEquals3
+x=Array(1)
+y=Array(1)
+false
+false
+true
+
+x=Array(Array(1), Array(1))
+y=Array(Array(1), Array(1))
+false
+false
+true
+
+x=Array(Array(Array(1), Array(1)), Array(Array(1), Array(1)))
+y=Array(Array(Array(1), Array(1)), Array(Array(1), Array(1)))
+false
+false
+true
+
+testEquals4
+false
+false
+true
+false
+false
+true
+Array(true, false)
+Array(true, false)
+[true;false]
+true;false
+
+Array(Array(true, false), Array(true, false))
+Array(Array(true, false), Array(true, false))
+[Array(true, false);Array(true, false)]
+Array(true, false);Array(true, false)
+
+Array(Array(Array(true, false), Array(true, false)), Array(Array(true, false), Array(true, false)))
+Array(Array(Array(true, false), Array(true, false)), Array(Array(true, false), Array(true, false)))
+[Array(Array(true, false), Array(true, false));Array(Array(true, false), Array(true, false))]
+Array(Array(true, false), Array(true, false));Array(Array(true, false), Array(true, false))
+
+Array(1.0, 0.0)
+Array(1.0, 0.0)
+[1.0;0.0]
+1.0;0.0
+
+Array(Array(1.0, 0.0), Array(1.0, 0.0))
+Array(Array(1.0, 0.0), Array(1.0, 0.0))
+[Array(1.0, 0.0);Array(1.0, 0.0)]
+Array(1.0, 0.0);Array(1.0, 0.0)
+
+Array(Array(Array(1.0, 0.0), Array(1.0, 0.0)), Array(Array(1.0, 0.0), Array(1.0, 0.0)))
+Array(Array(Array(1.0, 0.0), Array(1.0, 0.0)), Array(Array(1.0, 0.0), Array(1.0, 0.0)))
+[Array(Array(1.0, 0.0), Array(1.0, 0.0));Array(Array(1.0, 0.0), Array(1.0, 0.0))]
+Array(Array(1.0, 0.0), Array(1.0, 0.0));Array(Array(1.0, 0.0), Array(1.0, 0.0))
+
+Array(a, b)
+Array(a, b)
+[a;b]
+a;b
+
+Array(Array(a, b), Array(a, b))
+Array(Array(a, b), Array(a, b))
+[Array(a, b);Array(a, b)]
+Array(a, b);Array(a, b)
+
+Array(Array(Array(a, b), Array(a, b)), Array(Array(a, b), Array(a, b)))
+Array(Array(Array(a, b), Array(a, b)), Array(Array(a, b), Array(a, b)))
+[Array(Array(a, b), Array(a, b));Array(Array(a, b), Array(a, b))]
+Array(Array(a, b), Array(a, b));Array(Array(a, b), Array(a, b))
+
+[Array(true, false); Array(false)]
+[Array(1, 2); Array(3)]
+[Array(1, 2); Array(3)]
+
+Array(boo, and, foo)
+Array(a)
diff --git a/test/files/run/deeps.scala b/test/files/run/deeps.scala
new file mode 100644
index 0000000000..6049cc6024
--- /dev/null
+++ b/test/files/run/deeps.scala
@@ -0,0 +1,114 @@
+//############################################################################
+// deepEquals / deep.toString
+//############################################################################
+
+//############################################################################
+// need to revisit array equality
+object Test {
+
+ def testEquals1 {
+ println(Array(1) == Array(1))
+ println(Array(1) equals Array(1))
+ println(Array(1).deep == Array(1).deep)
+ println
+ }
+
+ def testEquals2 {
+ println(Array(Array(1), Array(2)) == Array(Array(1), Array(2)))
+ println(Array(Array(1), Array(2)) equals Array(Array(1), Array(2)))
+ println(Array(Array(1), Array(2)).deep equals Array(Array(1), Array(2)).deep)
+ println
+ }
+
+ def testEquals3 {
+ val a1 = Array(1)
+ val b1 = Array(1)
+ val a2 = Array(a1, b1)
+ val b2 = Array(a1, b1)
+ val a3 = Array(a2, b2)
+ val b3 = Array(a2, b2)
+ def test[T](x: Array[T], y: Array[T]) {
+ println("x=" + x.deep.toString)
+ println("y=" + y.deep.toString)
+ println(x == y)
+ println(x equals y)
+ println(x.deep == y.deep)
+ println
+ }
+ test(a1, b1)
+ test(a2, b2)
+ test(a3, b3)
+ }
+
+ def testEquals4 {
+ println("boo:and:foo".split(':') == "boo:and:foo".split(':'))
+ println("boo:and:foo".split(':') equals "boo:and:foo".split(':'))
+ println("boo:and:foo".split(':').deep == "boo:and:foo".split(':').deep)
+
+ val xs = new java.util.ArrayList[String](); xs.add("a")
+ val ys = new java.util.ArrayList[String](); ys.add("a")
+ println(xs.toArray == ys.toArray)
+ println(xs.toArray equals ys.toArray)
+ println(xs.toArray.deep == ys.toArray.deep)
+ }
+
+ def testToString1 {
+ def sweep(s: String) = (
+ s.replaceAll("D@[0-9a-fA-F]+", "D@0000000")
+ .replaceAll("Z@[0-9a-fA-F]+", "Z@0000000")
+ .replaceAll(";@[0-9a-fA-F]+", ";@0000000")
+ )
+ def test[T](a: Array[T]) {
+ println(sweep(a.deep.toString))
+ println(a.deep.toString)
+ println(a.deep.mkString("[", ";", "]"))
+ println(a.deep.mkString(";"))
+ println
+ }
+
+ val ba1 = Array(true, false)
+ val ba2 = Array(ba1, ba1)
+ val ba3 = Array(ba2, ba2)
+ test(ba1)
+ test(ba2)
+ test(ba3)
+
+ val da1 = Array(1.0d, 0.0d)
+ val da2 = Array(da1, da1)
+ val da3 = Array(da2, da2)
+ test(da1)
+ test(da2)
+ test(da3)
+
+ val sa1 = Array("a", "b")
+ val sa2 = Array(sa1, sa1)
+ val sa3 = Array(sa2, sa2)
+ test(sa1)
+ test(sa2)
+ test(sa3)
+ }
+
+ def testToString2 {
+ println(Array(Array(true, false), Array(false)).deep.mkString("[", "; ", "]"))
+ println(Array(Array('1', '2'), Array('3')).deep.mkString("[", "; ", "]"))
+ println(Array(Array(1, 2), Array(3)).deep.mkString("[", "; ", "]"))
+ println
+ }
+
+ def testToString3 {
+ println("boo:and:foo".split(':').deep.toString)
+
+ val xs = new java.util.ArrayList[String](); xs.add("a")
+ println(xs.toArray.deep.toString)
+ }
+
+ def main(args: Array[String]): Unit = {
+ println("testEquals1") ; testEquals1
+ println("testEquals2") ; testEquals2
+ println("testEquals3") ; testEquals3
+ println("testEquals4") ; testEquals4
+ testToString1
+ testToString2
+ testToString3
+ }
+}
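The new deeps test exercises `.deep`, whose `toString` and `==` recurse into nested arrays instead of using the JVM's reference-based defaults. A short sketch of the behaviour being checked (object name illustrative only):

    // Sketch only: deep wraps an array in an IndexedSeq whose toString and
    // equality descend into nested arrays.
    object DeepSketch extends App {
      val xs = Array(Array(1, 2), Array(3))
      println(xs.deep.toString)                      // Array(Array(1, 2), Array(3))
      println(Array(1, 2).deep == Array(1, 2).deep)  // true
      println(Array(1, 2) == Array(1, 2))            // false (reference equality)
    }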
diff --git a/test/files/run/enums.scala b/test/files/run/enums.scala
index 9cdeed2691..3aad7ec320 100644
--- a/test/files/run/enums.scala
+++ b/test/files/run/enums.scala
@@ -36,8 +36,11 @@ object Test2 {
object Test3 {
- object Direction extends Enumeration("North", "South", "East", "West") {
- val North, South, East, West = Value;
+ object Direction extends Enumeration {
+ val North = Value("North")
+ val South = Value("South")
+ val East = Value("East")
+ val West = Value("West")
}
def run: Int = {
@@ -48,8 +51,11 @@ object Test3 {
object Test4 {
- object Direction extends Enumeration("North", "South", "East", "West") {
- val North, South, East, West = Value;
+ object Direction extends Enumeration {
+ val North = Value("North")
+ val South = Value("South")
+ val East = Value("East")
+ val West = Value("West")
}
def run: Int = {
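The enums test drops the removed `Enumeration(names: String*)` constructor in favour of naming each member through `Value(name)`. A sketch of the updated idiom (object names illustrative only):

    // Sketch only: explicit names via Value(name); ids are assigned in
    // declaration order, so East.id == 2.
    object Direction extends Enumeration {
      val North = Value("North")
      val South = Value("South")
      val East  = Value("East")
      val West  = Value("West")
    }

    object EnumSketch extends App {
      println(Direction.values.mkString(", "))   // North, South, East, West
      println(Direction.withName("East").id)     // 2
    }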
diff --git a/test/files/run/exceptions-2.scala b/test/files/run/exceptions-2.scala
index d0312a49b2..f5bbcca210 100644
--- a/test/files/run/exceptions-2.scala
+++ b/test/files/run/exceptions-2.scala
@@ -42,14 +42,14 @@ object NoExcep {
def method4 = try {
Console.println("..");
} catch {
- case _ => error("..");
+ case _ => sys.error("..");
}
}
object Test {
def nested1: Unit = try {
try {
- error("nnnnoooo");
+ sys.error("nnnnoooo");
} finally {
Console.println("Innermost finally");
}
@@ -59,7 +59,7 @@ object Test {
def nested2 = try {
try {
- error("nnnnoooo");
+ sys.error("nnnnoooo");
} finally {
Console.println("Innermost finally");
}
@@ -68,7 +68,7 @@ object Test {
Console.println("Outermost finally");
}
- def mixed =
+ def mixed =
try {
if (10 > 0)
throw Leaf(10);
@@ -107,7 +107,7 @@ object Test {
case Leaf(a) => Console.println(a);
}
} catch {
- case npe: NullPointerException =>
+ case npe: NullPointerException =>
Console.println("Caught an NPE");
}
@@ -134,21 +134,21 @@ object Test {
()
} finally {
try {
- error("a");
+ sys.error("a");
} catch {
case _ => Console.println("Silently ignore exception in finally");
}
}
}
- def valInFinally: Unit =
- try {
+ def valInFinally: Unit =
+ try {
} finally {
val fin = "Abc";
Console.println(fin);
};
- def tryAndValInFinally: Unit =
+ def tryAndValInFinally: Unit =
try {
} finally {
val fin = "Abc";
@@ -157,51 +157,51 @@ object Test {
} catch { case _ => () }
};
- def returnInBody: Unit = try {
+ def returnInBody: Unit = try {
try {
Console.println("Normal execution...");
- return
+ return
Console.println("non reachable code");
} finally {
Console.println("inner finally");
}
- } finally {
+ } finally {
Console.println("Outer finally");
}
- def returnInBodySynch: Unit = try {
+ def returnInBodySynch: Unit = try {
synchronized {
try {
Console.println("Synchronized normal execution...");
- return
+ return
Console.println("non reachable code");
} finally {
Console.println("inner finally");
}
}
- } finally {
+ } finally {
Console.println("Outer finally");
}
- def returnInBodyAndInFinally: Unit = try {
+ def returnInBodyAndInFinally: Unit = try {
try {
Console.println("Normal execution...");
- return
+ return
Console.println("non reachable code");
} finally {
Console.println("inner finally");
return
}
- } finally {
+ } finally {
Console.println("Outer finally");
return
}
- def returnInBodyAndInFinally2: Unit = try {
+ def returnInBodyAndInFinally2: Unit = try {
try {
Console.println("Normal execution...");
- return
+ return
Console.println("non reachable code");
} finally {
try {
@@ -211,7 +211,7 @@ object Test {
Console.println("finally inside finally");
}
}
- } finally {
+ } finally {
Console.println("Outer finally");
return
}
@@ -253,7 +253,7 @@ object Test {
}
- def returnWithFinallyClean: Int = try {
+ def returnWithFinallyClean: Int = try {
try {
Console.println("Normal execution...");
return 10
@@ -262,7 +262,7 @@ object Test {
} finally {
Console.println("inner finally");
}
- } finally {
+ } finally {
Console.println("Outer finally");
try { 1 } catch { case e: java.io.IOException => () }
}
@@ -294,7 +294,7 @@ object Test {
Console.println("mixed: ");
execute(mixed);
-
+
Console.println("withValue1:");
execute(withValue1);
@@ -322,7 +322,7 @@ object Test {
Console.println("NoExcep.method3:");
execute(NoExcep.method3);
-
+
Console.println("NoExcep.method4:");
execute(NoExcep.method4);
diff --git a/test/files/run/exceptions.scala b/test/files/run/exceptions.scala
index fc3566f85e..90f681e3c5 100644
--- a/test/files/run/exceptions.scala
+++ b/test/files/run/exceptions.scala
@@ -6,8 +6,8 @@
abstract class IntMap[A] {
def lookup(key: Int): A = this match {
- case Empty() => error("KO")
- case _ => error("ok")
+ case Empty() => sys.error("KO")
+ case _ => sys.error("ok")
}
}
diff --git a/test/files/run/existentials-in-compiler.check b/test/files/run/existentials-in-compiler.check
index 0d7a9298b4..b0d852865d 100644
--- a/test/files/run/existentials-in-compiler.check
+++ b/test/files/run/existentials-in-compiler.check
@@ -8,22 +8,22 @@ abstract trait BippyLike[A <: AnyRef, B <: List[A], This <: extest.BippyLike[A,B
extest.BippyLike[A,B,This] forSome { A <: AnyRef; B <: List[A]; This <: extest.BippyLike[A,B,This] with extest.Bippy[A,B] }
abstract trait Contra[-A >: AnyRef, -B] extends AnyRef
- extest.Contra[_ >: AnyRef, _]
+ extest.Contra[AnyRef, _]
abstract trait ContraLike[-A >: AnyRef, -B >: List[A]] extends AnyRef
extest.ContraLike[A,B] forSome { -A >: AnyRef; -B >: List[A] }
abstract trait Cov01[+A <: AnyRef, +B] extends AnyRef
- extest.Cov01[_ <: AnyRef, _]
+ extest.Cov01[AnyRef,Any]
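The new array-addition test checks the `:+` (append) and `+:` (prepend) operators on arrays; both return a fresh array, and the operand on the colon side is the collection. A compact sketch (object name illustrative only):

    // Sketch only: :+ appends, +: prepends, each building a new Array.
    object ArrayAdditionSketch extends App {
      val xs = Array(1, 2, 3)
      println((xs :+ 4).mkString("Array(", ", ", ")"))   // Array(1, 2, 3, 4)
      println((0 +: xs).mkString("Array(", ", ", ")"))   // Array(0, 1, 2, 3)
    }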
abstract trait Cov02[+A <: AnyRef, B] extends AnyRef
- extest.Cov02[_ <: AnyRef, _]
+ extest.Cov02[AnyRef, _]
abstract trait Cov03[+A <: AnyRef, -B] extends AnyRef
- extest.Cov03[_ <: AnyRef, _]
+ extest.Cov03[AnyRef, _]
abstract trait Cov04[A <: AnyRef, +B] extends AnyRef
- extest.Cov04[_ <: AnyRef, _]
+ extest.Cov04[_ <: AnyRef, Any]
abstract trait Cov05[A <: AnyRef, B] extends AnyRef
extest.Cov05[_ <: AnyRef, _]
@@ -32,7 +32,7 @@ abstract trait Cov06[A <: AnyRef, -B] extends AnyRef
extest.Cov06[_ <: AnyRef, _]
abstract trait Cov07[-A <: AnyRef, +B] extends AnyRef
- extest.Cov07[_ <: AnyRef, _]
+ extest.Cov07[_ <: AnyRef, Any]
abstract trait Cov08[-A <: AnyRef, B] extends AnyRef
extest.Cov08[_ <: AnyRef, _]
@@ -41,16 +41,16 @@ abstract trait Cov09[-A <: AnyRef, -B] extends AnyRef
extest.Cov09[_ <: AnyRef, _]
abstract trait Cov11[+A <: AnyRef, +B <: List[_]] extends AnyRef
- extest.Cov11[_ <: AnyRef, _ <: List[_]]
+ extest.Cov11[AnyRef,List[_]]
abstract trait Cov12[+A <: AnyRef, B <: List[_]] extends AnyRef
- extest.Cov12[_ <: AnyRef, _ <: List[_]]
+ extest.Cov12[AnyRef, _ <: List[_]]
abstract trait Cov13[+A <: AnyRef, -B <: List[_]] extends AnyRef
- extest.Cov13[_ <: AnyRef, _ <: List[_]]
+ extest.Cov13[AnyRef, _ <: List[_]]
abstract trait Cov14[A <: AnyRef, +B <: List[_]] extends AnyRef
- extest.Cov14[_ <: AnyRef, _ <: List[_]]
+ extest.Cov14[_ <: AnyRef, List[_]]
abstract trait Cov15[A <: AnyRef, B <: List[_]] extends AnyRef
extest.Cov15[_ <: AnyRef, _ <: List[_]]
@@ -59,7 +59,7 @@ abstract trait Cov16[A <: AnyRef, -B <: List[_]] extends AnyRef
extest.Cov16[_ <: AnyRef, _ <: List[_]]
abstract trait Cov17[-A <: AnyRef, +B <: List[_]] extends AnyRef
- extest.Cov17[_ <: AnyRef, _ <: List[_]]
+ extest.Cov17[_ <: AnyRef, List[_]]
abstract trait Cov18[-A <: AnyRef, B <: List[_]] extends AnyRef
extest.Cov18[_ <: AnyRef, _ <: List[_]]
@@ -68,16 +68,16 @@ abstract trait Cov19[-A <: AnyRef, -B <: List[_]] extends AnyRef
extest.Cov19[_ <: AnyRef, _ <: List[_]]
abstract trait Cov21[+A, +B] extends AnyRef
- extest.Cov21[_, _]
+ extest.Cov21[Any,Any]
abstract trait Cov22[+A, B] extends AnyRef
- extest.Cov22[_, _]
+ extest.Cov22[Any, _]
abstract trait Cov23[+A, -B] extends AnyRef
- extest.Cov23[_, _]
+ extest.Cov23[Any, _]
abstract trait Cov24[A, +B] extends AnyRef
- extest.Cov24[_, _]
+ extest.Cov24[_, Any]
abstract trait Cov25[A, B] extends AnyRef
extest.Cov25[_, _]
@@ -86,7 +86,7 @@ abstract trait Cov26[A, -B] extends AnyRef
extest.Cov26[_, _]
abstract trait Cov27[-A, +B] extends AnyRef
- extest.Cov27[_, _]
+ extest.Cov27[_, Any]
abstract trait Cov28[-A, B] extends AnyRef
extest.Cov28[_, _]
@@ -122,16 +122,16 @@ abstract trait Cov39[-A, -B, C <: Tuple2[_, _]] extends AnyRef
extest.Cov39[_, _, _ <: Tuple2[_, _]]
abstract trait Cov41[+A >: Null, +B] extends AnyRef
- extest.Cov41[_ >: Null, _]
+ extest.Cov41[Any,Any]
abstract trait Cov42[+A >: Null, B] extends AnyRef
- extest.Cov42[_ >: Null, _]
+ extest.Cov42[Any, _]
abstract trait Cov43[+A >: Null, -B] extends AnyRef
- extest.Cov43[_ >: Null, _]
+ extest.Cov43[Any, _]
abstract trait Cov44[A >: Null, +B] extends AnyRef
- extest.Cov44[_ >: Null, _]
+ extest.Cov44[_ >: Null, Any]
abstract trait Cov45[A >: Null, B] extends AnyRef
extest.Cov45[_ >: Null, _]
@@ -140,7 +140,7 @@ abstract trait Cov46[A >: Null, -B] extends AnyRef
extest.Cov46[_ >: Null, _]
abstract trait Cov47[-A >: Null, +B] extends AnyRef
- extest.Cov47[_ >: Null, _]
+ extest.Cov47[_ >: Null, Any]
abstract trait Cov48[-A >: Null, B] extends AnyRef
extest.Cov48[_ >: Null, _]
@@ -149,7 +149,7 @@ abstract trait Cov49[-A >: Null, -B] extends AnyRef
extest.Cov49[_ >: Null, _]
abstract trait Covariant[+A <: AnyRef, +B] extends AnyRef
- extest.Covariant[_ <: AnyRef, _]
+ extest.Covariant[AnyRef,Any]
abstract trait CovariantLike[+A <: AnyRef, +B <: List[A], +This <: extest.CovariantLike[A,B,This] with extest.Covariant[A,B]] extends AnyRef
extest.CovariantLike[A,B,This] forSome { +A <: AnyRef; +B <: List[A]; +This <: extest.CovariantLike[A,B,This] with extest.Covariant[A,B] }
diff --git a/test/files/run/existentials-in-compiler.scala b/test/files/run/existentials-in-compiler.scala
index c69d1217fd..14c25849cb 100644
--- a/test/files/run/existentials-in-compiler.scala
+++ b/test/files/run/existentials-in-compiler.scala
@@ -73,7 +73,7 @@ package extest {
def check(source: String, unit: global.CompilationUnit) = {
getRequiredPackage("extest").moduleClass.info.decls.toList.filter(_.isType).map(_.initialize).sortBy(_.name.toString) foreach { clazz =>
- afterTyper {
+ exitingTyper {
clazz.info
println(clazz.defString)
println(" " + classExistentialType(clazz) + "\n")
diff --git a/test/files/run/exoticnames.scala b/test/files/run/exoticnames.scala
index fa0e5e6ec5..98f9a88776 100644
--- a/test/files/run/exoticnames.scala
+++ b/test/files/run/exoticnames.scala
@@ -1,7 +1,7 @@
// this is a run-test because the compiler should emit bytecode that'll pass the JVM's verifier
object Test extends App {
- def `(` = error("bla")
- def `.` = error("bla")
- def `)` = error("bla")
- def `,` = error("bla")
+ def `(` = sys.error("bla")
+ def `.` = sys.error("bla")
+ def `)` = sys.error("bla")
+ def `,` = sys.error("bla")
}
diff --git a/test/files/run/genericValueClass.scala b/test/files/run/genericValueClass.scala
index 68162bb685..768e1f86a5 100644
--- a/test/files/run/genericValueClass.scala
+++ b/test/files/run/genericValueClass.scala
@@ -1,11 +1,12 @@
-final class ArrowAssoc[A](val __leftOfArrow: A) extends AnyVal {
- @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y)
- def →[B](y: B): Tuple2[A, B] = ->(y)
-}
object Test extends App {
+ class ArrowAssocClass[A](val __leftOfArrow: A) extends AnyVal {
+ @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y)
+ def →[B](y: B): Tuple2[A, B] = ->(y)
+ }
+
{
- @inline implicit def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x)
+ @inline implicit def ArrowAssoc[A](x: A): ArrowAssocClass[A] = new ArrowAssocClass(x)
val x = 1 -> "abc"
println(x)
}
diff --git a/test/files/run/hashset.check b/test/files/run/hashset.check
new file mode 100644
index 0000000000..9542a1ff48
--- /dev/null
+++ b/test/files/run/hashset.check
@@ -0,0 +1,26 @@
+*** HashSet primitives
+0 true,1 true,10 true,11 true,12 true,13 true,14 true,15 true,16 true,17 true,18 true,19 true,2 true,3 true,4 true,5 true,6 true,7 true,8 true,9 true
+20 false,21 false,22 false,23 false,24 false,25 false,26 false,27 false,28 false,29 false,30 false,31 false,32 false,33 false,34 false,35 false,36 false,37 false,38 false,39 false
+0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19
+
+*** HashSet Strings with null
+null true
+0 true,1 true,10 true,11 true,12 true,13 true,14 true,15 true,16 true,17 true,18 true,19 true,2 true,3 true,4 true,5 true,6 true,7 true,8 true,9 true
+20 false,21 false,22 false,23 false,24 false,25 false,26 false,27 false,28 false,29 false,30 false,31 false,32 false,33 false,34 false,35 false,36 false,37 false,38 false,39 false
+0,1,10,11,12,13,14,15,16,17,18,19,2,3,4,5,6,7,8,9,null
+null false
+0 false,1 true,10 true,11 true,12 true,13 true,14 true,15 true,16 true,17 true,18 true,19 true,2 true,3 true,4 true,5 true,6 true,7 true,8 true,9 true
+
+*** ParHashSet primitives
+0 true,1 true,10 true,11 true,12 true,13 true,14 true,15 true,16 true,17 true,18 true,19 true,2 true,3 true,4 true,5 true,6 true,7 true,8 true,9 true
+20 false,21 false,22 false,23 false,24 false,25 false,26 false,27 false,28 false,29 false,30 false,31 false,32 false,33 false,34 false,35 false,36 false,37 false,38 false,39 false
+0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19
+
+*** ParHashSet Strings with null
+null true
+0 true,1 true,10 true,11 true,12 true,13 true,14 true,15 true,16 true,17 true,18 true,19 true,2 true,3 true,4 true,5 true,6 true,7 true,8 true,9 true
+20 false,21 false,22 false,23 false,24 false,25 false,26 false,27 false,28 false,29 false,30 false,31 false,32 false,33 false,34 false,35 false,36 false,37 false,38 false,39 false
+0,1,10,11,12,13,14,15,16,17,18,19,2,3,4,5,6,7,8,9,null
+null false
+0 false,1 true,10 true,11 true,12 true,13 true,14 true,15 true,16 true,17 true,18 true,19 true,2 true,3 true,4 true,5 true,6 true,7 true,8 true,9 true
+
diff --git a/test/files/run/hashset.scala b/test/files/run/hashset.scala
new file mode 100644
index 0000000000..299dce3aec
--- /dev/null
+++ b/test/files/run/hashset.scala
@@ -0,0 +1,48 @@
+import scala.collection.generic.{Growable, Shrinkable}
+import scala.collection.GenSet
+import scala.collection.mutable.FlatHashTable
+import scala.collection.mutable.HashSet
+import scala.collection.parallel.mutable.ParHashSet
+
+object Test extends App {
+ test(new Creator{
+ def create[A] = new HashSet[A]
+ def hashSetType = "HashSet"
+ })
+
+ test(new Creator{
+ def create[A] = new ParHashSet[A]
+ def hashSetType = "ParHashSet"
+ })
+
+
+ def test(creator : Creator) {
+ println("*** " + creator.hashSetType + " primitives")
+ val h1 = creator.create[Int]
+ for (i <- 0 until 20) h1 += i
+ println((for (i <- 0 until 20) yield i + " " + (h1 contains i)).toList.sorted mkString(","))
+ println((for (i <- 20 until 40) yield i + " " + (h1 contains i)).toList.sorted mkString(","))
+ println(h1.toList.sorted mkString ",")
+ println
+
+ println("*** " + creator.hashSetType + " Strings with null")
+ val h2 = creator.create[String]
+ h2 += null
+ for (i <- 0 until 20) h2 += "" + i
+ println("null " + (h2 contains null))
+ println((for (i <- 0 until 20) yield i + " " + (h2 contains ("" + i))).toList.sorted mkString(","))
+ println((for (i <- 20 until 40) yield i + " " + (h2 contains ("" + i))).toList.sorted mkString(","))
+ println((h2.toList map {x => "" + x}).sorted mkString ",")
+
+ h2 -= null
+ h2 -= "" + 0
+ println("null " + (h2 contains null))
+ println((for (i <- 0 until 20) yield i + " " + (h2 contains ("" + i))).toList.sorted mkString(","))
+ println
+ }
+
+ trait Creator {
+ def create[A] : GenSet[A] with Cloneable with FlatHashTable[A] with Growable[A] with Shrinkable[A]
+ def hashSetType : String
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/hashsetremove.check b/test/files/run/hashsetremove.check
new file mode 100644
index 0000000000..8de9826895
--- /dev/null
+++ b/test/files/run/hashsetremove.check
@@ -0,0 +1,6 @@
+remove 0 should be false, was false
+contains 1 should be true, was true
+remove 1 should be true, was true
+contains 1 should be false, was false
+remove 1 should be false, was false
+contains 1 should be false, was false
diff --git a/test/files/run/hashsetremove.scala b/test/files/run/hashsetremove.scala
new file mode 100644
index 0000000000..7b82a9909b
--- /dev/null
+++ b/test/files/run/hashsetremove.scala
@@ -0,0 +1,13 @@
+import scala.collection.mutable.HashSet
+
+
+object Test extends App {
+ val h = new HashSet[Int]
+ h += 1
+ println(s"remove 0 should be false, was ${h remove 0}")
+ println(s"contains 1 should be true, was ${h contains 1}")
+ println(s"remove 1 should be true, was ${h remove 1}")
+ println(s"contains 1 should be false, was ${h contains 1}")
+ println(s"remove 1 should be false, was ${h remove 1}")
+ println(s"contains 1 should be false, was ${h contains 1}")
+ }
\ No newline at end of file
diff --git a/test/files/run/idempotency-this.check b/test/files/run/idempotency-this.check
index 8faf703d97..88b8288adf 100644
--- a/test/files/run/idempotency-this.check
+++ b/test/files/run/idempotency-this.check
@@ -1,4 +1,4 @@
List()
List.apply[String]("")
-Apply(TypeApply(Select(Ident(scala.collection.immutable.List), newTermName("apply")), List(TypeTree().setOriginal(Select(Ident(scala.Predef), newTypeName("String"))))), List(Literal(Constant(""))))
+Apply(TypeApply(Select(Ident(scala.collection.immutable.List), TermName("apply")), List(TypeTree().setOriginal(Select(Ident(scala.Predef), TypeName("String"))))), List(Literal(Constant(""))))
List()
diff --git a/test/files/run/inline-ex-handlers.check b/test/files/run/inline-ex-handlers.check
index f2f0b60687..abcc8bf42d 100644
--- a/test/files/run/inline-ex-handlers.check
+++ b/test/files/run/inline-ex-handlers.check
@@ -14,9 +14,9 @@
<
< 2:
247c246
-< blocks: [1,2,3,4,5,6,7,8,10,11,12,13,14,15,16,17,18]
+< blocks: [1,2,3,4,5,6,7,8,11,12,13,14,15,16,17,18]
---
-> blocks: [1,2,3,4,5,6,8,10,11,12,13,14,15,16,17,18]
+> blocks: [1,2,3,4,5,6,8,11,12,13,14,15,16,17,18]
258,260d256
< 92 JUMP 7
<
@@ -57,19 +57,18 @@
> ? LOAD_LOCAL(value x5)
> 106 CALL_METHOD MyException.message (dynamic)
519c518
-< blocks: [1,2,3,4,6,7,8,9,10]
+< blocks: [1,2,3,4,6,7,9,10]
---
-> blocks: [1,2,3,4,6,7,8,9,10,11,12,13]
-548c547
+> blocks: [1,3,4,6,7,9,10,11,12,13]
+548c547,552
< 306 THROW(MyException)
---
> ? JUMP 11
-549a549,553
+>
> 11:
> ? LOAD_LOCAL(variable monitor4)
> 305 MONITOR_EXIT
> ? JUMP 12
->
554c558
< ? THROW(Throwable)
---
@@ -85,7 +84,13 @@
> 304 MONITOR_EXIT
> ? STORE_LOCAL(value t)
> ? JUMP 13
-575a587,598
+574c585
+< 310 JUMP 2
+---
+> 300 RETURN(UNIT)
+576c587,596
+< 2:
+---
> 13:
> 310 LOAD_MODULE object Predef
> 310 CALL_PRIMITIVE(StartConcat)
@@ -96,38 +101,35 @@
> 310 CALL_PRIMITIVE(StringConcat(REF(class String)))
> 310 CALL_PRIMITIVE(EndConcat)
> 310 CALL_METHOD scala.Predef.println (dynamic)
-> 310 JUMP 2
->
-584c607
-< catch (Throwable) in ArrayBuffer(7, 8, 9, 10) starting at: 6
+584c604
+< catch (Throwable) in ArrayBuffer(7, 9, 10) starting at: 6
---
-> catch (Throwable) in ArrayBuffer(7, 8, 9, 10, 11) starting at: 6
-587c610
-< catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10) starting at: 3
+> catch (Throwable) in ArrayBuffer(7, 9, 10, 11) starting at: 6
+587c607
+< catch (Throwable) in ArrayBuffer(4, 6, 7, 9, 10) starting at: 3
---
-> catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10, 11, 12) starting at: 3
-619c642
-< blocks: [1,2,3,4,5,6,7,9,10]
+> catch (Throwable) in ArrayBuffer(4, 6, 7, 9, 10, 11, 12) starting at: 3
+619c639
+< blocks: [1,3,4,5,6,8,9]
---
-> blocks: [1,2,3,4,5,6,7,9,10,11,12]
-643c666,667
+> blocks: [1,3,4,5,6,8,9,10,11]
+643c663,669
< 78 THROW(IllegalArgumentException)
---
> ? STORE_LOCAL(value e)
-> ? JUMP 11
-644a669,673
-> 11:
+> ? JUMP 10
+>
+> 10:
> 81 LOAD_LOCAL(value e)
> ? STORE_LOCAL(variable exc1)
-> ? JUMP 12
->
-672c701,702
+> ? JUMP 11
+669c695,696
< 81 THROW(Exception)
---
> ? STORE_LOCAL(variable exc1)
-> ? JUMP 12
-688a719,731
-> 12:
+> ? JUMP 11
+685a713,725
+> 11:
> 83 LOAD_MODULE object Predef
> 83 CONSTANT("finally")
> 83 CALL_METHOD scala.Predef.println (dynamic)
@@ -140,88 +142,88 @@
> 84 LOAD_LOCAL(variable exc1)
> 84 THROW(Throwable)
>
-694c737
-< catch (<none>) in ArrayBuffer(4, 6, 7, 9) starting at: 3
+691c731
+< catch (<none>) in ArrayBuffer(4, 5, 6, 8) starting at: 3
---
-> catch (<none>) in ArrayBuffer(4, 6, 7, 9, 11) starting at: 3
-718c761
+> catch (<none>) in ArrayBuffer(4, 5, 6, 8, 10) starting at: 3
+715c755
< locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value message, value x, value ex6, value x4, value x5, value message, value x
---
> locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value x, value ex6, value x4, value x5, value x
-720c763
-< blocks: [1,2,3,4,5,6,9,11,14,15,16,19,21,22,24,25]
+717c757
+< blocks: [1,3,4,5,6,9,13,14,15,18,20,21,23,24]
---
-> blocks: [1,2,3,4,5,6,9,11,14,15,16,19,21,22,24,25,26,27,28]
-744c787,794
+> blocks: [1,3,4,5,6,9,13,14,15,18,20,21,23,24,25,26,27]
+741c781,788
< 172 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
-> ? JUMP 26
+> ? JUMP 25
>
-> 26:
+> 25:
> 170 LOAD_LOCAL(value ex6)
> 170 STORE_LOCAL(value x4)
> 170 SCOPE_ENTER value x4
-> 170 JUMP 15
-787,790d836
+> 170 JUMP 14
+781,784d827
< 175 LOAD_LOCAL(value x5)
< 175 CALL_METHOD MyException.message (dynamic)
< 175 STORE_LOCAL(value message)
< 175 SCOPE_ENTER value message
-792c838,839
+786c829,830
< 176 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 176 CALL_METHOD MyException.message (dynamic)
-796c843,844
+790c834,835
< 177 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 177 CALL_METHOD MyException.message (dynamic)
-798c846,847
+792c837,838
< 177 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
-> ? JUMP 27
-802c851,852
+> ? JUMP 26
+796c842,843
< 170 THROW(Throwable)
---
> ? STORE_LOCAL(value ex6)
-> ? JUMP 27
-811a862,867
-> 27:
+> ? JUMP 26
+805a853,858
+> 26:
> 169 LOAD_LOCAL(value ex6)
> 169 STORE_LOCAL(value x4)
> 169 SCOPE_ENTER value x4
> 169 JUMP 5
>
-822,825d877
+816,819d868
< 180 LOAD_LOCAL(value x5)
< 180 CALL_METHOD MyException.message (dynamic)
< 180 STORE_LOCAL(value message)
< 180 SCOPE_ENTER value message
-827c879,880
+821c870,871
< 181 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 181 CALL_METHOD MyException.message (dynamic)
-831c884,885
+825c875,876
< 182 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 182 CALL_METHOD MyException.message (dynamic)
-833c887,888
+827c878,879
< 182 THROW(MyException)
---
> ? STORE_LOCAL(variable exc2)
-> ? JUMP 28
-837c892,893
+> ? JUMP 27
+831c883,884
< 169 THROW(Throwable)
---
> ? STORE_LOCAL(variable exc2)
-> ? JUMP 28
-853a910,922
-> 28:
+> ? JUMP 27
+847a901,913
+> 27:
> 184 LOAD_MODULE object Predef
> 184 CONSTANT("finally")
> 184 CALL_METHOD scala.Predef.println (dynamic)
@@ -234,23 +236,23 @@
> 185 LOAD_LOCAL(variable exc2)
> 185 THROW(Throwable)
>
-859c928
-< catch (Throwable) in ArrayBuffer(14, 15, 16, 19, 21, 22, 24) starting at: 4
+853c919
+< catch (Throwable) in ArrayBuffer(13, 14, 15, 18, 20, 21, 23) starting at: 4
---
-> catch (Throwable) in ArrayBuffer(14, 15, 16, 19, 21, 22, 24, 26) starting at: 4
-862c931
-< catch (<none>) in ArrayBuffer(4, 5, 6, 9, 14, 15, 16, 19, 21, 22, 24) starting at: 3
+> catch (Throwable) in ArrayBuffer(13, 14, 15, 18, 20, 21, 23, 25) starting at: 4
+856c922
+< catch (<none>) in ArrayBuffer(4, 5, 6, 9, 13, 14, 15, 18, 20, 21, 23) starting at: 3
---
-> catch (<none>) in ArrayBuffer(4, 5, 6, 9, 14, 15, 16, 19, 21, 22, 24, 26, 27) starting at: 3
-886c955
+> catch (<none>) in ArrayBuffer(4, 5, 6, 9, 13, 14, 15, 18, 20, 21, 23, 25, 26) starting at: 3
+880c946
< locals: value args, variable result, value e, value ex6, value x4, value x5, value message, value x
---
> locals: value args, variable result, value e, value ex6, value x4, value x5, value x
-888c957
+882c948
< blocks: [1,2,3,6,7,8,11,13,14,16]
---
> blocks: [1,2,3,6,7,8,11,13,14,16,17]
-912c981,988
+906c972,979
< 124 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
@@ -261,29 +263,29 @@
> 122 STORE_LOCAL(value x4)
> 122 SCOPE_ENTER value x4
> 122 JUMP 7
-937,940d1012
+931,934d1003
< 127 LOAD_LOCAL(value x5)
< 127 CALL_METHOD MyException.message (dynamic)
< 127 STORE_LOCAL(value message)
< 127 SCOPE_ENTER value message
-942c1014,1015
+936c1005,1006
< 127 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 127 CALL_METHOD MyException.message (dynamic)
-971c1044
+965c1035
< catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16) starting at: 3
---
> catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16, 17) starting at: 3
-995c1068
+989c1059
< locals: value args, variable result, value ex6, value x4, value x5, value message, value x, value e
---
> locals: value args, variable result, value ex6, value x4, value x5, value x, value e
-997c1070
+991c1061
< blocks: [1,2,3,4,5,8,12,13,14,16]
---
> blocks: [1,2,3,5,8,12,13,14,16,17]
-1021c1094,1103
+1015c1085,1094
< 148 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
@@ -296,25 +298,25 @@
> 154 LOAD_LOCAL(value x4)
> 154 IS_INSTANCE REF(class MyException)
> 154 CZJUMP (BOOL)NE ? 5 : 8
-1042,1044d1123
+1036,1038d1114
< 145 JUMP 4
<
< 4:
-1054,1057d1132
+1048,1051d1123
< 154 LOAD_LOCAL(value x5)
< 154 CALL_METHOD MyException.message (dynamic)
< 154 STORE_LOCAL(value message)
< 154 SCOPE_ENTER value message
-1059c1134,1135
+1053c1125,1126
< 154 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 154 CALL_METHOD MyException.message (dynamic)
-1276c1352
+1270c1343
< blocks: [1,2,3,4,5,7]
---
> blocks: [1,2,3,4,5,7,8]
-1300c1376,1383
+1294c1367,1374
< 38 THROW(IllegalArgumentException)
---
> ? STORE_LOCAL(value e)
@@ -325,20 +327,20 @@
> 42 CONSTANT("IllegalArgumentException")
> 42 CALL_METHOD scala.Predef.println (dynamic)
> 42 JUMP 2
-1347c1430
+1341c1421
< locals: value args, variable result, value ex6, value x4, value x5, value message, value x
---
> locals: value args, variable result, value ex6, value x4, value x5, value x
-1349c1432
+1343c1423
< blocks: [1,2,3,4,5,8,10,11,13,14,16]
---
> blocks: [1,2,3,5,8,10,11,13,14,16,17]
-1373c1456,1457
+1367c1447,1448
< 203 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
> ? JUMP 17
-1393c1477,1486
+1387c1468,1477
< 209 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
@@ -351,41 +353,41 @@
> 212 LOAD_LOCAL(value x4)
> 212 IS_INSTANCE REF(class MyException)
> 212 CZJUMP (BOOL)NE ? 5 : 8
-1406,1408d1498
+1400,1402d1489
< 200 JUMP 4
<
< 4:
-1418,1421d1507
+1412,1415d1498
< 212 LOAD_LOCAL(value x5)
< 212 CALL_METHOD MyException.message (dynamic)
< 212 STORE_LOCAL(value message)
< 212 SCOPE_ENTER value message
-1423c1509,1510
+1417c1500,1501
< 213 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 213 CALL_METHOD MyException.message (dynamic)
-1467c1554
+1461c1545
< blocks: [1,2,3,4,5,7]
---
> blocks: [1,2,3,4,5,7,8]
-1491c1578,1579
+1485c1569,1570
< 58 THROW(IllegalArgumentException)
---
> ? STORE_LOCAL(value e)
> ? JUMP 8
-1492a1581,1586
+1486a1572,1577
> 8:
> 62 LOAD_MODULE object Predef
> 62 CONSTANT("RuntimeException")
> 62 CALL_METHOD scala.Predef.println (dynamic)
> 62 JUMP 2
>
-1540c1634
-< blocks: [1,2,3,4]
+1534c1625
+< blocks: [1,3,4]
---
-> blocks: [1,2,3,4,5]
-1560c1654,1659
+> blocks: [1,3,4,5]
+1554c1645,1650
< 229 THROW(MyException)
---
> ? JUMP 5
@@ -394,19 +396,19 @@
> ? LOAD_LOCAL(variable monitor1)
> 228 MONITOR_EXIT
> 228 THROW(Throwable)
-1566c1665
+1560c1656
< ? THROW(Throwable)
---
> 228 THROW(Throwable)
-1594c1693
+1588c1684
< locals: value args, variable result, variable monitor2, variable monitorResult1
---
> locals: value exception$1, value args, variable result, variable monitor2, variable monitorResult1
-1596c1695
-< blocks: [1,2,3,4]
+1590c1686
+< blocks: [1,3,4]
---
-> blocks: [1,2,3,4,5]
-1619c1718,1726
+> blocks: [1,3,4,5]
+1613c1709,1717
< 245 THROW(MyException)
---
> ? STORE_LOCAL(value exception$1)
@@ -418,7 +420,7 @@
> ? LOAD_LOCAL(variable monitor2)
> 244 MONITOR_EXIT
> 244 THROW(Throwable)
-1625c1732
+1619c1723
< ? THROW(Throwable)
---
> 244 THROW(Throwable)
diff --git a/test/files/run/inline-ex-handlers.scala b/test/files/run/inline-ex-handlers.scala
index a96b938e13..33e794b940 100644
--- a/test/files/run/inline-ex-handlers.scala
+++ b/test/files/run/inline-ex-handlers.scala
@@ -1,7 +1,7 @@
import scala.tools.partest.IcodeTest
object Test extends IcodeTest {
- override def printIcodeAfterPhase = "inlineExceptionHandlers"
+ override def printIcodeAfterPhase = "inlinehandlers"
}
import scala.util.Random._
diff --git a/test/files/run/iterator-from.scala b/test/files/run/iterator-from.scala
new file mode 100644
index 0000000000..8dc6ae4e51
--- /dev/null
+++ b/test/files/run/iterator-from.scala
@@ -0,0 +1,69 @@
+// This file tests iteratorFrom, keysIteratorFrom, and valuesIteratorFrom on various sorted sets and maps
+
+import scala.util.{Random => R}
+import scala.collection._
+import scala.math.Ordered
+
+object Test extends App {
+ val maxLength = 25
+ val maxKey = 50
+ val maxValue = 50
+
+ def testSet[A <% Ordered[A]](set: SortedSet[A], list: List[A]) {
+ val distinctSorted = list.distinct.sorted
+ assertEquals("Set size wasn't the same as list sze", set.size, distinctSorted.size)
+
+ for(key <- distinctSorted) {
+ val clazz = set.getClass
+ val iteratorFrom = (set iteratorFrom key).toList
+ check(clazz, list, s"set iteratorFrom $key", s"(set from $key).iterator", iteratorFrom, (set from key).iterator.toList)
+ check(clazz, list, s"set.iteratorFrom $key", s"distinctSorted dropWhile (_ < $key)", iteratorFrom, distinctSorted dropWhile (_ < key))
+ check(clazz, list, s"set iteratorFrom $key", s"set keysIterator from $key", iteratorFrom, (set keysIteratorFrom key).toList)
+ }
+ }
+
+ def testMap[A <% Ordered[A], B](map: SortedMap[A, B], list: List[(A, B)]) {
+ val distinctSorted = distinctByKey(list).sortBy(_._1)
+ assertEquals("Map size wasn't the same as list sze", map.size, distinctSorted.size)
+
+ for(keyValue <- distinctSorted) {
+ val key = keyValue._1
+ val clazz = map.getClass
+ val iteratorFrom = (map iteratorFrom key).toList
+ check(clazz, list, s"map iteratorFrom $key", s"(map from $key).iterator", iteratorFrom, (map from key).iterator.toList)
+ check(clazz, list, s"map iteratorFrom $key", s"distinctSorted dropWhile (_._1 < $key)", iteratorFrom, distinctSorted dropWhile (_._1 < key))
+ check(clazz, list, s"map iteratorFrom $key map (_._1)", s"map keysIteratorFrom $key", iteratorFrom map (_._1), (map keysIteratorFrom key).toList)
+ check(clazz, list, s"map iteratorFrom $key map (_._2)", s"map valuesIteratorFrom $key", iteratorFrom map (_._2), (map valuesIteratorFrom key).toList)
+ }
+ }
+
+ def check[A](clazz: Class[_], list: List[_], m1: String, m2: String, l1: List[A], l2: List[A]) {
+ assertEquals(s"$clazz: `$m1` didn't match `$m2` on list $list", l1, l2)
+ }
+
+ def assertEquals[A](msg: String, x: A, y: A) {
+ assert(x == y, s"$msg\n1: $x\n2: $y")
+ }
+
+ def distinctByKey[A,B](list: List[(A, B)]) : List[(A,B)] = list.groupBy(_._1).map(_._2.last).toList
+
+ object Weekday extends Enumeration {
+ type Weekday = Value
+ val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value
+ }
+
+ 0 until maxLength foreach {length =>
+ val keyValues = (0 until length map {_ => (R nextInt maxKey, R nextInt maxValue)}).toList
+ val keys = keyValues map (_._2)
+ testSet(immutable.BitSet(keys:_*), keys)
+ testSet(immutable.TreeSet(keys:_*), keys)
+ testSet(mutable.TreeSet(keys:_*), keys)
+ val days = keys map {n => Weekday(n % Weekday.values.size)}
+ testSet(Weekday.ValueSet(days:_*), days)
+
+ val treeMap = immutable.TreeMap(keyValues:_*)
+ testMap(treeMap, keyValues)
+ testMap(treeMap.filterKeys(_ % 2 == 0), keyValues filter (_._1 % 2 == 0))
+ testMap(treeMap mapValues (_ + 1), keyValues map {case (k,v) => (k, v + 1)})
+ }
+}
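The iterator-from test checks that `iteratorFrom`, `keysIteratorFrom`, and `valuesIteratorFrom` start at the first element whose key is at least the given key, matching both `from(key)` and a `dropWhile`. A minimal sketch of those calls on the standard sorted collections exercised by the test (object name illustrative only):

    import scala.collection.immutable.{TreeMap, TreeSet}

    // Sketch only: iteration starts at the first key >= the argument.
    object IteratorFromSketch extends App {
      val set = TreeSet(1, 3, 5, 7)
      println(set.iteratorFrom(4).toList)        // List(5, 7)

      val map = TreeMap(1 -> "a", 3 -> "b", 5 -> "c")
      println(map.iteratorFrom(2).toList)        // List((3,b), (5,c))
      println(map.keysIteratorFrom(2).toList)    // List(3, 5)
      println(map.valuesIteratorFrom(2).toList)  // List(b, c)
    }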
diff --git a/test/files/run/lazy-locals.scala b/test/files/run/lazy-locals.scala
index aca15d0357..8d4c61be8c 100644
--- a/test/files/run/lazy-locals.scala
+++ b/test/files/run/lazy-locals.scala
@@ -120,7 +120,7 @@ object Test extends App {
t
}
- /** test recursive method with lazy vals and a all vals forced */
+ /** test recursive method with lazy vals and all vals forced */
def testLazyRecMany(n: Int): Int = {
lazy val t = { println("forced lazy val t at n = " + n); 42 }
if (n > 0) {
diff --git a/test/files/run/longmap.check b/test/files/run/longmap.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/run/longmap.check
diff --git a/test/files/run/longmap.scala b/test/files/run/longmap.scala
new file mode 100644
index 0000000000..1f18eebd31
--- /dev/null
+++ b/test/files/run/longmap.scala
@@ -0,0 +1,8 @@
+object Test extends App{
+ import scala.collection.immutable.LongMap;
+
+ val it = LongMap(8L -> 2, 11L -> 3, 1L -> 2, 7L -> 13);
+
+ assert(it.firstKey == 1L);
+ assert(it.lastKey == 11L);
+}
diff --git a/test/files/run/lub-visibility.check b/test/files/run/lub-visibility.check
index 3461d1bf6b..f3a6bef215 100644
--- a/test/files/run/lub-visibility.check
+++ b/test/files/run/lub-visibility.check
@@ -8,7 +8,7 @@ scala> // should infer List[scala.collection.immutable.Seq[Nothing]]
scala> // but reverted that for SI-5534.
scala> val x = List(List(), Vector())
-x: List[scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq{def dropRight(n: Int): scala.collection.immutable.Seq[Any] with scala.collection.AbstractSeq[Any]; def takeRight(n: Int): scala.collection.immutable.Seq[Any] with scala.collection.AbstractSeq[Any]; def drop(n: Int): scala.collection.immutable.Seq[Any] with scala.collection.AbstractSeq[Any]; def take(n: Int): scala.collection.immutable.Seq[Any] with scala.collection.AbstractSeq[Any]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[Any] with scala.collection.AbstractSeq[Any]}]; def dropRight(n: Int): scala.collection.immutable.Seq[Nothing] with scala.collection.Ab...
+x: List[scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing]{def dropRight(n: Int): scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing]; def takeRight(n: Int): scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing]; def drop(n: Int): scala.collecti...
scala>
scala>
diff --git a/test/files/run/macro-abort-fresh/Macros_1.scala b/test/files/run/macro-abort-fresh/Macros_1.scala
index af1e292588..415b76852f 100644
--- a/test/files/run/macro-abort-fresh/Macros_1.scala
+++ b/test/files/run/macro-abort-fresh/Macros_1.scala
@@ -5,7 +5,7 @@ object Impls {
import c.universe._
println(c.fresh())
println(c.fresh("qwe"))
- println(c.fresh(newTypeName("qwe")))
+ println(c.fresh(TypeName("qwe")))
c.abort(NoPosition, "blargh")
}
}
diff --git a/test/files/run/macro-abort-fresh/Test_2.scala b/test/files/run/macro-abort-fresh/Test_2.scala
index 0b9986e9f6..61f0bdfadc 100644
--- a/test/files/run/macro-abort-fresh/Test_2.scala
+++ b/test/files/run/macro-abort-fresh/Test_2.scala
@@ -2,7 +2,7 @@ object Test extends App {
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
- val tree = Select(Ident(newTermName("Macros")), newTermName("foo"))
+ val tree = Select(Ident(TermName("Macros")), TermName("foo"))
try cm.mkToolBox().eval(tree)
catch { case ex: Throwable => println(ex.getMessage) }
} \ No newline at end of file
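Most of the macro test diffs in this patch apply one mechanical migration: the deprecated name factories newTermName/newTypeName are replaced by the TermName/TypeName constructors, which build the same names. A sketch, assuming a Scala version in which both spellings are still available:

    import scala.reflect.runtime.universe._

    object NameMigrationSketch extends App {
      val oldTerm = newTermName("foo")  // deprecated spelling
      val newTerm = TermName("foo")     // preferred spelling
      assert(oldTerm == newTerm)
      assert(newTypeName("Bar") == TypeName("Bar"))
    }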
diff --git a/test/files/run/macro-basic-ma-md-mi/Impls_1.scala b/test/files/run/macro-basic-ma-md-mi/Impls_1.scala
index 646634c972..ce30366c61 100644
--- a/test/files/run/macro-basic-ma-md-mi/Impls_1.scala
+++ b/test/files/run/macro-basic-ma-md-mi/Impls_1.scala
@@ -3,19 +3,19 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+ val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1))))
c.Expr[Int](body)
}
def bar(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2))))
+ val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(2))))
c.Expr[Int](body)
}
def quux(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3))))
+ val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(3))))
c.Expr[Int](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-basic-ma-mdmi/Impls_Macros_1.scala b/test/files/run/macro-basic-ma-mdmi/Impls_Macros_1.scala
index aa1e52e4aa..a601af6dde 100644
--- a/test/files/run/macro-basic-ma-mdmi/Impls_Macros_1.scala
+++ b/test/files/run/macro-basic-ma-mdmi/Impls_Macros_1.scala
@@ -3,19 +3,19 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+ val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1))))
c.Expr[Int](body)
}
def bar(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2))))
+ val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(2))))
c.Expr[Int](body)
}
def quux(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3))))
+ val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(3))))
c.Expr[Int](body)
}
}
diff --git a/test/files/run/macro-basic-mamd-mi/Impls_1.scala b/test/files/run/macro-basic-mamd-mi/Impls_1.scala
index 061aa2d4a3..6e5983bdec 100644
--- a/test/files/run/macro-basic-mamd-mi/Impls_1.scala
+++ b/test/files/run/macro-basic-mamd-mi/Impls_1.scala
@@ -3,17 +3,17 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1)))))
+ c.Expr(Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1)))))
}
def bar(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2)))))
+ c.Expr(Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(2)))))
}
def quux(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3))))
+ val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(3))))
c.Expr[Int](body)
}
} \ No newline at end of file
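The hand-built tree in these impls, Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1)))), is simply the desugared AST for x + 1. A sketch that makes the correspondence visible with the runtime universe (the output noted in comments is approximate):

    import scala.reflect.runtime.universe._

    object PlusTreeSketch extends App {
      val x    = Literal(Constant(40))
      val tree = Apply(Select(x, TermName("$plus")), List(Literal(Constant(2))))
      println(show(tree))    // roughly: 40.$plus(2), i.e. 40 + 2
      println(showRaw(tree)) // the raw Apply/Select/Literal structure built above
    }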
diff --git a/test/files/run/macro-bodyexpandstoimpl/Impls_1.scala b/test/files/run/macro-bodyexpandstoimpl/Impls_1.scala
index 0ca0be5a48..9c1e4ee46d 100644
--- a/test/files/run/macro-bodyexpandstoimpl/Impls_1.scala
+++ b/test/files/run/macro-bodyexpandstoimpl/Impls_1.scala
@@ -6,7 +6,7 @@ object Impls {
def refToFoo(dummy: Int) = macro refToFoo_impl
def refToFoo_impl(c: Ctx)(dummy: c.Expr[Int]) = {
import c.universe._
- val body = Select(Ident(newTermName("Impls")), newTermName("foo"))
+ val body = Select(Ident(TermName("Impls")), TermName("foo"))
c.Expr[Int](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-declared-in-annotation/Impls_1.scala b/test/files/run/macro-declared-in-annotation/Impls_1.scala
index a11ee2907a..1ea06de679 100644
--- a/test/files/run/macro-declared-in-annotation/Impls_1.scala
+++ b/test/files/run/macro-declared-in-annotation/Impls_1.scala
@@ -4,7 +4,7 @@ object Impls {
def foo(c: Ctx) = {
import c.{prefix => prefix}
import c.universe._
- val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
val body = Block(List(printPrefix), Literal(Constant("this is deprecated")))
c.Expr[String](body)
}
diff --git a/test/files/run/macro-declared-in-anonymous/Impls_1.scala b/test/files/run/macro-declared-in-anonymous/Impls_1.scala
index 6f06f6d3f0..348f3420f2 100644
--- a/test/files/run/macro-declared-in-anonymous/Impls_1.scala
+++ b/test/files/run/macro-declared-in-anonymous/Impls_1.scala
@@ -4,8 +4,8 @@ object Impls {
def foo(c: Ctx) = {
import c.{prefix => prefix}
import c.universe._
- val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
- val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-declared-in-block/Impls_1.scala b/test/files/run/macro-declared-in-block/Impls_1.scala
index 6f06f6d3f0..348f3420f2 100644
--- a/test/files/run/macro-declared-in-block/Impls_1.scala
+++ b/test/files/run/macro-declared-in-block/Impls_1.scala
@@ -4,8 +4,8 @@ object Impls {
def foo(c: Ctx) = {
import c.{prefix => prefix}
import c.universe._
- val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
- val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class-class/Impls_1.scala b/test/files/run/macro-declared-in-class-class/Impls_1.scala
index 6f06f6d3f0..348f3420f2 100644
--- a/test/files/run/macro-declared-in-class-class/Impls_1.scala
+++ b/test/files/run/macro-declared-in-class-class/Impls_1.scala
@@ -4,8 +4,8 @@ object Impls {
def foo(c: Ctx) = {
import c.{prefix => prefix}
import c.universe._
- val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
- val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class-object/Impls_1.scala b/test/files/run/macro-declared-in-class-object/Impls_1.scala
index 6f06f6d3f0..348f3420f2 100644
--- a/test/files/run/macro-declared-in-class-object/Impls_1.scala
+++ b/test/files/run/macro-declared-in-class-object/Impls_1.scala
@@ -4,8 +4,8 @@ object Impls {
def foo(c: Ctx) = {
import c.{prefix => prefix}
import c.universe._
- val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
- val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class/Impls_1.scala b/test/files/run/macro-declared-in-class/Impls_1.scala
index 6f06f6d3f0..348f3420f2 100644
--- a/test/files/run/macro-declared-in-class/Impls_1.scala
+++ b/test/files/run/macro-declared-in-class/Impls_1.scala
@@ -4,8 +4,8 @@ object Impls {
def foo(c: Ctx) = {
import c.{prefix => prefix}
import c.universe._
- val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
- val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-declared-in-default-param/Impls_1.scala b/test/files/run/macro-declared-in-default-param/Impls_1.scala
index db1e5c7435..4380f40b04 100644
--- a/test/files/run/macro-declared-in-default-param/Impls_1.scala
+++ b/test/files/run/macro-declared-in-default-param/Impls_1.scala
@@ -4,7 +4,7 @@ object Impls {
def foo(c: Ctx) = {
import c.{prefix => prefix}
import c.universe._
- val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
val body = Block(List(printPrefix), Literal(Constant("it works")))
c.Expr[String](body)
}
diff --git a/test/files/run/macro-declared-in-implicit-class/Impls_Macros_1.scala b/test/files/run/macro-declared-in-implicit-class/Impls_Macros_1.scala
index 837b306976..4c009cc367 100644
--- a/test/files/run/macro-declared-in-implicit-class/Impls_Macros_1.scala
+++ b/test/files/run/macro-declared-in-implicit-class/Impls_Macros_1.scala
@@ -4,8 +4,8 @@ object Impls {
def toOptionOfInt(c: Ctx) = {
import c.{prefix => prefix}
import c.universe._
- val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
- val body = Block(List(printPrefix), Apply(Ident(definitions.SomeModule), List(Select(Select(prefix.tree, newTermName("x")), newTermName("toInt")))))
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Ident(definitions.SomeModule), List(Select(Select(prefix.tree, TermName("x")), TermName("toInt")))))
c.Expr[Option[Int]](body)
}
}
diff --git a/test/files/run/macro-declared-in-method/Impls_1.scala b/test/files/run/macro-declared-in-method/Impls_1.scala
index 6f06f6d3f0..348f3420f2 100644
--- a/test/files/run/macro-declared-in-method/Impls_1.scala
+++ b/test/files/run/macro-declared-in-method/Impls_1.scala
@@ -4,8 +4,8 @@ object Impls {
def foo(c: Ctx) = {
import c.{prefix => prefix}
import c.universe._
- val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
- val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object-class/Impls_1.scala b/test/files/run/macro-declared-in-object-class/Impls_1.scala
index 6f06f6d3f0..348f3420f2 100644
--- a/test/files/run/macro-declared-in-object-class/Impls_1.scala
+++ b/test/files/run/macro-declared-in-object-class/Impls_1.scala
@@ -4,8 +4,8 @@ object Impls {
def foo(c: Ctx) = {
import c.{prefix => prefix}
import c.universe._
- val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
- val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object-object/Impls_1.scala b/test/files/run/macro-declared-in-object-object/Impls_1.scala
index 6f06f6d3f0..348f3420f2 100644
--- a/test/files/run/macro-declared-in-object-object/Impls_1.scala
+++ b/test/files/run/macro-declared-in-object-object/Impls_1.scala
@@ -4,8 +4,8 @@ object Impls {
def foo(c: Ctx) = {
import c.{prefix => prefix}
import c.universe._
- val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
- val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object/Impls_1.scala b/test/files/run/macro-declared-in-object/Impls_1.scala
index 6f06f6d3f0..348f3420f2 100644
--- a/test/files/run/macro-declared-in-object/Impls_1.scala
+++ b/test/files/run/macro-declared-in-object/Impls_1.scala
@@ -4,8 +4,8 @@ object Impls {
def foo(c: Ctx) = {
import c.{prefix => prefix}
import c.universe._
- val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
- val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-declared-in-package-object/Impls_1.scala b/test/files/run/macro-declared-in-package-object/Impls_1.scala
index 6f06f6d3f0..348f3420f2 100644
--- a/test/files/run/macro-declared-in-package-object/Impls_1.scala
+++ b/test/files/run/macro-declared-in-package-object/Impls_1.scala
@@ -4,8 +4,8 @@ object Impls {
def foo(c: Ctx) = {
import c.{prefix => prefix}
import c.universe._
- val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
- val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-declared-in-refinement/Impls_1.scala b/test/files/run/macro-declared-in-refinement/Impls_1.scala
index 6f06f6d3f0..348f3420f2 100644
--- a/test/files/run/macro-declared-in-refinement/Impls_1.scala
+++ b/test/files/run/macro-declared-in-refinement/Impls_1.scala
@@ -4,8 +4,8 @@ object Impls {
def foo(c: Ctx) = {
import c.{prefix => prefix}
import c.universe._
- val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
- val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-declared-in-trait/Impls_1.scala b/test/files/run/macro-declared-in-trait/Impls_1.scala
index 6f06f6d3f0..348f3420f2 100644
--- a/test/files/run/macro-declared-in-trait/Impls_1.scala
+++ b/test/files/run/macro-declared-in-trait/Impls_1.scala
@@ -4,8 +4,8 @@ object Impls {
def foo(c: Ctx) = {
import c.{prefix => prefix}
import c.universe._
- val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
- val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works")))))
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-b/Test_2.scala b/test/files/run/macro-def-infer-return-type-b/Test_2.scala
index ea0fd4bbff..9e57b90b57 100644
--- a/test/files/run/macro-def-infer-return-type-b/Test_2.scala
+++ b/test/files/run/macro-def-infer-return-type-b/Test_2.scala
@@ -2,7 +2,7 @@ object Test extends App {
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
- val tree = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant(42))))
+ val tree = Apply(Select(Ident(TermName("Macros")), TermName("foo")), List(Literal(Constant(42))))
try cm.mkToolBox().eval(tree)
catch { case ex: Throwable => println(ex.getMessage) }
}
diff --git a/test/files/run/macro-duplicate.check b/test/files/run/macro-duplicate.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/run/macro-duplicate.check
diff --git a/test/files/run/macro-duplicate.flags b/test/files/run/macro-duplicate.flags
new file mode 100644
index 0000000000..cd66464f2f
--- /dev/null
+++ b/test/files/run/macro-duplicate.flags
@@ -0,0 +1 @@
+-language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-duplicate/Impls_Macros_1.scala b/test/files/run/macro-duplicate/Impls_Macros_1.scala
new file mode 100644
index 0000000000..af80147a90
--- /dev/null
+++ b/test/files/run/macro-duplicate/Impls_Macros_1.scala
@@ -0,0 +1,29 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ val Expr(Block((cdef: ClassDef) :: Nil, _)) = reify { class C { def x = 2 } }
+ val cdef1 =
+ new Transformer {
+ override def transform(tree: Tree): Tree = tree match {
+ case Template(_, _, ctor :: defs) =>
+ val defs1 = defs collect {
+ case ddef @ DefDef(mods, name, tparams, vparamss, tpt, body) =>
+ val future = Select(Select(Select(Ident(TermName("scala")), TermName("concurrent")), TermName("package")), TermName("future"))
+ val Future = Select(Select(Ident(TermName("scala")), TermName("concurrent")), TypeName("Future"))
+ val tpt1 = if (tpt.isEmpty) tpt else AppliedTypeTree(Future, List(tpt))
+ val body1 = Apply(future, List(body))
+ val name1 = TermName("async" + name.toString.capitalize)
+ DefDef(mods, name1, tparams, vparamss, tpt1, body1)
+ }
+ Template(Nil, emptyValDef, ctor +: defs ::: defs1)
+ case _ =>
+ super.transform(tree)
+ }
+ } transform cdef
+ c.Expr[Unit](Block(cdef1 :: Nil, Literal(Constant(()))))
+ }
+
+ def foo = macro impl
+} \ No newline at end of file
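The Transformer in this new macro-duplicate test rewrites the reified class C so that every DefDef gains an async twin whose body is wrapped in scala.concurrent.future and whose declared result type, when present, is wrapped in Future. A hand-written sketch of the expansion for class C { def x = 2 }, assuming the pre-2.12 future helper:

    import scala.concurrent._
    import ExecutionContext.Implicits.global

    class CExpanded {
      def x = 2
      def asyncX = future(2) // inferred as Future[Int]; the tpt stays empty, as in the original def
    }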
diff --git a/test/files/run/macro-duplicate/Test_2.scala b/test/files/run/macro-duplicate/Test_2.scala
new file mode 100644
index 0000000000..6dbd4382d3
--- /dev/null
+++ b/test/files/run/macro-duplicate/Test_2.scala
@@ -0,0 +1,6 @@
+import scala.concurrent._
+import ExecutionContext.Implicits.global
+
+object Test extends App {
+ Macros.foo
+} \ No newline at end of file
diff --git a/test/files/run/macro-enclosures.check b/test/files/run/macro-enclosures.check
new file mode 100644
index 0000000000..36bb67e194
--- /dev/null
+++ b/test/files/run/macro-enclosures.check
@@ -0,0 +1,32 @@
+enclosingPackage = package test {
+ object Test extends scala.AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ def test = Macros.foo
+ }
+}
+enclosingClass = object Test extends scala.AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ def test = Macros.foo
+}
+enclosingImpl = object Test extends scala.AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ def test = Macros.foo
+}
+enclosingTemplate = scala.AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ def test = Macros.foo
+}
+enclosingMethod = def test = Macros.foo
+enclosingDef = def test = Macros.foo
diff --git a/test/files/run/macro-enclosures.flags b/test/files/run/macro-enclosures.flags
new file mode 100644
index 0000000000..cd66464f2f
--- /dev/null
+++ b/test/files/run/macro-enclosures.flags
@@ -0,0 +1 @@
+-language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-enclosures/Impls_Macros_1.scala b/test/files/run/macro-enclosures/Impls_Macros_1.scala
new file mode 100644
index 0000000000..cd54028676
--- /dev/null
+++ b/test/files/run/macro-enclosures/Impls_Macros_1.scala
@@ -0,0 +1,14 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c: Context) = c.universe.reify {
+ println("enclosingPackage = " + c.literal(c.enclosingPackage.toString).splice)
+ println("enclosingClass = " + c.literal(c.enclosingClass.toString).splice)
+ println("enclosingImpl = " + c.literal(c.enclosingImpl.toString).splice)
+ println("enclosingTemplate = " + c.literal(c.enclosingTemplate.toString).splice)
+ println("enclosingMethod = " + c.literal(c.enclosingMethod.toString).splice)
+ println("enclosingDef = " + c.literal(c.enclosingDef.toString).splice)
+ }
+
+ def foo = macro impl
+} \ No newline at end of file
diff --git a/test/files/run/macro-enclosures/Test_2.scala b/test/files/run/macro-enclosures/Test_2.scala
new file mode 100644
index 0000000000..779fe5211e
--- /dev/null
+++ b/test/files/run/macro-enclosures/Test_2.scala
@@ -0,0 +1,11 @@
+object Test extends App {
+ test.Test.test
+}
+
+package test {
+ object Test {
+ def test = {
+ Macros.foo
+ }
+ }
+} \ No newline at end of file
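The expected output in macro-enclosures.check above is what the macro reports when Macros.foo expands inside def test of object Test in package test: the enclosing method is def test, the enclosing class/impl is object Test, and the enclosing package is the whole test package. The macro itself relies on the reify / c.literal / splice pattern, roughly as in this trimmed-down sketch:

    import language.experimental.macros
    import scala.reflect.macros.Context

    object EnclosureSketch {
      def impl(c: Context) = c.universe.reify {
        // the enclosing tree is rendered at expansion time and printed at run time
        println("enclosingMethod = " + c.literal(c.enclosingMethod.toString).splice)
      }
      def where = macro impl
    }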
diff --git a/test/files/run/macro-expand-implicit-argument/Macros_1.scala b/test/files/run/macro-expand-implicit-argument/Macros_1.scala
index d9fd5b8cb0..b2c7b4d6ca 100644
--- a/test/files/run/macro-expand-implicit-argument/Macros_1.scala
+++ b/test/files/run/macro-expand-implicit-argument/Macros_1.scala
@@ -41,14 +41,14 @@ object Macros {
def const(x:Int) = Literal(Constant(x))
val n = as.length
- val arr = newTermName("arr")
+ val arr = TermName("arr")
- val create = Apply(Select(ct.tree, newTermName("newArray")), List(const(n)))
+ val create = Apply(Select(ct.tree, TermName("newArray")), List(const(n)))
val arrtpe = TypeTree(implicitly[c.WeakTypeTag[Array[A]]].tpe)
val valdef = ValDef(Modifiers(), arr, arrtpe, create)
val updates = (0 until n).map {
- i => Apply(Select(Ident(arr), newTermName("update")), List(const(i), as(i).tree))
+ i => Apply(Select(Ident(arr), TermName("update")), List(const(i), as(i).tree))
}
val exprs = (Seq(valdef) ++ updates ++ Seq(Ident(arr))).toList
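The implicit-argument macro above assembles, tree by tree, the code that allocates an array through the implicit ClassTag and fills it slot by slot. A plain run-time equivalent of that expansion (a sketch; createArray is a made-up name for whatever macro fronts these impls):

    import scala.reflect.ClassTag

    object ArrayMacroSketch extends App {
      def createArray[A: ClassTag](as: A*): Array[A] = {
        val arr = implicitly[ClassTag[A]].newArray(as.length)   // the `create` tree
        var i = 0
        while (i < as.length) { arr.update(i, as(i)); i += 1 }  // the `updates` trees
        arr                                                     // the trailing Ident(arr)
      }
      println(createArray(1, 2, 3).toList) // List(1, 2, 3)
    }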
diff --git a/test/files/run/macro-expand-implicit-macro-has-implicit/Impls_1.scala b/test/files/run/macro-expand-implicit-macro-has-implicit/Impls_1.scala
index 082e6b2efe..ac1e55c9b2 100644
--- a/test/files/run/macro-expand-implicit-macro-has-implicit/Impls_1.scala
+++ b/test/files/run/macro-expand-implicit-macro-has-implicit/Impls_1.scala
@@ -3,7 +3,7 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx)(x: c.Expr[Int]) = {
import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(x.tree))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(x.tree))
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-macro-is-implicit/Impls_1.scala b/test/files/run/macro-expand-implicit-macro-is-implicit/Impls_1.scala
index cceb038f05..aa1fc7a358 100644
--- a/test/files/run/macro-expand-implicit-macro-is-implicit/Impls_1.scala
+++ b/test/files/run/macro-expand-implicit-macro-is-implicit/Impls_1.scala
@@ -3,7 +3,7 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx)(x: c.Expr[String]): c.Expr[Option[Int]] = {
import c.universe._
- val body = Apply(Ident(definitions.SomeModule), List(Select(x.tree, newTermName("toInt"))))
+ val body = Apply(Ident(definitions.SomeModule), List(Select(x.tree, TermName("toInt"))))
c.Expr[Option[Int]](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-macro-is-view/Impls_1.scala b/test/files/run/macro-expand-implicit-macro-is-view/Impls_1.scala
index cceb038f05..aa1fc7a358 100644
--- a/test/files/run/macro-expand-implicit-macro-is-view/Impls_1.scala
+++ b/test/files/run/macro-expand-implicit-macro-is-view/Impls_1.scala
@@ -3,7 +3,7 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx)(x: c.Expr[String]): c.Expr[Option[Int]] = {
import c.universe._
- val body = Apply(Ident(definitions.SomeModule), List(Select(x.tree, newTermName("toInt"))))
+ val body = Apply(Ident(definitions.SomeModule), List(Select(x.tree, TermName("toInt"))))
c.Expr[Option[Int]](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-expand-multiple-arglists/Impls_1.scala b/test/files/run/macro-expand-multiple-arglists/Impls_1.scala
index 11e07932c3..4fddc13d68 100644
--- a/test/files/run/macro-expand-multiple-arglists/Impls_1.scala
+++ b/test/files/run/macro-expand-multiple-arglists/Impls_1.scala
@@ -3,8 +3,8 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx)(x: c.Expr[Int])(y: c.Expr[Int]) = {
import c.universe._
- val sum = Apply(Select(x.tree, newTermName("$minus")), List(y.tree))
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(sum))
+ val sum = Apply(Select(x.tree, TermName("$minus")), List(y.tree))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(sum))
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-expand-nullary-generic/Impls_1.scala b/test/files/run/macro-expand-nullary-generic/Impls_1.scala
index 1180c83a40..39a9db0e14 100644
--- a/test/files/run/macro-expand-nullary-generic/Impls_1.scala
+++ b/test/files/run/macro-expand-nullary-generic/Impls_1.scala
@@ -4,7 +4,7 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def impl[T: c.WeakTypeTag](c: Ctx) = {
import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works " + implicitly[c.WeakTypeTag[T]]))))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works " + implicitly[c.WeakTypeTag[T]]))))
c.Expr[Unit](body)
}
diff --git a/test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala b/test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala
index c6bd1cdbf1..41e50acc86 100644
--- a/test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala
+++ b/test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala
@@ -3,7 +3,7 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def impl(c: Ctx) = {
import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works"))))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works"))))
c.Expr[Unit](body)
}
diff --git a/test/files/run/macro-expand-overload/Impls_1.scala b/test/files/run/macro-expand-overload/Impls_1.scala
index f7c240d9ca..1c672f6040 100644
--- a/test/files/run/macro-expand-overload/Impls_1.scala
+++ b/test/files/run/macro-expand-overload/Impls_1.scala
@@ -4,7 +4,7 @@ object Impls {
def impl(c: Ctx)(tag: String, x: c.Expr[_]) = {
import c.{prefix => prefix}
import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(tag)), Literal(Constant(prefix.toString)), x.tree))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(tag)), Literal(Constant(prefix.toString)), x.tree))
c.Expr[Unit](body)
}
diff --git a/test/files/run/macro-expand-override/Impls_1.scala b/test/files/run/macro-expand-override/Impls_1.scala
index ec93dd4111..69ef57d18d 100644
--- a/test/files/run/macro-expand-override/Impls_1.scala
+++ b/test/files/run/macro-expand-override/Impls_1.scala
@@ -4,7 +4,7 @@ object Impls {
def impl(c: Ctx)(tag: String, x: c.Expr[_]) = {
import c.{prefix => prefix}
import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(tag)), Literal(Constant(prefix.toString)), x.tree))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(tag)), Literal(Constant(prefix.toString)), x.tree))
c.Expr[Unit](body)
}
diff --git a/test/files/run/macro-expand-recursive/Impls_1.scala b/test/files/run/macro-expand-recursive/Impls_1.scala
index 61db5c4a9b..47dd398454 100644
--- a/test/files/run/macro-expand-recursive/Impls_1.scala
+++ b/test/files/run/macro-expand-recursive/Impls_1.scala
@@ -3,13 +3,13 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx) = {
import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works"))))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works"))))
c.Expr[Unit](body)
}
def fooFoo(c: Ctx) = {
import c.universe._
- val body = Select(Ident(newTermName("Macros")), newTermName("foo"))
+ val body = Select(Ident(TermName("Macros")), TermName("foo"))
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-explicit/Impls_1.scala b/test/files/run/macro-expand-tparams-explicit/Impls_1.scala
index 72b420d92f..f748ab855f 100644
--- a/test/files/run/macro-expand-tparams-explicit/Impls_1.scala
+++ b/test/files/run/macro-expand-tparams-explicit/Impls_1.scala
@@ -5,7 +5,7 @@ object Impls {
def foo[U: c.WeakTypeTag](c: Ctx) = {
import c.universe._
val U = implicitly[c.WeakTypeTag[U]]
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(U.toString))))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(U.toString))))
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-implicit/Impls_1.scala b/test/files/run/macro-expand-tparams-implicit/Impls_1.scala
index 33770516df..c729aada51 100644
--- a/test/files/run/macro-expand-tparams-implicit/Impls_1.scala
+++ b/test/files/run/macro-expand-tparams-implicit/Impls_1.scala
@@ -5,7 +5,7 @@ object Impls {
def foo[U: c.WeakTypeTag](c: Ctx)(x: c.Expr[U]) = {
import c.universe._
val U = implicitly[c.WeakTypeTag[U]]
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(U.toString))))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(U.toString))))
c.Expr[Unit](body)
}
} \ No newline at end of file
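These tparams tests only verify that the WeakTypeTag captured for a macro's type parameter describes the static type at the call site. A run-time analogue of what the impls print (exact toString formatting may differ between versions):

    import scala.reflect.runtime.universe._

    object TagSketch extends App {
      def describe[U: WeakTypeTag](x: U): String = weakTypeTag[U].toString
      println(describe(42))      // roughly: WeakTypeTag[Int]
      println(describe("hello")) // roughly: WeakTypeTag[String]
    }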
diff --git a/test/files/run/macro-expand-tparams-optional/Impls_1.scala b/test/files/run/macro-expand-tparams-optional/Impls_1.scala
index 3b829e2e09..ace7a6cd26 100644
--- a/test/files/run/macro-expand-tparams-optional/Impls_1.scala
+++ b/test/files/run/macro-expand-tparams-optional/Impls_1.scala
@@ -3,7 +3,7 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo[U](c: Ctx) = {
import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("don't know U"))))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("don't know U"))))
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-a/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix-a/Impls_1.scala
index 33770516df..c729aada51 100644
--- a/test/files/run/macro-expand-tparams-prefix-a/Impls_1.scala
+++ b/test/files/run/macro-expand-tparams-prefix-a/Impls_1.scala
@@ -5,7 +5,7 @@ object Impls {
def foo[U: c.WeakTypeTag](c: Ctx)(x: c.Expr[U]) = {
import c.universe._
val U = implicitly[c.WeakTypeTag[U]]
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(U.toString))))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(U.toString))))
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-b/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix-b/Impls_1.scala
index 9378e67712..8880d13b04 100644
--- a/test/files/run/macro-expand-tparams-prefix-b/Impls_1.scala
+++ b/test/files/run/macro-expand-tparams-prefix-b/Impls_1.scala
@@ -6,7 +6,7 @@ object Impls {
import c.universe._
val T = implicitly[c.WeakTypeTag[T]]
val U = implicitly[c.WeakTypeTag[U]]
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString + " " + U.toString))))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(T.toString + " " + U.toString))))
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-c1/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix-c1/Impls_1.scala
index afdd7d4f7a..2df42e969f 100644
--- a/test/files/run/macro-expand-tparams-prefix-c1/Impls_1.scala
+++ b/test/files/run/macro-expand-tparams-prefix-c1/Impls_1.scala
@@ -5,9 +5,9 @@ object Impls {
def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
import c.universe._
c.Expr(Block(List(
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString)))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(V.toString))))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(T.toString)))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(V.toString))))),
Literal(Constant(()))))
}
} \ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-c2/Impls_Macros_1.scala b/test/files/run/macro-expand-tparams-prefix-c2/Impls_Macros_1.scala
index 3c2838208a..08817708d4 100644
--- a/test/files/run/macro-expand-tparams-prefix-c2/Impls_Macros_1.scala
+++ b/test/files/run/macro-expand-tparams-prefix-c2/Impls_Macros_1.scala
@@ -5,9 +5,9 @@ object Impls {
def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
import c.universe._
c.Expr(Block(List(
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString)))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(V.toString))))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(T.toString)))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(V.toString))))),
Literal(Constant(()))))
}
}
diff --git a/test/files/run/macro-expand-tparams-prefix-d1/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix-d1/Impls_1.scala
index afdd7d4f7a..2df42e969f 100644
--- a/test/files/run/macro-expand-tparams-prefix-d1/Impls_1.scala
+++ b/test/files/run/macro-expand-tparams-prefix-d1/Impls_1.scala
@@ -5,9 +5,9 @@ object Impls {
def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
import c.universe._
c.Expr(Block(List(
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString)))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(V.toString))))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(T.toString)))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(V.toString))))),
Literal(Constant(()))))
}
} \ No newline at end of file
diff --git a/test/files/run/macro-expand-unapply-a.check b/test/files/run/macro-expand-unapply-a.check
new file mode 100644
index 0000000000..7c2976e51e
--- /dev/null
+++ b/test/files/run/macro-expand-unapply-a.check
@@ -0,0 +1,2 @@
+(1,2)
+(1,2,3)
diff --git a/test/files/run/macro-expand-unapply-a.flags b/test/files/run/macro-expand-unapply-a.flags
new file mode 100644
index 0000000000..cd66464f2f
--- /dev/null
+++ b/test/files/run/macro-expand-unapply-a.flags
@@ -0,0 +1 @@
+-language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-expand-unapply-a/Impls_Macros_1.scala b/test/files/run/macro-expand-unapply-a/Impls_Macros_1.scala
new file mode 100644
index 0000000000..61d6345f16
--- /dev/null
+++ b/test/files/run/macro-expand-unapply-a/Impls_Macros_1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.macros.Context
+
+object Helper {
+ def unapplySeq[T](x: List[T]): Option[Seq[T]] = List.unapplySeq[T](x)
+}
+
+object Macros {
+ def impl[T: c.WeakTypeTag](c: Context)(x: c.Expr[List[T]]) = {
+ c.universe.reify(Helper.unapplySeq(x.splice))
+ }
+
+ object UnapplyMacro {
+ def unapplySeq[T](x: List[T]): Option[Seq[T]] = macro impl[T]
+ }
+} \ No newline at end of file
diff --git a/test/files/run/macro-expand-unapply-a/Test_2.scala b/test/files/run/macro-expand-unapply-a/Test_2.scala
new file mode 100644
index 0000000000..6169d86b19
--- /dev/null
+++ b/test/files/run/macro-expand-unapply-a/Test_2.scala
@@ -0,0 +1,6 @@
+import Macros._
+
+object Test extends App {
+ List(1, 2) match { case UnapplyMacro(x, y) => println((x, y)) }
+ List(1, 2, 3) match { case UnapplyMacro(x, y, z) => println((x, y, z)) }
+} \ No newline at end of file
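Because the macro in Impls_Macros_1.scala merely reifies a forwarding call to Helper.unapplySeq, the two matches above behave exactly like an ordinary Seq extractor on the list. A non-macro sketch of the same matches:

    object UnapplySketch extends App {
      object PlainUnapply {
        def unapplySeq[T](x: List[T]): Option[Seq[T]] = List.unapplySeq(x)
      }
      List(1, 2) match { case PlainUnapply(x, y) => println((x, y)) }          // (1,2)
      List(1, 2, 3) match { case PlainUnapply(x, y, z) => println((x, y, z)) } // (1,2,3)
    }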
diff --git a/test/files/run/macro-expand-unapply-b.check b/test/files/run/macro-expand-unapply-b.check
new file mode 100644
index 0000000000..5272f0d00a
--- /dev/null
+++ b/test/files/run/macro-expand-unapply-b.check
@@ -0,0 +1,2 @@
+(1,List(2))
+List(1)
diff --git a/test/files/run/macro-expand-unapply-b.flags b/test/files/run/macro-expand-unapply-b.flags
new file mode 100644
index 0000000000..cd66464f2f
--- /dev/null
+++ b/test/files/run/macro-expand-unapply-b.flags
@@ -0,0 +1 @@
+-language:experimental.macros \ No newline at end of file
diff --git a/test/files/run/macro-expand-unapply-b/Impls_Macros_1.scala b/test/files/run/macro-expand-unapply-b/Impls_Macros_1.scala
new file mode 100644
index 0000000000..d0300bdf7e
--- /dev/null
+++ b/test/files/run/macro-expand-unapply-b/Impls_Macros_1.scala
@@ -0,0 +1,37 @@
+import language.experimental.macros
+import scala.reflect.macros.Context
+
+object Macros {
+ implicit class ContextExtensions(c: StringContext) {
+ object q {
+ def unapply(x: Any): Option[Any] = macro impl
+ }
+ }
+
+ def impl(c: Context)(x: c.Expr[Any]): c.Expr[Option[Any]] = {
+ import c.universe._
+ import Flag._
+
+ // parts here will be string literals - static parts of the string interpolation
+ // e.g. for q"$x, $y" parts will be Literal(Constant("")), Literal(Constant(", ")) and Literal(Constant(""))
+ val Apply(Select(Select(Apply(_, List(Apply(_, parts))), _), _), _) = c.macroApplication
+ val nresults = parts.length - 1
+
+ def results() =
+ ((1 to (nresults - 1)).toList map (i => Literal(Constant(i)))) :+ // (n - 1) results of type Int
+ Apply(Ident(TermName("List")), List(Literal(Constant(nresults)))) // and also one result of a different type
+ def extractorBody() =
+ if (nresults == 0) Literal(Constant(true))
+ else if (nresults == 1) Apply(Ident(TermName("Some")), results())
+ else Apply(Ident(TermName("Some")), List(Apply(Ident(TermName("Tuple" + nresults)), results())))
+
+ val name = TermName(java.util.UUID.randomUUID().toString.replace("-", ""))
+ val mdef = ModuleDef(NoMods, name, Template(List(Select(Ident(TermName("scala")), TypeName("AnyRef"))), emptyValDef, List(
+ DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(),
+ Block(List(pendingSuperCall), Literal(Constant(())))),
+ DefDef(Modifiers(), TermName("unapply"), List(), List(List(ValDef(Modifiers(PARAM), TermName("x"), Ident(TypeName("Any")), EmptyTree))), TypeTree(),
+ extractorBody()))))
+ c.introduceTopLevel(nme.EMPTY_PACKAGE_NAME.toString, mdef)
+ c.Expr[Option[Any]](Apply(Select(Ident(name), TermName("unapply")), List(x.tree)))
+ }
+} \ No newline at end of file
diff --git a/test/files/run/macro-expand-unapply-b/Test_2.scala b/test/files/run/macro-expand-unapply-b/Test_2.scala
new file mode 100644
index 0000000000..5352160dfe
--- /dev/null
+++ b/test/files/run/macro-expand-unapply-b/Test_2.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+ import Macros._
+ def whatever() = null
+ val q"$x1, $y1" = whatever()
+ println(x1, y1)
+ val q"$x2" = whatever()
+ println(x2)
+}
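The expected output for this test, (1,List(2)) followed by List(1), comes from the extractor objects that c.introduceTopLevel synthesizes: for n interpolation holes the unapply returns n - 1 Int results plus one List result. Hand-written, the two generated objects look roughly like this (the names are assumptions; the real ones are fresh UUID-based identifiers):

    object Extractor2 {                        // backs q"$x1, $y1" (two holes)
      def unapply(x: Any) = Some((1, List(2)))
    }

    object Extractor1 {                        // backs q"$x2" (one hole)
      def unapply(x: Any) = Some(List(1))
    }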
diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Impls_1.scala b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Impls_1.scala
index 2ef8f04be9..f6c1d27d54 100644
--- a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Impls_1.scala
+++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Impls_1.scala
@@ -3,7 +3,7 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx)(xs: c.Expr[Int]*) = {
import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), xs.map(_.tree).toList)
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), xs.map(_.tree).toList)
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala
index c832826d64..b844012d53 100644
--- a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala
+++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala
@@ -6,7 +6,7 @@ object Test extends App {
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
- val tree = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Typed(Apply(Ident(definitions.ListModule), List(Literal(Constant(1)), Literal(Constant(2)))), Ident(tpnme.WILDCARD_STAR))))
+ val tree = Apply(Select(Ident(TermName("Macros")), TermName("foo")), List(Typed(Apply(Ident(definitions.ListModule), List(Literal(Constant(1)), Literal(Constant(2)))), Ident(tpnme.WILDCARD_STAR))))
try cm.mkToolBox().eval(tree)
catch { case ex: Throwable => println(ex.getMessage) }
} \ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Impls_1.scala b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Impls_1.scala
index 3c7f94f605..363ff0e0aa 100644
--- a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Impls_1.scala
+++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Impls_1.scala
@@ -7,7 +7,7 @@ object Impls {
case List(Typed(stripped, Ident(wildstar))) if wildstar == tpnme.WILDCARD_STAR => List(stripped)
case _ => ???
}
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), stripped_xs)
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), stripped_xs)
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-explicit-over-varargs/Impls_1.scala b/test/files/run/macro-expand-varargs-explicit-over-varargs/Impls_1.scala
index 2066893bdc..0b61ab2f9b 100644
--- a/test/files/run/macro-expand-varargs-explicit-over-varargs/Impls_1.scala
+++ b/test/files/run/macro-expand-varargs-explicit-over-varargs/Impls_1.scala
@@ -7,7 +7,7 @@ object Impls {
def foo(c: Ctx)(xs: c.Expr[Int]*) = {
import c.universe._
- val body = Apply(Select(Ident(newTermName("Impls")), newTermName("myprintln")), xs.map(_.tree).toList)
+ val body = Apply(Select(Ident(TermName("Impls")), TermName("myprintln")), xs.map(_.tree).toList)
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Impls_1.scala b/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Impls_1.scala
index 2ef8f04be9..f6c1d27d54 100644
--- a/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Impls_1.scala
+++ b/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Impls_1.scala
@@ -3,7 +3,7 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx)(xs: c.Expr[Int]*) = {
import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), xs.map(_.tree).toList)
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), xs.map(_.tree).toList)
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-implicit-over-varargs/Impls_1.scala b/test/files/run/macro-expand-varargs-implicit-over-varargs/Impls_1.scala
index 2066893bdc..0b61ab2f9b 100644
--- a/test/files/run/macro-expand-varargs-implicit-over-varargs/Impls_1.scala
+++ b/test/files/run/macro-expand-varargs-implicit-over-varargs/Impls_1.scala
@@ -7,7 +7,7 @@ object Impls {
def foo(c: Ctx)(xs: c.Expr[Int]*) = {
import c.universe._
- val body = Apply(Select(Ident(newTermName("Impls")), newTermName("myprintln")), xs.map(_.tree).toList)
+ val body = Apply(Select(Ident(TermName("Impls")), TermName("myprintln")), xs.map(_.tree).toList)
c.Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/files/run/macro-impl-default-params/Impls_Macros_1.scala b/test/files/run/macro-impl-default-params/Impls_Macros_1.scala
index 7c40045c0f..95d746980e 100644
--- a/test/files/run/macro-impl-default-params/Impls_Macros_1.scala
+++ b/test/files/run/macro-impl-default-params/Impls_Macros_1.scala
@@ -7,10 +7,10 @@ object Impls {
import c.universe._
val U = implicitly[c.WeakTypeTag[U]]
val body = Block(List(
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("invoking foo_targs...")))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("type of prefix is: " + prefix.staticType)))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("type of prefix tree is: " + prefix.tree.tpe)))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("U is: " + U.tpe))))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("invoking foo_targs...")))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("type of prefix is: " + prefix.staticType)))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("type of prefix tree is: " + prefix.tree.tpe)))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("U is: " + U.tpe))))),
Literal(Constant(())))
c.Expr[Unit](body)
}
diff --git a/test/files/run/macro-impl-rename-context/Impls_Macros_1.scala b/test/files/run/macro-impl-rename-context/Impls_Macros_1.scala
index 56c23f5faf..738c88bbc8 100644
--- a/test/files/run/macro-impl-rename-context/Impls_Macros_1.scala
+++ b/test/files/run/macro-impl-rename-context/Impls_Macros_1.scala
@@ -4,7 +4,7 @@ object Impls {
def foo(unconventionalName: Ctx)(x: unconventionalName.Expr[Int]) = {
import unconventionalName.universe._
val body = Block(List(
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("invoking foo..."))))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("invoking foo..."))))),
Literal(Constant(())))
unconventionalName.Expr[Unit](body)
}
diff --git a/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala
index 0b9986e9f6..61f0bdfadc 100644
--- a/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala
+++ b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala
@@ -2,7 +2,7 @@ object Test extends App {
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
- val tree = Select(Ident(newTermName("Macros")), newTermName("foo"))
+ val tree = Select(Ident(TermName("Macros")), TermName("foo"))
try cm.mkToolBox().eval(tree)
catch { case ex: Throwable => println(ex.getMessage) }
} \ No newline at end of file
diff --git a/test/files/run/macro-invalidret-nontypeable/Impls_Macros_1.scala b/test/files/run/macro-invalidret-nontypeable/Impls_Macros_1.scala
index fb0d55208c..869a5a41fa 100644
--- a/test/files/run/macro-invalidret-nontypeable/Impls_Macros_1.scala
+++ b/test/files/run/macro-invalidret-nontypeable/Impls_Macros_1.scala
@@ -3,7 +3,7 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx) = {
import c.universe._
- val body = Ident(newTermName("IDoNotExist"))
+ val body = Ident(TermName("IDoNotExist"))
c.Expr[Int](body)
}
}
diff --git a/test/files/run/macro-invalidret-nontypeable/Test_2.scala b/test/files/run/macro-invalidret-nontypeable/Test_2.scala
index 0daee49a08..7cd474ff52 100644
--- a/test/files/run/macro-invalidret-nontypeable/Test_2.scala
+++ b/test/files/run/macro-invalidret-nontypeable/Test_2.scala
@@ -2,7 +2,7 @@
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
- val tree = Select(Ident(newTermName("Macros")), newTermName("foo"))
+ val tree = Select(Ident(TermName("Macros")), TermName("foo"))
try cm.mkToolBox().eval(tree)
catch { case ex: Throwable => println(ex.getMessage) }
} \ No newline at end of file
diff --git a/test/files/run/macro-invalidusage-badret/Test_2.scala b/test/files/run/macro-invalidusage-badret/Test_2.scala
index 5cb0be5ddd..fc71353f54 100644
--- a/test/files/run/macro-invalidusage-badret/Test_2.scala
+++ b/test/files/run/macro-invalidusage-badret/Test_2.scala
@@ -2,7 +2,7 @@ object Test extends App {
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
- val tree = Typed(Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant(42)))), Ident(newTypeName("String")))
+ val tree = Typed(Apply(Select(Ident(TermName("Macros")), TermName("foo")), List(Literal(Constant(42)))), Ident(TypeName("String")))
try cm.mkToolBox().eval(tree)
catch { case ex: Throwable => println(ex.getMessage) }
}
diff --git a/test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala b/test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala
index 4583a726cf..8a93161af5 100644
--- a/test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala
+++ b/test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala
@@ -3,7 +3,7 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo[T: c.WeakTypeTag](c: Ctx)(x: c.Expr[T]) = {
import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(x.tree.toString))))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(x.tree.toString))))
c.Expr[Unit](body)
}
}
diff --git a/test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala b/test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala
index e453d0b70c..9a34c62e0f 100644
--- a/test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala
+++ b/test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala
@@ -2,7 +2,7 @@ object Test extends App {
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
- val tree = Select(Ident(newTermName("Macros")), newTermName("foo"))
+ val tree = Select(Ident(TermName("Macros")), TermName("foo"))
try cm.mkToolBox().eval(tree)
catch { case ex: Throwable => println(ex.getMessage) }
}
diff --git a/test/files/run/macro-invalidusage-partialapplication/Impls_Macros_1.scala b/test/files/run/macro-invalidusage-partialapplication/Impls_Macros_1.scala
index 5866469499..3ac9cd2a8d 100644
--- a/test/files/run/macro-invalidusage-partialapplication/Impls_Macros_1.scala
+++ b/test/files/run/macro-invalidusage-partialapplication/Impls_Macros_1.scala
@@ -3,8 +3,8 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx)(x: c.Expr[Int])(y: c.Expr[Int]) = {
import c.universe._
- val sum = Apply(Select(x.tree, newTermName("$plus")), List(y.tree))
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(sum))
+ val sum = Apply(Select(x.tree, TermName("$plus")), List(y.tree))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(sum))
c.Expr[Unit](body)
}
}
diff --git a/test/files/run/macro-invalidusage-partialapplication/Test_2.scala b/test/files/run/macro-invalidusage-partialapplication/Test_2.scala
index dc48c127f4..75b8c139d4 100644
--- a/test/files/run/macro-invalidusage-partialapplication/Test_2.scala
+++ b/test/files/run/macro-invalidusage-partialapplication/Test_2.scala
@@ -2,7 +2,7 @@ object Test extends App {
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
- val tree = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant(40))))
+ val tree = Apply(Select(Ident(TermName("Macros")), TermName("foo")), List(Literal(Constant(40))))
try cm.mkToolBox().eval(tree)
catch { case ex: Throwable => println(ex.getMessage) }
}
diff --git a/test/files/run/macro-openmacros/Impls_Macros_1.scala b/test/files/run/macro-openmacros/Impls_Macros_1.scala
index b863ac048b..50a1782431 100644
--- a/test/files/run/macro-openmacros/Impls_Macros_1.scala
+++ b/test/files/run/macro-openmacros/Impls_Macros_1.scala
@@ -14,7 +14,7 @@ object Macros {
}
import c.universe._
- val next = if (c.enclosingMacros.length < 3) c.Expr[Unit](Select(Ident(c.mirror.staticModule("Macros")), newTermName("foo"))) else c.literalUnit
+ val next = if (c.enclosingMacros.length < 3) c.Expr[Unit](Select(Ident(c.mirror.staticModule("Macros")), TermName("foo"))) else c.literalUnit
c.universe.reify {
println(c.literal(normalizePaths(c.enclosingMacros.toString)).splice)
next.splice
diff --git a/test/files/run/macro-range/Common_1.scala b/test/files/run/macro-range/Common_1.scala
index 5c4bc211fc..4083e6126e 100644
--- a/test/files/run/macro-range/Common_1.scala
+++ b/test/files/run/macro-range/Common_1.scala
@@ -43,5 +43,5 @@ abstract class Utils {
LabelDef(lname, Nil, rhs)
}
def makeBinop(left: Tree, op: String, right: Tree): Tree =
- Apply(Select(left, newTermName(op)), List(right))
+ Apply(Select(left, TermName(op)), List(right))
}
diff --git a/test/files/run/macro-range/Expansion_Impossible_2.scala b/test/files/run/macro-range/Expansion_Impossible_2.scala
index 57e0cee97f..ca0db48822 100644
--- a/test/files/run/macro-range/Expansion_Impossible_2.scala
+++ b/test/files/run/macro-range/Expansion_Impossible_2.scala
@@ -16,11 +16,11 @@ object Impls {
// scala"($_this: RangeDefault).foreach($f)"
c.Expr(c.prefix.tree match {
case Apply(Select(New(tpt), initName), List(lo, hi)) if tpt.symbol.fullName == "Range" =>
- val iname = newTermName("$i")
- val hname = newTermName("$h")
+ val iname = TermName("$i")
+ val hname = TermName("$h")
def iref = Ident(iname)
def href = Ident(hname)
- val labelname = newTermName("$while")
+ val labelname = TermName("$while")
val cond = makeBinop(iref, "$less", href)
val body = Block(
List(makeApply(f.tree, List(iref))),
@@ -37,8 +37,8 @@ object Impls {
case _ =>
Apply(
Select(
- Typed(c.prefix.tree, Ident(newTypeName("RangeDefault"))),
- newTermName("foreach")),
+ Typed(c.prefix.tree, Ident(TypeName("RangeDefault"))),
+ TermName("foreach")),
List(f.tree))
})
}
diff --git a/test/files/run/macro-reflective-ma-normal-mdmi/Impls_Macros_1.scala b/test/files/run/macro-reflective-ma-normal-mdmi/Impls_Macros_1.scala
index fa559334d4..51e0264ed5 100644
--- a/test/files/run/macro-reflective-ma-normal-mdmi/Impls_Macros_1.scala
+++ b/test/files/run/macro-reflective-ma-normal-mdmi/Impls_Macros_1.scala
@@ -3,7 +3,7 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx)(x: c.Expr[Int]) = {
import c.universe._
- val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+ val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1))))
c.Expr[Int](body)
}
}
diff --git a/test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala b/test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala
index 2e64c01e35..267d1bc7b0 100644
--- a/test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala
+++ b/test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala
@@ -2,6 +2,6 @@ object Test extends App {
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
- val tree = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant(42))))
+ val tree = Apply(Select(Ident(TermName("Macros")), TermName("foo")), List(Literal(Constant(42))))
println(cm.mkToolBox().eval(tree))
}
diff --git a/test/files/run/macro-reflective-mamd-normal-mi/Impls_1.scala b/test/files/run/macro-reflective-mamd-normal-mi/Impls_1.scala
index 5d7e077731..4261a6d45d 100644
--- a/test/files/run/macro-reflective-mamd-normal-mi/Impls_1.scala
+++ b/test/files/run/macro-reflective-mamd-normal-mi/Impls_1.scala
@@ -3,7 +3,7 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx)(x: c.Expr[Int]) = {
import c.universe._
- val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+ val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1))))
c.Expr[Int](body)
}
}
\ No newline at end of file
diff --git a/test/files/run/macro-reflective-mamd-normal-mi/Macros_Test_2.scala b/test/files/run/macro-reflective-mamd-normal-mi/Macros_Test_2.scala
index 70560009b1..13cd953bde 100644
--- a/test/files/run/macro-reflective-mamd-normal-mi/Macros_Test_2.scala
+++ b/test/files/run/macro-reflective-mamd-normal-mi/Macros_Test_2.scala
@@ -8,12 +8,12 @@ object Test extends App {
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
- val macrobody = Select(Ident(newTermName("Impls")), newTermName("foo"))
- val macroparam = ValDef(NoMods, newTermName("x"), TypeTree(definitions.IntClass.toType), EmptyTree)
- val macrodef = DefDef(Modifiers(MACRO), newTermName("foo"), Nil, List(List(macroparam)), TypeTree(), macrobody)
+ val macrobody = Select(Ident(TermName("Impls")), TermName("foo"))
+ val macroparam = ValDef(NoMods, TermName("x"), TypeTree(definitions.IntClass.toType), EmptyTree)
+ val macrodef = DefDef(Modifiers(MACRO), TermName("foo"), Nil, List(List(macroparam)), TypeTree(), macrobody)
val modulector = DefDef(NoMods, nme.CONSTRUCTOR, Nil, List(List()), TypeTree(), Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())), Literal(Constant(()))))
- val module = ModuleDef(NoMods, newTermName("Macros"), Template(Nil, emptyValDef, List(modulector, macrodef)))
- val macroapp = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant(42))))
+ val module = ModuleDef(NoMods, TermName("Macros"), Template(Nil, emptyValDef, List(modulector, macrodef)))
+ val macroapp = Apply(Select(Ident(TermName("Macros")), TermName("foo")), List(Literal(Constant(42))))
val tree = Block(List(macrodef, module), macroapp)
val toolbox = cm.mkToolBox(options = "-language:experimental.macros")
println(toolbox.eval(tree))
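Besides dropping the factory prefix, TermName and TypeName also come with extractors, so the same names can be used in patterns. A small sketch (same assumptions as the previous example; the names are illustrative only):

    import scala.reflect.runtime.universe._

    object NameExtractorDemo extends App {
      // TermName works both as a constructor and as an extractor.
      val sel = Select(Ident(TermName("Macros")), TermName("foo"))
      sel match {
        case Select(_, TermName(n)) => println(s"selected member: $n") // prints: selected member: foo
        case other                  => println(s"unexpected tree: $other")
      }
    }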
diff --git a/test/files/run/macro-reify-freevars/Test_2.scala b/test/files/run/macro-reify-freevars/Test_2.scala
index 7af9d89bdb..c2d0118e17 100644
--- a/test/files/run/macro-reify-freevars/Test_2.scala
+++ b/test/files/run/macro-reify-freevars/Test_2.scala
@@ -2,10 +2,10 @@ object Test extends App {
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
- val q = New(AppliedTypeTree(Select(Select(Select(Ident(newTermName("scala")), newTermName("collection")), newTermName("slick")), newTypeName("Queryable")), List(Ident(newTermName("Int")))))
- val x = ValDef(NoMods, newTermName("x"), Ident(newTermName("Int")), EmptyTree)
- val fn = Function(List(x), Apply(Select(Ident(newTermName("x")), newTermName("$plus")), List(Literal(Constant("5")))))
- val tree = Apply(Select(q, newTermName("map")), List(fn))
+ val q = New(AppliedTypeTree(Select(Select(Select(Ident(TermName("scala")), TermName("collection")), TermName("slick")), TypeName("Queryable")), List(Ident(TermName("Int")))))
+ val x = ValDef(NoMods, TermName("x"), Ident(TermName("Int")), EmptyTree)
+ val fn = Function(List(x), Apply(Select(Ident(TermName("x")), TermName("$plus")), List(Literal(Constant("5")))))
+ val tree = Apply(Select(q, TermName("map")), List(fn))
try cm.mkToolBox().eval(tree)
catch { case ex: Throwable => println(ex.getMessage) }
}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-nested-a/Impls_Macros_1.scala b/test/files/run/macro-reify-nested-a/Impls_Macros_1.scala
index b4351c2c53..bb6a45e11e 100644
--- a/test/files/run/macro-reify-nested-a/Impls_Macros_1.scala
+++ b/test/files/run/macro-reify-nested-a/Impls_Macros_1.scala
@@ -28,7 +28,7 @@ object QueryableMacros{
val foo = c.Expr[ru.Expr[Queryable[S]]](
c.reifyTree( mkRuntimeUniverseRef, EmptyTree, c.typeCheck(
Utils[c.type](c).removeDoubleReify(
- Apply(Select(c.prefix.tree, newTermName( name )), List( projection.tree ))
+ Apply(Select(c.prefix.tree, TermName( name )), List( projection.tree ))
).asInstanceOf[Tree]
)))
c.universe.reify{ Queryable.factory[S]( foo.splice )}
diff --git a/test/files/run/macro-reify-nested-b/Impls_Macros_1.scala b/test/files/run/macro-reify-nested-b/Impls_Macros_1.scala
index b4351c2c53..bb6a45e11e 100644
--- a/test/files/run/macro-reify-nested-b/Impls_Macros_1.scala
+++ b/test/files/run/macro-reify-nested-b/Impls_Macros_1.scala
@@ -28,7 +28,7 @@ object QueryableMacros{
val foo = c.Expr[ru.Expr[Queryable[S]]](
c.reifyTree( mkRuntimeUniverseRef, EmptyTree, c.typeCheck(
Utils[c.type](c).removeDoubleReify(
- Apply(Select(c.prefix.tree, newTermName( name )), List( projection.tree ))
+ Apply(Select(c.prefix.tree, TermName( name )), List( projection.tree ))
).asInstanceOf[Tree]
)))
c.universe.reify{ Queryable.factory[S]( foo.splice )}
diff --git a/test/files/run/macro-reify-splice-outside-reify/Test_2.scala b/test/files/run/macro-reify-splice-outside-reify/Test_2.scala
index 54bd03fcd2..dbc17e7c15 100644
--- a/test/files/run/macro-reify-splice-outside-reify/Test_2.scala
+++ b/test/files/run/macro-reify-splice-outside-reify/Test_2.scala
@@ -2,7 +2,7 @@ object Test extends App {
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
- val tree = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant(42))))
+ val tree = Apply(Select(Ident(TermName("Macros")), TermName("foo")), List(Literal(Constant(42))))
try println(cm.mkToolBox().eval(tree))
catch { case ex: Throwable => println(ex.getMessage) }
}
diff --git a/test/files/run/macro-reify-tagless-a/Test_2.scala b/test/files/run/macro-reify-tagless-a/Test_2.scala
index 584c4bdf5b..afb418a755 100644
--- a/test/files/run/macro-reify-tagless-a/Test_2.scala
+++ b/test/files/run/macro-reify-tagless-a/Test_2.scala
@@ -6,9 +6,9 @@ object Test extends App {
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
val tpt = AppliedTypeTree(Ident(definitions.ListClass), List(Ident(definitions.StringClass)))
- val rhs = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant("hello world"))))
- val list = ValDef(NoMods, newTermName("list"), tpt, rhs)
- val tree = Block(List(list), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Ident(list.name))))
+ val rhs = Apply(Select(Ident(TermName("Macros")), TermName("foo")), List(Literal(Constant("hello world"))))
+ val list = ValDef(NoMods, TermName("list"), tpt, rhs)
+ val tree = Block(List(list), Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Ident(list.name))))
try cm.mkToolBox().eval(tree)
catch { case ex: Throwable => println(ex.getMessage) }
}
diff --git a/test/files/run/macro-reify-type/Macros_1.scala b/test/files/run/macro-reify-type/Macros_1.scala
index 06de05735d..c4d1d9f8ad 100644
--- a/test/files/run/macro-reify-type/Macros_1.scala
+++ b/test/files/run/macro-reify-type/Macros_1.scala
@@ -8,7 +8,7 @@ object StaticReflect {
import c.universe._
val nameName: TermName = name.tree match {
- case Literal(Constant(str: String)) => newTermName(str)
+ case Literal(Constant(str: String)) => TermName(str)
case _ => c.error(c.enclosingPosition, s"Method name not constant.") ; return reify(ru.NoType)
}
val clazz = weakTypeOf[A]
@@ -17,8 +17,8 @@ object StaticReflect {
case NoSymbol => c.error(c.enclosingPosition, s"No member called $nameName in $clazz.") ; reify(ru.NoType)
case member =>
val mtpe = member typeSignatureIn clazz
- val mtag = c.reifyType(treeBuild.mkRuntimeUniverseRef, Select(treeBuild.mkRuntimeUniverseRef, newTermName("rootMirror")), mtpe)
- val mtree = Select(mtag, newTermName("tpe"))
+ val mtag = c.reifyType(treeBuild.mkRuntimeUniverseRef, Select(treeBuild.mkRuntimeUniverseRef, TermName("rootMirror")), mtpe)
+ val mtree = Select(mtag, TermName("tpe"))
c.Expr[ru.Type](mtree)
}
diff --git a/test/files/run/macro-reify-type/Test_2.scala b/test/files/run/macro-reify-type/Test_2.scala
index 9beaf98681..1f35973531 100644
--- a/test/files/run/macro-reify-type/Test_2.scala
+++ b/test/files/run/macro-reify-type/Test_2.scala
@@ -7,10 +7,10 @@ object Test extends App {
//val $m: $u.Mirror = scala.reflect.runtime.universe.rootMirror;
//import $u._, $m._, Flag._
//val tpe = {
- // val symdef$B2 = build.newNestedSymbol(build.selectTerm(staticClass("scala.collection.TraversableLike"), "map"), newTypeName("B"), NoPosition, DEFERRED | PARAM, false);
- // val symdef$That2 = build.newNestedSymbol(build.selectTerm(staticClass("scala.collection.TraversableLike"), "map"), newTypeName("That"), NoPosition, DEFERRED | PARAM, false);
- // val symdef$f2 = build.newNestedSymbol(build.selectTerm(staticClass("scala.collection.TraversableLike"), "map"), newTermName("f"), NoPosition, PARAM, false);
- // val symdef$bf2 = build.newNestedSymbol(build.selectTerm(staticClass("scala.collection.TraversableLike"), "map"), newTermName("bf"), NoPosition, IMPLICIT | PARAM, false);
+ // val symdef$B2 = build.newNestedSymbol(build.selectTerm(staticClass("scala.collection.TraversableLike"), "map"), TypeName("B"), NoPosition, DEFERRED | PARAM, false);
+ // val symdef$That2 = build.newNestedSymbol(build.selectTerm(staticClass("scala.collection.TraversableLike"), "map"), TypeName("That"), NoPosition, DEFERRED | PARAM, false);
+ // val symdef$f2 = build.newNestedSymbol(build.selectTerm(staticClass("scala.collection.TraversableLike"), "map"), TermName("f"), NoPosition, PARAM, false);
+ // val symdef$bf2 = build.newNestedSymbol(build.selectTerm(staticClass("scala.collection.TraversableLike"), "map"), TermName("bf"), NoPosition, IMPLICIT | PARAM, false);
// build.setTypeSignature(symdef$B2, TypeBounds(staticClass("scala.Nothing").asType.toTypeConstructor, staticClass("scala.Any").asType.toTypeConstructor));
// build.setTypeSignature(symdef$That2, TypeBounds(staticClass("scala.Nothing").asType.toTypeConstructor, staticClass("scala.Any").asType.toTypeConstructor));
// build.setTypeSignature(symdef$f2, TypeRef(ThisType(staticPackage("scala").asModule.moduleClass), staticClass("scala.Function1"), List(staticClass("scala.Int").asType.toTypeConstructor, TypeRef(NoPrefix, symdef$B2, List()))));
diff --git a/test/files/run/macro-reify-unreify/Macros_1.scala b/test/files/run/macro-reify-unreify/Macros_1.scala
index 9f04c13014..25ed352cca 100644
--- a/test/files/run/macro-reify-unreify/Macros_1.scala
+++ b/test/files/run/macro-reify-unreify/Macros_1.scala
@@ -9,7 +9,7 @@ object Macros {
import treeBuild._
val world = c.reifyTree(mkRuntimeUniverseRef, EmptyTree, s.tree)
- val greeting = c.reifyTree(mkRuntimeUniverseRef, EmptyTree, c.typeCheck(Apply(Select(Literal(Constant("hello ")), newTermName("$plus")), List(c.unreifyTree(world)))))
+ val greeting = c.reifyTree(mkRuntimeUniverseRef, EmptyTree, c.typeCheck(Apply(Select(Literal(Constant("hello ")), TermName("$plus")), List(c.unreifyTree(world)))))
val typedGreeting = c.Expr[String](greeting)
c.universe.reify {
diff --git a/test/files/run/macro-repl-basic.check b/test/files/run/macro-repl-basic.check
index 7deed4a878..8d43a3aa16 100644
--- a/test/files/run/macro-repl-basic.check
+++ b/test/files/run/macro-repl-basic.check
@@ -14,23 +14,23 @@ scala>
scala> object Impls {
def foo(c: Ctx)(x: c.Expr[Int]) = {
import c.universe._
- val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+ val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1))))
c.Expr[Int](body)
}
def bar(c: Ctx)(x: c.Expr[Int]) = {
import c.universe._
- val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2))))
+ val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(2))))
c.Expr[Int](body)
}
def quux(c: Ctx)(x: c.Expr[Int]) = {
import c.universe._
- val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3))))
+ val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(3))))
c.Expr[Int](body)
}
}
-defined module Impls
+defined object Impls
scala> object Macros {
object Shmacros {
@@ -40,7 +40,7 @@ scala> object Macros {
}; class Macros {
def quux(x: Int): Int = macro Impls.quux
}
-defined module Macros
+defined object Macros
defined class Macros
scala>
diff --git a/test/files/run/macro-repl-basic.scala b/test/files/run/macro-repl-basic.scala
index eae1febb3a..3c22c13dc7 100644
--- a/test/files/run/macro-repl-basic.scala
+++ b/test/files/run/macro-repl-basic.scala
@@ -8,19 +8,19 @@ object Test extends ReplTest {
|object Impls {
| def foo(c: Ctx)(x: c.Expr[Int]) = {
| import c.universe._
- | val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+ | val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1))))
| c.Expr[Int](body)
| }
|
| def bar(c: Ctx)(x: c.Expr[Int]) = {
| import c.universe._
- | val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2))))
+ | val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(2))))
| c.Expr[Int](body)
| }
|
| def quux(c: Ctx)(x: c.Expr[Int]) = {
| import c.universe._
- | val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3))))
+ | val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(3))))
| c.Expr[Int](body)
| }
|}
diff --git a/test/files/run/macro-repl-dontexpand.check b/test/files/run/macro-repl-dontexpand.check
index 628a9146c4..99b70ea3dc 100644
--- a/test/files/run/macro-repl-dontexpand.check
+++ b/test/files/run/macro-repl-dontexpand.check
@@ -7,6 +7,6 @@ scala> def bar(c: scala.reflect.macros.Context) = ???
bar: (c: scala.reflect.macros.Context)Nothing
scala> def foo = macro bar
-foo: Any
+defined term macro foo: Any
scala>
diff --git a/test/files/run/macro-toplevel-companion-a.check b/test/files/run/macro-toplevel-companion-a.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/run/macro-toplevel-companion-a.check
diff --git a/test/files/run/macro-toplevel-companion-a.flags b/test/files/run/macro-toplevel-companion-a.flags
new file mode 100644
index 0000000000..cd66464f2f
--- /dev/null
+++ b/test/files/run/macro-toplevel-companion-a.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-toplevel-companion-a/Impls_Macros_1.scala b/test/files/run/macro-toplevel-companion-a/Impls_Macros_1.scala
new file mode 100644
index 0000000000..23e8694ddc
--- /dev/null
+++ b/test/files/run/macro-toplevel-companion-a/Impls_Macros_1.scala
@@ -0,0 +1,14 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ val synthetic = reify{ class C { override def toString = "C" }; object C { implicit val c = new C } }.tree
+ val defs = synthetic.asInstanceOf[Block].stats.asInstanceOf[List[ImplDef]]
+ if (c.topLevelRef(TypeName("C")).isEmpty) c.introduceTopLevel(nme.EMPTY_PACKAGE_NAME.toString, defs: _*)
+ c.literalUnit
+ }
+
+ def foo = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/macro-toplevel-companion-a/Test_2.scala b/test/files/run/macro-toplevel-companion-a/Test_2.scala
new file mode 100644
index 0000000000..78b65b5b1f
--- /dev/null
+++ b/test/files/run/macro-toplevel-companion-a/Test_2.scala
@@ -0,0 +1,8 @@
+import Macros._
+
+object Test extends App {
+ foo;
+ implicitly[C];
+ foo;
+ implicitly[C];
+}
\ No newline at end of file
diff --git a/test/files/run/macro-toplevel-companion-b.check b/test/files/run/macro-toplevel-companion-b.check
new file mode 100644
index 0000000000..bd30dc75d3
--- /dev/null
+++ b/test/files/run/macro-toplevel-companion-b.check
@@ -0,0 +1,4 @@
+reflective compilation has failed:
+
+Companions 'class C' and 'object C' must be defined in same file:
+ Found in <synthetic file name> and <synthetic file name>
diff --git a/test/files/run/macro-toplevel-companion-b.flags b/test/files/run/macro-toplevel-companion-b.flags
new file mode 100644
index 0000000000..cd66464f2f
--- /dev/null
+++ b/test/files/run/macro-toplevel-companion-b.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-toplevel-companion-b/Impls_Macros_1.scala b/test/files/run/macro-toplevel-companion-b/Impls_Macros_1.scala
new file mode 100644
index 0000000000..f30adc2965
--- /dev/null
+++ b/test/files/run/macro-toplevel-companion-b/Impls_Macros_1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ val Block(List(cdef: ClassDef), _) = reify{ class C }.tree
+ val classRef = c.topLevelRef(TypeName("C")) orElse c.introduceTopLevel(nme.EMPTY_PACKAGE_NAME.toString, cdef)
+ val Block(List(mdef: ModuleDef), _) = reify{ object C }.tree
+ val moduleRef = c.topLevelRef(TermName("C")) orElse c.introduceTopLevel(nme.EMPTY_PACKAGE_NAME.toString, mdef)
+ c.literalUnit
+ }
+
+ def foo = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/macro-toplevel-companion-b/Test_2.scala b/test/files/run/macro-toplevel-companion-b/Test_2.scala
new file mode 100644
index 0000000000..ca202d053f
--- /dev/null
+++ b/test/files/run/macro-toplevel-companion-b/Test_2.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.{ToolBox, ToolBoxError}
+import Macros._
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ try tb.compile(Select(Ident(TermName("Macros")), TermName("foo")))
+ catch { case ToolBoxError(message, _) => println("""macroSynthetic-.*?\.scala""".r.replaceAllIn(message, "<synthetic file name>")) }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-toplevel-companion-c.check b/test/files/run/macro-toplevel-companion-c.check
new file mode 100644
index 0000000000..8b422c2061
--- /dev/null
+++ b/test/files/run/macro-toplevel-companion-c.check
@@ -0,0 +1,3 @@
+error: Companions 'class C' and 'object C' must be defined in same file:
+ Found in <synthetic file name> and newSource1
+
diff --git a/test/files/run/macro-toplevel-companion-c.flags b/test/files/run/macro-toplevel-companion-c.flags
new file mode 100644
index 0000000000..cd66464f2f
--- /dev/null
+++ b/test/files/run/macro-toplevel-companion-c.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-toplevel-companion-c.scala b/test/files/run/macro-toplevel-companion-c.scala
new file mode 100644
index 0000000000..0e99903158
--- /dev/null
+++ b/test/files/run/macro-toplevel-companion-c.scala
@@ -0,0 +1,51 @@
+import scala.tools.partest._
+import java.io._
+
+object Test extends DirectTest {
+ def code = ???
+
+ def macros_1 = """
+ package test
+
+ import scala.reflect.macros.Context
+ import language.experimental.macros
+
+ object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ val Block(List(cdef: ClassDef), _) = reify{ class C }.tree
+ val ref = c.topLevelRef(TypeName("test.C")) orElse c.introduceTopLevel("test", cdef)
+ c.literalUnit
+ }
+
+ def foo = macro impl
+ }
+ """
+ def compileMacros() = {
+ val classpath = List(sys.props("partest.lib"), sys.props("partest.reflect")) mkString sys.props("path.separator")
+ compileString(newCompiler("-language:experimental.macros", "-cp", classpath, "-d", testOutput.path))(macros_1)
+ }
+
+ def test_2 = """
+ package test
+ object C { Macros.foo }
+ """
+ def compileTest() = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(test_2)
+ }
+
+ def show(): Unit = {
+ // redirect err to string, for logging
+ val prevErr = System.err
+ val baos = new ByteArrayOutputStream()
+ System.setErr(new PrintStream(baos))
+ log("Compiling Macros_1...")
+ if (compileMacros()) {
+ log("Compiling Test_2...")
+ if (compileTest()) log("Success!") else log("Failed...")
+ }
+ println("""macroSynthetic-.*?\.scala""".r.replaceAllIn(baos.toString, "<synthetic file name>"))
+ System.setErr(prevErr)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-toplevel.check b/test/files/run/macro-toplevel.check
new file mode 100644
index 0000000000..257c3764fd
--- /dev/null
+++ b/test/files/run/macro-toplevel.check
@@ -0,0 +1,2 @@
+I've been created from Macros.foo
+I've been created from Macros.foo
diff --git a/test/files/run/macro-toplevel/Macros_1.scala b/test/files/run/macro-toplevel/Macros_1.scala
new file mode 100644
index 0000000000..f681c86735
--- /dev/null
+++ b/test/files/run/macro-toplevel/Macros_1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ val msg = "I've been created from " + c.macroApplication
+ val Block(List(synthetic: ClassDef), _) = reify{ class SomeUniqueName { def hello = c.literal(msg).splice } }.tree
+ val ref = c.topLevelRef(synthetic.name) orElse c.introduceTopLevel(nme.EMPTY_PACKAGE_NAME.toString, synthetic)
+ c.Expr[String](Select(Apply(Select(New(ref), nme.CONSTRUCTOR), List()), TermName("hello")))
+ }
+
+ def foo = macro impl
+ def foo2 = macro impl
+}
diff --git a/test/files/run/macro-toplevel/Test_2.scala b/test/files/run/macro-toplevel/Test_2.scala
new file mode 100644
index 0000000000..eee2d6ae13
--- /dev/null
+++ b/test/files/run/macro-toplevel/Test_2.scala
@@ -0,0 +1,6 @@
+import Macros._
+
+object Test extends App {
+ println(Macros.foo)
+ println(Macros.foo2)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-typecheck-implicitsdisabled.check b/test/files/run/macro-typecheck-implicitsdisabled.check
index c4fa2c5c28..91d8fabd72 100644
--- a/test/files/run/macro-typecheck-implicitsdisabled.check
+++ b/test/files/run/macro-typecheck-implicitsdisabled.check
@@ -1,2 +1,2 @@
-scala.this.Predef.any2ArrowAssoc[Int](1).->[Int](2)
+scala.this.Predef.ArrowAssoc[Int](1).->[Int](2)
scala.reflect.macros.TypecheckException: value -> is not a member of Int
diff --git a/test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala b/test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala
index 633cb930fc..dbeb7efbc0 100644
--- a/test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala
+++ b/test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala
@@ -4,7 +4,7 @@ object Macros {
def impl_with_implicits_enabled(c: Context) = {
import c.universe._
- val tree1 = Apply(Select(Literal(Constant(1)), newTermName("$minus$greater")), List(Literal(Constant(2))))
+ val tree1 = Apply(Select(Literal(Constant(1)), TermName("$minus$greater")), List(Literal(Constant(2))))
val ttree1 = c.typeCheck(tree1, withImplicitViewsDisabled = false)
c.literal(ttree1.toString)
}
@@ -15,7 +15,7 @@ object Macros {
import c.universe._
try {
- val tree2 = Apply(Select(Literal(Constant(1)), newTermName("$minus$greater")), List(Literal(Constant(2))))
+ val tree2 = Apply(Select(Literal(Constant(1)), TermName("$minus$greater")), List(Literal(Constant(2))))
val ttree2 = c.typeCheck(tree2, withImplicitViewsDisabled = true)
c.literal(ttree2.toString)
} catch {
diff --git a/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala b/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala
index f693ad78cc..ff535fea8d 100644
--- a/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala
+++ b/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala
@@ -4,8 +4,8 @@ object Macros {
def impl_with_macros_enabled(c: Context) = {
import c.universe._
- val ru = Select(Select(Select(Select(Ident(newTermName("scala")), newTermName("reflect")), newTermName("runtime")), newTermName("package")), newTermName("universe"))
- val tree1 = Apply(Select(ru, newTermName("reify")), List(Literal(Constant(2))))
+ val ru = Select(Select(Select(Select(Ident(TermName("scala")), TermName("reflect")), TermName("runtime")), TermName("package")), TermName("universe"))
+ val tree1 = Apply(Select(ru, TermName("reify")), List(Literal(Constant(2))))
val ttree1 = c.typeCheck(tree1, withMacrosDisabled = false)
c.literal(ttree1.toString)
}
@@ -21,7 +21,7 @@ object Macros {
val ru = build.newFreeTerm("ru", scala.reflect.runtime.universe)
build.setTypeSignature(ru, rutpe)
- val tree2 = Apply(Select(Ident(ru), newTermName("reify")), List(Literal(Constant(2))))
+ val tree2 = Apply(Select(Ident(ru), TermName("reify")), List(Literal(Constant(2))))
val ttree2 = c.typeCheck(tree2, withMacrosDisabled = true)
c.literal(ttree2.toString)
}
diff --git a/test/files/run/macro-typecheck-macrosdisabled2.check b/test/files/run/macro-typecheck-macrosdisabled2.check
index 27d15d47af..75fd693722 100644
--- a/test/files/run/macro-typecheck-macrosdisabled2.check
+++ b/test/files/run/macro-typecheck-macrosdisabled2.check
@@ -10,7 +10,7 @@
def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
val $u: U = $m$untyped.universe;
val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
- $u.Apply.apply($u.Select.apply($u.build.Ident($m.staticModule("scala.Array")), $u.newTermName("apply")), scala.collection.immutable.List.apply[$u.Literal]($u.Literal.apply($u.Constant.apply(2))))
+ $u.Apply.apply($u.Select.apply($u.build.Ident($m.staticModule("scala.Array")), $u.TermName.apply("apply")), scala.collection.immutable.List.apply[$u.Literal]($u.Literal.apply($u.Constant.apply(2))))
}
};
new $treecreator1()
diff --git a/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala b/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala
index 1dbf5a1a87..a96e0c53b6 100644
--- a/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala
+++ b/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala
@@ -4,8 +4,8 @@ object Macros {
def impl_with_macros_enabled(c: Context) = {
import c.universe._
- val ru = Select(Select(Select(Select(Ident(newTermName("scala")), newTermName("reflect")), newTermName("runtime")), newTermName("package")), newTermName("universe"))
- val tree1 = Apply(Select(ru, newTermName("reify")), List(Apply(Select(Ident(newTermName("scala")), newTermName("Array")), List(Literal(Constant(2))))))
+ val ru = Select(Select(Select(Select(Ident(TermName("scala")), TermName("reflect")), TermName("runtime")), TermName("package")), TermName("universe"))
+ val tree1 = Apply(Select(ru, TermName("reify")), List(Apply(Select(Ident(TermName("scala")), TermName("Array")), List(Literal(Constant(2))))))
val ttree1 = c.typeCheck(tree1, withMacrosDisabled = false)
c.literal(ttree1.toString)
}
@@ -21,7 +21,7 @@ object Macros {
val ru = build.newFreeTerm("ru", scala.reflect.runtime.universe)
build.setTypeSignature(ru, rutpe)
- val tree2 = Apply(Select(Ident(ru), newTermName("reify")), List(Apply(Select(Ident(newTermName("scala")), newTermName("Array")), List(Literal(Constant(2))))))
+ val tree2 = Apply(Select(Ident(ru), TermName("reify")), List(Apply(Select(Ident(TermName("scala")), TermName("Array")), List(Literal(Constant(2))))))
val ttree2 = c.typeCheck(tree2, withMacrosDisabled = true)
c.literal(ttree2.toString)
}
diff --git a/test/files/run/map_java_conversions.scala b/test/files/run/map_java_conversions.scala
index 7714b2cc74..751167c04d 100644
--- a/test/files/run/map_java_conversions.scala
+++ b/test/files/run/map_java_conversions.scala
@@ -19,7 +19,7 @@ object Test {
val concMap = new java.util.concurrent.ConcurrentHashMap[String, String]
test(concMap)
- val cmap = asScalaConcurrentMap(concMap)
+ val cmap = mapAsScalaConcurrentMap(concMap)
cmap.putIfAbsent("absentKey", "absentValue")
cmap.put("somekey", "somevalue")
assert(cmap.remove("somekey", "somevalue") == true)
diff --git a/test/files/run/mutable-treeset.scala b/test/files/run/mutable-treeset.scala
new file mode 100644
index 0000000000..c9918cba96
--- /dev/null
+++ b/test/files/run/mutable-treeset.scala
@@ -0,0 +1,145 @@
+import scala.collection.mutable.TreeSet
+
+object Test extends App {
+ val list = List(6,5,4,3,2,1,1,2,3,4,5,6,6,5,4,3,2,1)
+ val distinct = list.distinct
+ val sorted = distinct.sorted
+
+ // sublist stuff for a single level of slicing
+ val min = list.min
+ val max = list.max
+ val nonlist = ((min - 10) until (max + 20) filterNot list.contains).toList
+ val sublist = list filter {x => x >=(min + 1) && x < max}
+ val distinctSublist = sublist.distinct
+ val subnonlist = min :: max :: nonlist
+ val subsorted = distinctSublist.sorted
+
+ // subsublist for a 2nd level of slicing
+ val almostmin = sublist.min
+ val almostmax = sublist.max
+ val subsublist = sublist filter {x => x >=(almostmin + 1) && x < almostmax}
+ val distinctSubsublist = subsublist.distinct
+ val subsubnonlist = almostmin :: almostmax :: subnonlist
+ val subsubsorted = distinctSubsublist.sorted
+
+ def testSize {
+ def check(set : TreeSet[Int], list: List[Int]) {
+ assert(set.size == list.size, s"$set had size ${set.size} while $list had size ${list.size}")
+ }
+
+ check(TreeSet[Int](), List[Int]())
+ val set = TreeSet(list:_*)
+ check(set, distinct)
+ check(set.clone, distinct)
+
+ val subset = set from (min + 1) until max
+ check(subset, distinctSublist)
+ check(subset.clone, distinctSublist)
+
+ val subsubset = subset from (almostmin + 1) until almostmax
+ check(subsubset, distinctSubsublist)
+ check(subsubset.clone, distinctSubsublist)
+ }
+
+ def testContains {
+ def check(set : TreeSet[Int], list: List[Int], nonlist: List[Int]) {
+ assert(list forall set.apply, s"$set did not contain all elements of $list using apply")
+ assert(list forall set.contains, s"$set did not contain all elements of $list using contains")
+ assert(!(nonlist exists set.apply), s"$set had an element from $nonlist using apply")
+ assert(!(nonlist exists set.contains), s"$set had an element from $nonlist using contains")
+ }
+
+ val set = TreeSet(list:_*)
+ check(set, list, nonlist)
+ check(set.clone, list, nonlist)
+
+ val subset = set from (min + 1) until max
+ check(subset, sublist, subnonlist)
+ check(subset.clone, sublist, subnonlist)
+
+ val subsubset = subset from (almostmin + 1) until almostmax
+ check(subsubset, subsublist, subsubnonlist)
+ check(subsubset.clone, subsublist, subsubnonlist)
+ }
+
+ def testAdd {
+ def check(set : TreeSet[Int], list: List[Int], nonlist: List[Int]) {
+ var builtList = List[Int]()
+ for (x <- list) {
+ set += x
+ builtList = (builtList :+ x).distinct.sorted filterNot nonlist.contains
+ assert(builtList forall set.apply, s"$set did not contain all elements of $builtList using apply")
+ assert(builtList.size == set.size, s"$set had size ${set.size} while $builtList had size ${builtList.size}")
+ }
+ assert(!(nonlist exists set.apply), s"$set had an element from $nonlist using apply")
+ assert(!(nonlist exists set.contains), s"$set had an element from $nonlist using contains")
+ }
+
+ val set = TreeSet[Int]()
+ val clone = set.clone
+ val subset = set.clone from (min + 1) until max
+ val subclone = subset.clone
+ val subsubset = subset.clone from (almostmin + 1) until almostmax
+ val subsubclone = subsubset.clone
+
+ check(set, list, nonlist)
+ check(clone, list, nonlist)
+
+ check(subset, list, subnonlist)
+ check(subclone, list, subnonlist)
+
+ check(subsubset, list, subsubnonlist)
+ check(subsubclone, list, subsubnonlist)
+ }
+
+ def testRemove {
+ def check(set: TreeSet[Int], sorted: List[Int]) {
+ var builtList = sorted
+ for (x <- list) {
+ set remove x
+ builtList = builtList filterNot (_ == x)
+ assert(builtList forall set.apply, s"$set did not contain all elements of $builtList using apply")
+ assert(builtList.size == set.size, s"$set had size $set.size while $builtList had size $builtList.size")
+ }
+ }
+ val set = TreeSet(list:_*)
+ val clone = set.clone
+ val subset = set.clone from (min + 1) until max
+ val subclone = subset.clone
+ val subsubset = subset.clone from (almostmin + 1) until almostmax
+ val subsubclone = subsubset.clone
+
+ check(set, sorted)
+ check(clone, sorted)
+
+ check(subset, subsorted)
+ check(subclone, subsorted)
+
+ check(subsubset, subsubsorted)
+ check(subsubclone, subsubsorted)
+ }
+
+ def testIterator {
+ def check(set: TreeSet[Int], list: List[Int]) {
+ val it = set.iterator.toList
+ assert(it == list, s"$it did not equal $list")
+ }
+ val set = TreeSet(list: _*)
+ check(set, sorted)
+ check(set.clone, sorted)
+
+ val subset = set from (min + 1) until max
+ check(subset, subsorted)
+ check(subset.clone, subsorted)
+
+ val subsubset = subset from (almostmin + 1) until almostmax
+ check(subsubset, subsubsorted)
+ check(subsubset.clone, subsubsorted)
+ }
+
+ testSize
+ testContains
+ testAdd
+ testRemove
+ testIterator
+}
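The new mutable-treeset test above exercises scala.collection.mutable.TreeSet and its from/until range projections. A quick illustrative sketch of the behaviour it checks (not part of the commit; the output shown in comments is indicative):

    import scala.collection.mutable.TreeSet

    object TreeSetRangeDemo extends App {
      val set = TreeSet(6, 5, 4, 3, 2, 1)  // kept in sorted order, duplicates collapsed
      println(set)                         // TreeSet(1, 2, 3, 4, 5, 6)
      val mid = set from 2 until 5         // projection restricted to keys in [2, 5)
      println(mid)                         // TreeSet(2, 3, 4)
      println(mid contains 5)              // false: 5 lies outside the projected range
    }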
diff --git a/test/files/run/no-pickle-skolems.check b/test/files/run/no-pickle-skolems.check
new file mode 100644
index 0000000000..d64066171a
--- /dev/null
+++ b/test/files/run/no-pickle-skolems.check
@@ -0,0 +1 @@
+OK!
diff --git a/test/files/run/no-pickle-skolems/Source_1.scala b/test/files/run/no-pickle-skolems/Source_1.scala
new file mode 100644
index 0000000000..1b4cbfa788
--- /dev/null
+++ b/test/files/run/no-pickle-skolems/Source_1.scala
@@ -0,0 +1,5 @@
+package s
+
+trait Foo { def to[CC[X]](implicit cc: CC[Int]): Unit }
+
+class Bar extends Foo { def to[CC[X]](implicit cc: CC[Int]): Unit = ??? }
diff --git a/test/files/run/no-pickle-skolems/Test_2.scala b/test/files/run/no-pickle-skolems/Test_2.scala
new file mode 100644
index 0000000000..90bb4c4f88
--- /dev/null
+++ b/test/files/run/no-pickle-skolems/Test_2.scala
@@ -0,0 +1,37 @@
+import scala.reflect.runtime.universe._
+
+object Test {
+ /** Collects symbols by the given name, even if they're not
+ * named CC.
+ */
+ def collectSymbols[T: TypeTag](inMethod: TermName, name: String): List[String] = {
+ val m = typeOf[T] member inMethod typeSignatureIn typeOf[T]
+ var buf: List[Symbol] = Nil
+ var seen: Set[Symbol] = Set()
+ def id(s: Symbol): Int = s.asInstanceOf[{ def id: Int }].id
+
+ def check(s: Symbol) {
+ if (!seen(s)) {
+ seen += s
+ if (s.name.toString == name) buf ::= s
+ }
+ }
+ def loop(t: Type) {
+ t match {
+ case TypeRef(pre, sym, args) => loop(pre) ; check(sym) ; args foreach loop
+ case PolyType(tparams, restpe) => tparams foreach { tp => check(tp) ; check(tp.owner) ; loop(tp.typeSignature) } ; loop(restpe)
+ case MethodType(params, restpe) => params foreach { p => check(p) ; loop(p.typeSignature) } ; loop(restpe)
+ case _ =>
+ }
+ }
+ loop(m)
+
+ buf.reverse.distinct map (s => s.name + "#" + id(s))
+ }
+
+ def main(args: Array[String]): Unit = {
+ val syms = collectSymbols[s.Bar]("to", "CC")
+ assert(syms.size == 1, syms)
+ println("OK!")
+ }
+}
diff --git a/test/files/run/patmat_unapp_abstype-old.check b/test/files/run/patmat_unapp_abstype-old.check
deleted file mode 100644
index 72239d16cd..0000000000
--- a/test/files/run/patmat_unapp_abstype-old.check
+++ /dev/null
@@ -1,4 +0,0 @@
-TypeRef
-none of the above
-Bar
-Foo
diff --git a/test/files/run/patmat_unapp_abstype-old.flags b/test/files/run/patmat_unapp_abstype-old.flags
deleted file mode 100644
index ba80cad69b..0000000000
--- a/test/files/run/patmat_unapp_abstype-old.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xoldpatmat
diff --git a/test/files/run/patmat_unapp_abstype-old.scala b/test/files/run/patmat_unapp_abstype-old.scala
deleted file mode 100644
index 45496f08a2..0000000000
--- a/test/files/run/patmat_unapp_abstype-old.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-// abstract types and extractors, oh my!
-trait TypesAPI {
- trait Type
-
- // an alternative fix (implemented in the virtual pattern matcher, is to replace the isInstanceOf by a manifest-based run-time test)
- // that's what typeRefMani is for
- type TypeRef <: Type //; implicit def typeRefMani: Manifest[TypeRef]
- val TypeRef: TypeRefExtractor; trait TypeRefExtractor {
- def apply(x: Int): TypeRef
- def unapply(x: TypeRef): Option[(Int)]
- }
-
- // just for illustration, should follow the same pattern as TypeRef
- case class MethodType(n: Int) extends Type
-}
-
-// user should not be exposed to the implementation
-trait TypesUser extends TypesAPI {
- def shouldNotCrash(tp: Type): Unit = {
- tp match {
- case TypeRef(x) => println("TypeRef")
- // the above checks tp.isInstanceOf[TypeRef], which is erased to tp.isInstanceOf[Type]
- // before calling TypeRef.unapply(tp), which will then crash unless tp.isInstanceOf[TypesImpl#TypeRef] (which is not implied by tp.isInstanceOf[Type])
- // tp.isInstanceOf[TypesImpl#TypeRef] is equivalent to classOf[TypesImpl#TypeRef].isAssignableFrom(tp.getClass)
- // this is equivalent to manifest
- // it is NOT equivalent to manifest[Type] <:< typeRefMani
- case MethodType(x) => println("MethodType")
- case _ => println("none of the above")
- }
- }
-}
-
-trait TypesImpl extends TypesAPI {
- object TypeRef extends TypeRefExtractor // this will have a bridged unapply(x: Type) = unapply(x.asInstanceOf[TypeRef])
- case class TypeRef(n: Int) extends Type // this has a bridge from TypesAPI#Type to TypesImpl#TypeRef
- // --> the cast in the bridge will fail because the pattern matcher can't type test against the abstract types in TypesUser
- //lazy val typeRefMani = manifest[TypeRef]
-}
-
-trait Foos {
- trait Bar
- type Foo <: Bar
- trait FooExtractor {
- def unapply(foo: Foo): Option[Int]
- }
- val Foo: FooExtractor
-}
-
-trait RealFoos extends Foos {
- class Foo(val x: Int) extends Bar
- object Foo extends FooExtractor {
- def unapply(foo: Foo): Option[Int] = Some(foo.x)
- }
-}
-
-trait Intermed extends Foos {
- def crash(bar: Bar): Unit =
- bar match {
- case Foo(x) => println("Foo")
- case _ => println("Bar")
- }
-}
-
-object TestUnappStaticallyKnownSynthetic extends TypesImpl with TypesUser {
- def test() = {
- shouldNotCrash(TypeRef(10)) // should and does print "TypeRef"
- // once #1697/#2337 are fixed, this should generate the correct output
- shouldNotCrash(MethodType(10)) // should print "MethodType" but prints "none of the above" -- good one, pattern matcher!
- }
-}
-
-object TestUnappDynamicSynth extends RealFoos with Intermed {
- case class FooToo(n: Int) extends Bar
- def test() = {
- crash(FooToo(10))
- crash(new Foo(5))
- }
-}
-
-object Test extends App {
- TestUnappStaticallyKnownSynthetic.test()
- TestUnappDynamicSynth.test()
-}
diff --git a/test/files/run/programmatic-main.check b/test/files/run/programmatic-main.check
index bdf76ddce1..d472c569d2 100644
--- a/test/files/run/programmatic-main.check
+++ b/test/files/run/programmatic-main.check
@@ -1,31 +1,31 @@
- phase name id description
- ---------- -- -----------
- parser 1 parse source into ASTs, perform simple desugaring
- namer 2 resolve names, attach symbols to named trees
- packageobjects 3 load package objects
- typer 4 the meat and potatoes: type the trees
- patmat 5 translate match expressions
- superaccessors 6 add super accessors in traits and nested classes
- extmethods 7 add extension methods for inline classes
- pickler 8 serialize symbol tables
- refchecks 9 reference/override checking, translate nested objects
- uncurry 10 uncurry, translate function values to anonymous classes
- tailcalls 11 replace tail calls by jumps
- specialize 12 @specialized-driven class and method specialization
- explicitouter 13 this refs to outer pointers, translate patterns
- erasure 14 erase types, add interfaces for traits
- posterasure 15 clean up erased inline classes
- lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs
- lambdalift 17 move nested functions to top level
- constructors 18 move field definitions into constructors
- flatten 19 eliminate inner classes
- mixin 20 mixin composition
- cleanup 21 platform-specific cleanups, generate reflective calls
- icode 22 generate portable intermediate code
- inliner 23 optimization: do inlining
-inlineExceptionHandlers 24 optimization: inline exception handlers
- closelim 25 optimization: eliminate uncalled closures
- dce 26 optimization: eliminate dead code
- jvm 27 generate JVM bytecode
- terminal 28 The last phase in the compiler chain
+ phase name id description
+ ---------- -- -----------
+ parser 1 parse source into ASTs, perform simple desugaring
+ namer 2 resolve names, attach symbols to named trees
+packageobjects 3 load package objects
+ typer 4 the meat and potatoes: type the trees
+ patmat 5 translate match expressions
+superaccessors 6 add super accessors in traits and nested classes
+ extmethods 7 add extension methods for inline classes
+ pickler 8 serialize symbol tables
+ refchecks 9 reference/override checking, translate nested objects
+ uncurry 10 uncurry, translate function values to anonymous classes
+ tailcalls 11 replace tail calls by jumps
+ specialize 12 @specialized-driven class and method specialization
+ explicitouter 13 this refs to outer pointers, translate patterns
+ erasure 14 erase types, add interfaces for traits
+ posterasure 15 clean up erased inline classes
+ lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs
+ lambdalift 17 move nested functions to top level
+ constructors 18 move field definitions into constructors
+ flatten 19 eliminate inner classes
+ mixin 20 mixin composition
+ cleanup 21 platform-specific cleanups, generate reflective calls
+ icode 22 generate portable intermediate code
+ inliner 23 optimization: do inlining
+inlinehandlers 24 optimization: inline exception handlers
+ closelim 25 optimization: eliminate uncalled closures
+ dce 26 optimization: eliminate dead code
+ jvm 27 generate JVM bytecode
+ terminal 28 The last phase in the compiler chain
diff --git a/test/files/run/reflection-allmirrors-tostring.scala b/test/files/run/reflection-allmirrors-tostring.scala
index 73afff291c..0ca387a6b1 100644
--- a/test/files/run/reflection-allmirrors-tostring.scala
+++ b/test/files/run/reflection-allmirrors-tostring.scala
@@ -26,18 +26,18 @@ object Test extends App {
println(cm.reflect(new C))
val im = cm.reflect(new C)
- println(im.reflectField(typeOf[C].member(newTermName("f1")).asTerm))
- println(im.reflectField(typeOf[C].member(newTermName("f2")).asTerm))
- println(im.reflectMethod(typeOf[C].member(newTermName("m1")).asMethod))
- println(im.reflectMethod(typeOf[C].member(newTermName("m2")).asMethod))
- println(im.reflectMethod(typeOf[C].member(newTermName("m3")).asMethod))
- println(im.reflectMethod(typeOf[C].member(newTermName("m4")).asMethod))
- println(im.reflectMethod(typeOf[C].member(newTermName("m5")).asMethod))
- println(im.reflectClass(typeOf[C].member(newTypeName("C")).asClass))
- println(im.reflectModule(typeOf[C].member(newTermName("M")).asModule))
+ println(im.reflectField(typeOf[C].member(TermName("f1")).asTerm))
+ println(im.reflectField(typeOf[C].member(TermName("f2")).asTerm))
+ println(im.reflectMethod(typeOf[C].member(TermName("m1")).asMethod))
+ println(im.reflectMethod(typeOf[C].member(TermName("m2")).asMethod))
+ println(im.reflectMethod(typeOf[C].member(TermName("m3")).asMethod))
+ println(im.reflectMethod(typeOf[C].member(TermName("m4")).asMethod))
+ println(im.reflectMethod(typeOf[C].member(TermName("m5")).asMethod))
+ println(im.reflectClass(typeOf[C].member(TypeName("C")).asClass))
+ println(im.reflectModule(typeOf[C].member(TermName("M")).asModule))
val c = cm.staticClass("C")
- val cc = typeOf[C].member(newTypeName("C")).asClass
+ val cc = typeOf[C].member(TypeName("C")).asClass
println(cm.reflectClass(c).reflectConstructor(c.typeSignature.member(nme.CONSTRUCTOR).asMethod))
println(im.reflectClass(cc).reflectConstructor(cc.typeSignature.member(nme.CONSTRUCTOR).asMethod))
}
\ No newline at end of file
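The reflection tests above and below apply the same renaming to member lookups through the runtime mirrors. A minimal sketch of that pattern (the Greeter class and its method are made-up names for illustration):

    import scala.reflect.runtime.universe._
    import scala.reflect.runtime.{currentMirror => cm}

    class Greeter { def greet(name: String): String = s"hello, $name" }

    object MirrorDemo extends App {
      val im     = cm.reflect(new Greeter)
      // Look the method up by TermName, then invoke it through a MethodMirror.
      val method = typeOf[Greeter].member(TermName("greet")).asMethod
      println(im.reflectMethod(method)("world")) // prints: hello, world
    }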
diff --git a/test/files/run/reflection-enclosed-basic.scala b/test/files/run/reflection-enclosed-basic.scala
index 1dcb6c2a27..7b9e0c20dc 100644
--- a/test/files/run/reflection-enclosed-basic.scala
+++ b/test/files/run/reflection-enclosed-basic.scala
@@ -12,7 +12,7 @@ private object B6 extends B2 { override def toString = "B6"; override def foo =
object Test extends App {
def testMethodInvocation(instance: Any) = {
val instanceMirror = cm.reflect(instance)
- val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+ val method = instanceMirror.symbol.typeSignature.declaration(TermName("foo")).asMethod
val methodMirror = instanceMirror.reflectMethod(method)
println(methodMirror())
}
@@ -20,7 +20,7 @@ object Test extends App {
def testNestedClass(name: String) = {
val sym = cm.staticClass(name)
println(sym)
- val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+ val ctor = sym.typeSignature.declaration(nme.CONSTRUCTOR).asMethod
val ctorMirror = cm.reflectClass(sym).reflectConstructor(ctor)
val instance = ctorMirror()
println(instance)
diff --git a/test/files/run/reflection-enclosed-inner-basic.scala b/test/files/run/reflection-enclosed-inner-basic.scala
index 2b2c701993..c1cf9bc336 100644
--- a/test/files/run/reflection-enclosed-inner-basic.scala
+++ b/test/files/run/reflection-enclosed-inner-basic.scala
@@ -18,15 +18,15 @@ object Test extends App {
def testMethodInvocation(instance: Any) = {
val instanceMirror = cm.reflect(instance)
- val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+ val method = instanceMirror.symbol.typeSignature.declaration(TermName("foo")).asMethod
val methodMirror = instanceMirror.reflectMethod(method)
println(methodMirror())
}
def testInnerClass(name: String) = {
- val sym = b.typeSignature.declaration(newTypeName(name)).asClass
+ val sym = b.typeSignature.declaration(TypeName(name)).asClass
println(sym)
- val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+ val ctor = sym.typeSignature.declaration(nme.CONSTRUCTOR).asMethod
val ctorMirror = cm.reflect(new B).reflectClass(sym).reflectConstructor(ctor)
val instance = ctorMirror()
println(instance)
@@ -37,7 +37,7 @@ object Test extends App {
testInnerClass("B2")
def testInnerModule(name: String) = {
- val sym = b.typeSignature.declaration(newTermName(name)).asModule
+ val sym = b.typeSignature.declaration(TermName(name)).asModule
println(sym)
val moduleMirror = cm.reflect(new B).reflectModule(sym)
val instance = moduleMirror.instance
diff --git a/test/files/run/reflection-enclosed-inner-inner-basic.scala b/test/files/run/reflection-enclosed-inner-inner-basic.scala
index 1b9e19d37d..8a73fac522 100644
--- a/test/files/run/reflection-enclosed-inner-inner-basic.scala
+++ b/test/files/run/reflection-enclosed-inner-inner-basic.scala
@@ -20,15 +20,15 @@ object Test extends App {
def testMethodInvocation(instance: Any) = {
val instanceMirror = cm.reflect(instance)
- val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+ val method = instanceMirror.symbol.typeSignature.declaration(TermName("foo")).asMethod
val methodMirror = instanceMirror.reflectMethod(method)
println(methodMirror())
}
def testInnerClass(name: String) = {
- val sym = b.typeSignature.declaration(newTypeName(name)).asClass
+ val sym = b.typeSignature.declaration(TypeName(name)).asClass
println(sym)
- val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+ val ctor = sym.typeSignature.declaration(nme.CONSTRUCTOR).asMethod
val outer1 = new B
val outer2 = new outer1.BB
val ctorMirror = cm.reflect(outer2).reflectClass(sym).reflectConstructor(ctor)
@@ -41,7 +41,7 @@ object Test extends App {
testInnerClass("B2")
def testInnerModule(name: String) = {
- val sym = b.typeSignature.declaration(newTermName(name)).asModule
+ val sym = b.typeSignature.declaration(TermName(name)).asModule
println(sym)
val outer1 = new B
val outer2 = new outer1.BB
diff --git a/test/files/run/reflection-enclosed-inner-nested-basic.scala b/test/files/run/reflection-enclosed-inner-nested-basic.scala
index 2800ee2548..6c2fc6df7a 100644
--- a/test/files/run/reflection-enclosed-inner-nested-basic.scala
+++ b/test/files/run/reflection-enclosed-inner-nested-basic.scala
@@ -21,15 +21,15 @@ object Test extends App {
def testMethodInvocation(instance: Any) = {
val instanceMirror = cm.reflect(instance)
- val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+ val method = instanceMirror.symbol.typeSignature.declaration(TermName("foo")).asMethod
val methodMirror = instanceMirror.reflectMethod(method)
println(methodMirror())
}
def testNestedClass(name: String) = {
- val sym = b.typeSignature.declaration(newTypeName(name)).asClass
+ val sym = b.typeSignature.declaration(TypeName(name)).asClass
println(sym)
- val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+ val ctor = sym.typeSignature.declaration(nme.CONSTRUCTOR).asMethod
val ctorMirror = cm.reflect(outer1.BB).reflectClass(sym).reflectConstructor(ctor)
val instance = ctorMirror()
println(instance)
@@ -40,7 +40,7 @@ object Test extends App {
testNestedClass("B2")
def testNestedModule(name: String) = {
- val sym = b.typeSignature.declaration(newTermName(name)).asModule
+ val sym = b.typeSignature.declaration(TermName(name)).asModule
println(sym)
val moduleMirror = cm.reflect(outer1.BB).reflectModule(sym)
val instance = moduleMirror.instance
diff --git a/test/files/run/reflection-enclosed-nested-basic.scala b/test/files/run/reflection-enclosed-nested-basic.scala
index 8b740c2da2..180ac4ebee 100644
--- a/test/files/run/reflection-enclosed-nested-basic.scala
+++ b/test/files/run/reflection-enclosed-nested-basic.scala
@@ -18,15 +18,15 @@ object Test extends App {
def testMethodInvocation(instance: Any) = {
val instanceMirror = cm.reflect(instance)
- val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+ val method = instanceMirror.symbol.typeSignature.declaration(TermName("foo")).asMethod
val methodMirror = instanceMirror.reflectMethod(method)
println(methodMirror())
}
def testNestedClass(name: String) = {
- val sym = b.typeSignature.declaration(newTypeName(name)).asClass
+ val sym = b.typeSignature.declaration(TypeName(name)).asClass
println(sym)
- val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+ val ctor = sym.typeSignature.declaration(nme.CONSTRUCTOR).asMethod
val ctorMirror = cm.reflectClass(sym).reflectConstructor(ctor)
val instance = ctorMirror()
println(instance)
@@ -37,7 +37,7 @@ object Test extends App {
testNestedClass("B2")
def testNestedModule(name: String) = {
- val sym = b.typeSignature.declaration(newTermName(name)).asModule
+ val sym = b.typeSignature.declaration(TermName(name)).asModule
println(sym)
val moduleMirror = cm.reflectModule(sym)
val instance = moduleMirror.instance
diff --git a/test/files/run/reflection-enclosed-nested-inner-basic.scala b/test/files/run/reflection-enclosed-nested-inner-basic.scala
index 7466733d37..2558b8035a 100644
--- a/test/files/run/reflection-enclosed-nested-inner-basic.scala
+++ b/test/files/run/reflection-enclosed-nested-inner-basic.scala
@@ -20,15 +20,15 @@ object Test extends App {
def testMethodInvocation(instance: Any) = {
val instanceMirror = cm.reflect(instance)
- val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+ val method = instanceMirror.symbol.typeSignature.declaration(TermName("foo")).asMethod
val methodMirror = instanceMirror.reflectMethod(method)
println(methodMirror())
}
def testInnerClass(name: String) = {
- val sym = b.typeSignature.declaration(newTypeName(name)).asClass
+ val sym = b.typeSignature.declaration(TypeName(name)).asClass
println(sym)
- val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+ val ctor = sym.typeSignature.declaration(nme.CONSTRUCTOR).asMethod
val ctorMirror = cm.reflect(new B.BB).reflectClass(sym).reflectConstructor(ctor)
val instance = ctorMirror()
println(instance)
@@ -39,7 +39,7 @@ object Test extends App {
testInnerClass("B2")
def testInnerModule(name: String) = {
- val sym = b.typeSignature.declaration(newTermName(name)).asModule
+ val sym = b.typeSignature.declaration(TermName(name)).asModule
println(sym)
val moduleMirror = cm.reflect(new B.BB).reflectModule(sym)
val instance = moduleMirror.instance
diff --git a/test/files/run/reflection-enclosed-nested-nested-basic.scala b/test/files/run/reflection-enclosed-nested-nested-basic.scala
index 8335ea482a..b4711c9a8c 100644
--- a/test/files/run/reflection-enclosed-nested-nested-basic.scala
+++ b/test/files/run/reflection-enclosed-nested-nested-basic.scala
@@ -20,15 +20,15 @@ object Test extends App {
def testMethodInvocation(instance: Any) = {
val instanceMirror = cm.reflect(instance)
- val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+ val method = instanceMirror.symbol.typeSignature.declaration(TermName("foo")).asMethod
val methodMirror = instanceMirror.reflectMethod(method)
println(methodMirror())
}
def testNestedClass(name: String) = {
- val sym = b.typeSignature.declaration(newTypeName(name)).asClass
+ val sym = b.typeSignature.declaration(TypeName(name)).asClass
println(sym)
- val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+ val ctor = sym.typeSignature.declaration(nme.CONSTRUCTOR).asMethod
val ctorMirror = cm.reflectClass(sym).reflectConstructor(ctor)
val instance = ctorMirror()
println(instance)
@@ -39,7 +39,7 @@ object Test extends App {
testNestedClass("B2")
def testNestedModule(name: String) = {
- val sym = b.typeSignature.declaration(newTermName(name)).asModule
+ val sym = b.typeSignature.declaration(TermName(name)).asModule
println(sym)
val moduleMirror = cm.reflectModule(sym)
val instance = moduleMirror.instance
diff --git a/test/files/run/reflection-equality.check b/test/files/run/reflection-equality.check
index 65b525731f..b3b66f48d6 100644
--- a/test/files/run/reflection-equality.check
+++ b/test/files/run/reflection-equality.check
@@ -29,7 +29,7 @@ scala.AnyRef {
def methodIntIntInt(x: scala.Int,y: scala.Int): scala.Int
}
-scala> val ms: MethodSymbol = ts.declaration(newTermName("methodIntIntInt")).asMethod
+scala> val ms: MethodSymbol = ts.declaration(TermName("methodIntIntInt")).asMethod
ms: reflect.runtime.universe.MethodSymbol = method methodIntIntInt
scala> val MethodType( _, t1 ) = ms.typeSignature
diff --git a/test/files/run/reflection-equality.scala b/test/files/run/reflection-equality.scala
index 8fc82721e7..40f116bb53 100644
--- a/test/files/run/reflection-equality.scala
+++ b/test/files/run/reflection-equality.scala
@@ -11,7 +11,7 @@ object Test extends ReplTest {
|def im: InstanceMirror = cm.reflect(new X)
|val cs: ClassSymbol = im.symbol
|val ts: Type = cs.typeSignature
- |val ms: MethodSymbol = ts.declaration(newTermName("methodIntIntInt")).asMethod
+ |val ms: MethodSymbol = ts.declaration(TermName("methodIntIntInt")).asMethod
|val MethodType( _, t1 ) = ms.typeSignature
|val t2 = typeOf[scala.Int]
|t1 == t2
diff --git a/test/files/run/reflection-fieldmirror-accessorsareokay.scala b/test/files/run/reflection-fieldmirror-accessorsareokay.scala
index 16354025f3..3926ab7835 100644
--- a/test/files/run/reflection-fieldmirror-accessorsareokay.scala
+++ b/test/files/run/reflection-fieldmirror-accessorsareokay.scala
@@ -24,6 +24,6 @@ object Test extends App {
}
}
- test(cs.typeSignature.declaration(newTermName("x")).asTerm)
- test(cs.typeSignature.declaration(newTermName("x_$eq")).asTerm)
+ test(cs.typeSignature.declaration(TermName("x")).asTerm)
+ test(cs.typeSignature.declaration(TermName("x_$eq")).asTerm)
}
diff --git a/test/files/run/reflection-fieldmirror-ctorparam.scala b/test/files/run/reflection-fieldmirror-ctorparam.scala
index b9d50fe97b..608adad27b 100644
--- a/test/files/run/reflection-fieldmirror-ctorparam.scala
+++ b/test/files/run/reflection-fieldmirror-ctorparam.scala
@@ -10,7 +10,7 @@ object Test extends App {
val im: InstanceMirror = cm.reflect(a)
val cs = im.symbol
- val f = cs.typeSignature.declaration(newTermName("x")).asTerm
+ val f = cs.typeSignature.declaration(TermName("x")).asTerm
try {
val fm: FieldMirror = im.reflectField(f)
println(fm.get)
diff --git a/test/files/run/reflection-fieldmirror-getsetval.scala b/test/files/run/reflection-fieldmirror-getsetval.scala
index 67c54d9708..9cacb7080b 100644
--- a/test/files/run/reflection-fieldmirror-getsetval.scala
+++ b/test/files/run/reflection-fieldmirror-getsetval.scala
@@ -10,7 +10,7 @@ object Test extends App {
val im: InstanceMirror = cm.reflect(a)
val cs = im.symbol
- val f = cs.typeSignature.declaration(newTermName("x" + nme.LOCAL_SUFFIX_STRING)).asTerm
+ val f = cs.typeSignature.declaration(TermName("x" + nme.LOCAL_SUFFIX_STRING)).asTerm
val fm: FieldMirror = im.reflectField(f)
try {
println(fm.get)
diff --git a/test/files/run/reflection-fieldmirror-getsetvar.scala b/test/files/run/reflection-fieldmirror-getsetvar.scala
index abcf396dd1..52c13a73bb 100644
--- a/test/files/run/reflection-fieldmirror-getsetvar.scala
+++ b/test/files/run/reflection-fieldmirror-getsetvar.scala
@@ -10,7 +10,7 @@ object Test extends App {
val im: InstanceMirror = cm.reflect(a)
val cs = im.symbol
- val f = cs.typeSignature.declaration(newTermName("x" + nme.LOCAL_SUFFIX_STRING)).asTerm
+ val f = cs.typeSignature.declaration(TermName("x" + nme.LOCAL_SUFFIX_STRING)).asTerm
val fm: FieldMirror = im.reflectField(f)
println(fm.get)
fm.set(2)
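
Aside (not part of the patch): the get/set pattern exercised by the field-mirror tests above, spelled out end to end with the TermName factory they migrate to. The class Widget and its field are invented for illustration; the backing field carries nme.LOCAL_SUFFIX_STRING (a trailing space), hence the concatenation.

import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}

class Widget { var x: Int = 1 }

object FieldMirrorSketch extends App {
  val im = cm.reflect(new Widget)
  // TermName("x" + nme.LOCAL_SUFFIX_STRING) names the underlying field, not the accessor
  val f  = im.symbol.typeSignature.declaration(TermName("x" + nme.LOCAL_SUFFIX_STRING)).asTerm
  val fm = im.reflectField(f)
  println(fm.get)  // 1
  fm.set(2)
  println(fm.get)  // 2
}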
diff --git a/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.scala b/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.scala
index 2b4a9bb55e..e070cdcfa3 100644
--- a/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.scala
+++ b/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.scala
@@ -10,7 +10,7 @@ object Test extends App {
val im: InstanceMirror = cm.reflect(a)
val cs = im.symbol
- val f = cs.typeSignature.declaration(newTermName("x" + nme.LOCAL_SUFFIX_STRING)).asTerm
+ val f = cs.typeSignature.declaration(TermName("x" + nme.LOCAL_SUFFIX_STRING)).asTerm
val fm: FieldMirror = im.reflectField(f)
println(fm.symbol.isVar)
}
diff --git a/test/files/run/reflection-fieldmirror-privatethis.scala b/test/files/run/reflection-fieldmirror-privatethis.scala
index ab838dbb1b..89948772b1 100644
--- a/test/files/run/reflection-fieldmirror-privatethis.scala
+++ b/test/files/run/reflection-fieldmirror-privatethis.scala
@@ -10,7 +10,7 @@ object Test extends App {
val im: InstanceMirror = cm.reflect(a)
val cs = im.symbol
- val f = cs.typeSignature.declaration(newTermName("x")).asTerm
+ val f = cs.typeSignature.declaration(TermName("x")).asTerm
val fm: FieldMirror = im.reflectField(f)
println(fm.symbol.isVar)
println(fm.get)
diff --git a/test/files/run/reflection-fieldsymbol-navigation.scala b/test/files/run/reflection-fieldsymbol-navigation.scala
index 4448724988..33dc18a7e3 100644
--- a/test/files/run/reflection-fieldsymbol-navigation.scala
+++ b/test/files/run/reflection-fieldsymbol-navigation.scala
@@ -5,7 +5,7 @@ class C {
}
object Test extends App {
- val x = typeOf[C].member(newTermName("x")).asTerm
+ val x = typeOf[C].member(TermName("x")).asTerm
println(x)
println(x.isVar)
println(x.accessed)
diff --git a/test/files/run/reflection-implClass.scala b/test/files/run/reflection-implClass.scala
index b3c0081ccf..db211fd9a8 100644
--- a/test/files/run/reflection-implClass.scala
+++ b/test/files/run/reflection-implClass.scala
@@ -10,19 +10,19 @@ object Test extends App with Outer {
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
- assert(cm.classSymbol(classTag[Foo].runtimeClass).typeSignature.declaration(newTermName("bar")).typeSignature ==
- cm.classSymbol(classTag[Bar].runtimeClass).typeSignature.declaration(newTermName("foo")).typeSignature)
+ assert(cm.classSymbol(classTag[Foo].runtimeClass).typeSignature.declaration(TermName("bar")).typeSignature ==
+ cm.classSymbol(classTag[Bar].runtimeClass).typeSignature.declaration(TermName("foo")).typeSignature)
val s1 = implClass(classTag[Foo].runtimeClass)
assert(s1 != NoSymbol)
assert(s1.typeSignature != NoType)
assert(s1.companionSymbol.typeSignature != NoType)
- assert(s1.companionSymbol.typeSignature.declaration(newTermName("bar")) != NoSymbol)
+ assert(s1.companionSymbol.typeSignature.declaration(TermName("bar")) != NoSymbol)
val s2 = implClass(classTag[Bar].runtimeClass)
assert(s2 != NoSymbol)
assert(s2.typeSignature != NoType)
assert(s2.companionSymbol.typeSignature != NoType)
- assert(s2.companionSymbol.typeSignature.declaration(newTermName("foo")) != NoSymbol)
+ assert(s2.companionSymbol.typeSignature.declaration(TermName("foo")) != NoSymbol)
def implClass(clazz: Class[_]) = {
val implClass = Class.forName(clazz.getName + "$class")
cm.classSymbol(implClass)
diff --git a/test/files/run/reflection-magicsymbols-invoke.scala b/test/files/run/reflection-magicsymbols-invoke.scala
index 5f39370708..ff3992709f 100644
--- a/test/files/run/reflection-magicsymbols-invoke.scala
+++ b/test/files/run/reflection-magicsymbols-invoke.scala
@@ -28,7 +28,7 @@ object Test extends App {
val ctor = tpe.declaration(nme.CONSTRUCTOR).asMethod
cm.reflectClass(ctor.owner.asClass).reflectConstructor(ctor)(args: _*)
} else {
- val meth = tpe.declaration(newTermName(method).encodedName.toTermName).asMethod
+ val meth = tpe.declaration(TermName(method).encodedName.toTermName).asMethod
cm.reflect(receiver).reflectMethod(meth)(args: _*)
}
})
@@ -54,7 +54,7 @@ object Test extends App {
println("it's important to print the list of AnyVal's members")
println("if some of them change (possibly, adding and/or removing magic symbols), we must update this test")
typeOf[AnyVal].declarations.toList.sortBy(key).foreach(sym => println(key(sym)))
- test(typeOf[AnyVal], null, "<init>")
+ test(typeOf[AnyVal], null, nme.CONSTRUCTOR.toString)
test(typeOf[AnyVal], 2, "getClass")
println("============\nAnyRef")
diff --git a/test/files/run/reflection-magicsymbols-repl.check b/test/files/run/reflection-magicsymbols-repl.check
index 2535e3f43d..bb8bdc9dd9 100644
--- a/test/files/run/reflection-magicsymbols-repl.check
+++ b/test/files/run/reflection-magicsymbols-repl.check
@@ -19,7 +19,7 @@ scala> class A {
defined class A
scala> def test(n: Int): Unit = {
- val sig = typeOf[A] member newTermName("foo" + n) typeSignature
+ val sig = typeOf[A] member TermName("foo" + n) typeSignature
val x = sig.asInstanceOf[MethodType].params.head
println(x.typeSignature)
}
diff --git a/test/files/run/reflection-magicsymbols-repl.scala b/test/files/run/reflection-magicsymbols-repl.scala
index 26127b8661..6a432c2664 100644
--- a/test/files/run/reflection-magicsymbols-repl.scala
+++ b/test/files/run/reflection-magicsymbols-repl.scala
@@ -14,7 +14,7 @@ object Test extends ReplTest {
| def foo8(x: Singleton) = ???
|}
|def test(n: Int): Unit = {
- | val sig = typeOf[A] member newTermName("foo" + n) typeSignature
+ | val sig = typeOf[A] member TermName("foo" + n) typeSignature
| val x = sig.asInstanceOf[MethodType].params.head
| println(x.typeSignature)
|}
diff --git a/test/files/run/reflection-magicsymbols-vanilla.scala b/test/files/run/reflection-magicsymbols-vanilla.scala
index 32819dcc46..26b70460eb 100644
--- a/test/files/run/reflection-magicsymbols-vanilla.scala
+++ b/test/files/run/reflection-magicsymbols-vanilla.scala
@@ -12,7 +12,7 @@ class A {
object Test extends App {
import scala.reflect.runtime.universe._
def test(n: Int): Unit = {
- val sig = typeOf[A] member newTermName("foo" + n) typeSignature
+ val sig = typeOf[A] member TermName("foo" + n) typeSignature
val x = sig.asInstanceOf[MethodType].params.head
println(x.typeSignature)
}
diff --git a/test/files/run/reflection-methodsymbol-params.scala b/test/files/run/reflection-methodsymbol-params.scala
index 45b1f9628f..baad8d6b9b 100644
--- a/test/files/run/reflection-methodsymbol-params.scala
+++ b/test/files/run/reflection-methodsymbol-params.scala
@@ -13,12 +13,12 @@ class C {
}
object Test extends App {
- println(typeOf[C].member(newTermName("x1")).asMethod.paramss)
- println(typeOf[C].member(newTermName("x2")).asMethod.paramss)
- println(typeOf[C].member(newTermName("x3")).asMethod.paramss)
- println(typeOf[C].member(newTermName("x4")).asMethod.paramss)
- println(typeOf[C].member(newTermName("y1")).asMethod.paramss)
- println(typeOf[C].member(newTermName("y2")).asMethod.paramss)
- println(typeOf[C].member(newTermName("y3")).asMethod.paramss)
- println(typeOf[C].member(newTermName("y4")).asMethod.paramss)
+ println(typeOf[C].member(TermName("x1")).asMethod.paramss)
+ println(typeOf[C].member(TermName("x2")).asMethod.paramss)
+ println(typeOf[C].member(TermName("x3")).asMethod.paramss)
+ println(typeOf[C].member(TermName("x4")).asMethod.paramss)
+ println(typeOf[C].member(TermName("y1")).asMethod.paramss)
+ println(typeOf[C].member(TermName("y2")).asMethod.paramss)
+ println(typeOf[C].member(TermName("y3")).asMethod.paramss)
+ println(typeOf[C].member(TermName("y4")).asMethod.paramss)
}
\ No newline at end of file
diff --git a/test/files/run/reflection-methodsymbol-returntype.scala b/test/files/run/reflection-methodsymbol-returntype.scala
index 392754dbe4..74a9e5dac0 100644
--- a/test/files/run/reflection-methodsymbol-returntype.scala
+++ b/test/files/run/reflection-methodsymbol-returntype.scala
@@ -13,12 +13,12 @@ class C {
}
object Test extends App {
- println(typeOf[C].member(newTermName("x1")).asMethod.returnType)
- println(typeOf[C].member(newTermName("x2")).asMethod.returnType)
- println(typeOf[C].member(newTermName("x3")).asMethod.returnType)
- println(typeOf[C].member(newTermName("x4")).asMethod.returnType)
- println(typeOf[C].member(newTermName("y1")).asMethod.returnType)
- println(typeOf[C].member(newTermName("y2")).asMethod.returnType)
- println(typeOf[C].member(newTermName("y3")).asMethod.returnType)
- println(typeOf[C].member(newTermName("y4")).asMethod.returnType)
+ println(typeOf[C].member(TermName("x1")).asMethod.returnType)
+ println(typeOf[C].member(TermName("x2")).asMethod.returnType)
+ println(typeOf[C].member(TermName("x3")).asMethod.returnType)
+ println(typeOf[C].member(TermName("x4")).asMethod.returnType)
+ println(typeOf[C].member(TermName("y1")).asMethod.returnType)
+ println(typeOf[C].member(TermName("y2")).asMethod.returnType)
+ println(typeOf[C].member(TermName("y3")).asMethod.returnType)
+ println(typeOf[C].member(TermName("y4")).asMethod.returnType)
}
\ No newline at end of file
diff --git a/test/files/run/reflection-methodsymbol-typeparams.scala b/test/files/run/reflection-methodsymbol-typeparams.scala
index bb0a3c3aec..56d37ebeaa 100644
--- a/test/files/run/reflection-methodsymbol-typeparams.scala
+++ b/test/files/run/reflection-methodsymbol-typeparams.scala
@@ -13,12 +13,12 @@ class C {
}
object Test extends App {
- println(typeOf[C].member(newTermName("x1")).asMethod.typeParams)
- println(typeOf[C].member(newTermName("x2")).asMethod.typeParams)
- println(typeOf[C].member(newTermName("x3")).asMethod.typeParams)
- println(typeOf[C].member(newTermName("x4")).asMethod.typeParams)
- println(typeOf[C].member(newTermName("y1")).asMethod.typeParams)
- println(typeOf[C].member(newTermName("y2")).asMethod.typeParams)
- println(typeOf[C].member(newTermName("y3")).asMethod.typeParams)
- println(typeOf[C].member(newTermName("y4")).asMethod.typeParams)
+ println(typeOf[C].member(TermName("x1")).asMethod.typeParams)
+ println(typeOf[C].member(TermName("x2")).asMethod.typeParams)
+ println(typeOf[C].member(TermName("x3")).asMethod.typeParams)
+ println(typeOf[C].member(TermName("x4")).asMethod.typeParams)
+ println(typeOf[C].member(TermName("y1")).asMethod.typeParams)
+ println(typeOf[C].member(TermName("y2")).asMethod.typeParams)
+ println(typeOf[C].member(TermName("y3")).asMethod.typeParams)
+ println(typeOf[C].member(TermName("y4")).asMethod.typeParams)
}
\ No newline at end of file
diff --git a/test/files/run/reflection-repl-classes.check b/test/files/run/reflection-repl-classes.check
index 2dd96a93bf..d70db59b85 100644
--- a/test/files/run/reflection-repl-classes.check
+++ b/test/files/run/reflection-repl-classes.check
@@ -19,10 +19,10 @@ scala> object defs {
val cm = reflect.runtime.currentMirror
val u = cm.universe
val im = cm.reflect(new B)
- val method = im.symbol.typeSignature.member(u.newTermName("foo")).asMethod
+ val method = im.symbol.typeSignature.member(u.TermName("foo")).asMethod
val mm = im.reflectMethod(method)
}
-defined module defs
+defined object defs
scala> import defs._
import defs._
diff --git a/test/files/run/reflection-repl-classes.scala b/test/files/run/reflection-repl-classes.scala
index 80e332cde3..4bfb980498 100644
--- a/test/files/run/reflection-repl-classes.scala
+++ b/test/files/run/reflection-repl-classes.scala
@@ -12,7 +12,7 @@ object Test extends ReplTest {
| val cm = reflect.runtime.currentMirror
| val u = cm.universe
| val im = cm.reflect(new B)
- | val method = im.symbol.typeSignature.member(u.newTermName("foo")).asMethod
+ | val method = im.symbol.typeSignature.member(u.TermName("foo")).asMethod
| val mm = im.reflectMethod(method)
|}
|import defs._
diff --git a/test/files/run/reflection-sanitychecks.scala b/test/files/run/reflection-sanitychecks.scala
index f817f23731..6d3daff1f7 100644
--- a/test/files/run/reflection-sanitychecks.scala
+++ b/test/files/run/reflection-sanitychecks.scala
@@ -32,14 +32,14 @@ object Test extends App {
def test(tpe: Type): Unit = {
def failsafe(action: => Any): Any = try action catch { case ex: Throwable => ex.toString }
println(s"=========members of ${tpe.typeSymbol.name} in a mirror of D=========")
- println("field #1: " + failsafe(im.reflectField(tpe.member(newTermName("foo")).asTerm).get))
- println("method #1: " + failsafe(im.reflectMethod(tpe.member(newTermName("bar")).asMethod)()))
- println("field #2: " + failsafe(im.reflectField(tpe.member(newTermName("quux")).asTerm).get))
- println("method #2: " + failsafe(im.reflectMethod(tpe.member(newTermName("baz")).asMethod)()))
- println("constructor #1: " + failsafe(cm.reflectClass(im.symbol).reflectConstructor(tpe.member(newTermName("bar")).asMethod)()))
- println("constructor #2: " + failsafe(cm.reflectClass(im.symbol).reflectConstructor(tpe.member(newTermName("<init>")).asMethod)()))
- println("class: " + failsafe(im.reflectClass(tpe.member(newTypeName("C")).asClass).reflectConstructor(typeOf[C].member(newTypeName("C")).asClass.typeSignature.member(newTermName("<init>")).asMethod)()))
- println("object: " + failsafe(im.reflectModule(tpe.member(newTermName("O")).asModule).instance))
+ println("field #1: " + failsafe(im.reflectField(tpe.member(TermName("foo")).asTerm).get))
+ println("method #1: " + failsafe(im.reflectMethod(tpe.member(TermName("bar")).asMethod)()))
+ println("field #2: " + failsafe(im.reflectField(tpe.member(TermName("quux")).asTerm).get))
+ println("method #2: " + failsafe(im.reflectMethod(tpe.member(TermName("baz")).asMethod)()))
+ println("constructor #1: " + failsafe(cm.reflectClass(im.symbol).reflectConstructor(tpe.member(TermName("bar")).asMethod)()))
+ println("constructor #2: " + failsafe(cm.reflectClass(im.symbol).reflectConstructor(tpe.member(TermName("<init>")).asMethod)()))
+ println("class: " + failsafe(im.reflectClass(tpe.member(TypeName("C")).asClass).reflectConstructor(typeOf[C].member(TypeName("C")).asClass.typeSignature.member(nme.CONSTRUCTOR).asMethod)()))
+ println("object: " + failsafe(im.reflectModule(tpe.member(TermName("O")).asModule).instance))
println()
}
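
Aside (hedged sketch, class Point is invented for illustration): instantiating a class reflectively with nme.CONSTRUCTOR, the constant the patch favours over newTermName("<init>") wherever the constructor name is meant.

import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}

class Point(val x: Int, val y: Int) { override def toString = s"Point($x, $y)" }

object CtorSketch extends App {
  val classSym   = typeOf[Point].typeSymbol.asClass
  // nme.CONSTRUCTOR looks up the primary constructor symbol
  val ctor       = typeOf[Point].declaration(nme.CONSTRUCTOR).asMethod
  val ctorMirror = cm.reflectClass(classSym).reflectConstructor(ctor)
  println(ctorMirror(1, 2))  // Point(1, 2)
}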
diff --git a/test/files/run/reflection-valueclasses-derived.scala b/test/files/run/reflection-valueclasses-derived.scala
index 6b08f987ba..8d25e2929c 100644
--- a/test/files/run/reflection-valueclasses-derived.scala
+++ b/test/files/run/reflection-valueclasses-derived.scala
@@ -6,7 +6,7 @@ class C(val x: Int) extends AnyVal {
}
object Test extends App {
- println(cm.reflect(new C(2)).reflectMethod(typeOf[C].member(newTermName("foo")).asMethod)(2))
- println(cm.reflect(new C(2)).reflectMethod(typeOf[C].member(newTermName("getClass")).asMethod)())
- println(cm.reflect(new C(2)).reflectMethod(typeOf[C].member(newTermName("toString")).asMethod)())
+ println(cm.reflect(new C(2)).reflectMethod(typeOf[C].member(TermName("foo")).asMethod)(2))
+ println(cm.reflect(new C(2)).reflectMethod(typeOf[C].member(TermName("getClass")).asMethod)())
+ println(cm.reflect(new C(2)).reflectMethod(typeOf[C].member(TermName("toString")).asMethod)())
}
\ No newline at end of file
diff --git a/test/files/run/reflection-valueclasses-magic.scala b/test/files/run/reflection-valueclasses-magic.scala
index c4a26e460a..33d4634397 100644
--- a/test/files/run/reflection-valueclasses-magic.scala
+++ b/test/files/run/reflection-valueclasses-magic.scala
@@ -44,7 +44,7 @@ object Test extends App {
val realex = scala.ExceptionUtils.unwrapThrowable(ex)
println(realex.getClass + ": " + realex.getMessage)
}
- val meth = tpe.declaration(newTermName(method).encodedName.toTermName)
+ val meth = tpe.declaration(TermName(method).encodedName.toTermName)
val testees = if (meth.isMethod) List(meth.asMethod) else meth.asTerm.alternatives.map(_.asMethod)
testees foreach (testee => {
val convertedArgs = args.zipWithIndex.map { case (arg, i) => convert(arg, testee.paramss.flatten.apply(i).typeSignature) }
diff --git a/test/files/run/reflection-valueclasses-standard.scala b/test/files/run/reflection-valueclasses-standard.scala
index 18a3d1fa04..b6b5a2ede2 100644
--- a/test/files/run/reflection-valueclasses-standard.scala
+++ b/test/files/run/reflection-valueclasses-standard.scala
@@ -5,8 +5,8 @@ import scala.reflect.{ClassTag, classTag}
object Test extends App {
def test[T: ClassTag: TypeTag](x: T) = {
println(s"========${classTag[T].runtimeClass}========")
- println(cm.reflect(x).reflectMethod(typeOf[T].member(newTermName("getClass")).asMethod)())
- println(cm.reflect(x).reflectMethod(typeOf[T].member(newTermName("toString")).asMethod)())
+ println(cm.reflect(x).reflectMethod(typeOf[T].member(TermName("getClass")).asMethod)())
+ println(cm.reflect(x).reflectMethod(typeOf[T].member(TermName("toString")).asMethod)())
}
test(2.toByte)
diff --git a/test/files/run/reify-aliases.check b/test/files/run/reify-aliases.check
index aa846b9bc6..da784227af 100644
--- a/test/files/run/reify-aliases.check
+++ b/test/files/run/reify-aliases.check
@@ -1 +1 @@
-TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List())
+TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())
diff --git a/test/files/run/reify_copypaste1.scala b/test/files/run/reify_copypaste1.scala
index c597b7af19..b2eef28026 100644
--- a/test/files/run/reify_copypaste1.scala
+++ b/test/files/run/reify_copypaste1.scala
@@ -9,8 +9,8 @@ object Test extends App {
val output = new java.io.ByteArrayOutputStream()
System.setOut(new java.io.PrintStream(output))
val toolBox = currentMirror.mkToolBox(options = "-Yreify-copypaste")
- val reify = Select(Select(Select(Select(Ident(ScalaPackage), newTermName("reflect")), newTermName("runtime")), newTermName("universe")), newTermName("reify"))
- val reifee = Block(List(ValDef(Modifiers(LAZY), newTermName("x"), TypeTree(), Apply(Ident(ListModule), List(Literal(Constant(1)), Literal(Constant(2)))))), Ident(newTermName("x")))
+ val reify = Select(Select(Select(Select(Ident(ScalaPackage), TermName("reflect")), TermName("runtime")), TermName("universe")), TermName("reify"))
+ val reifee = Block(List(ValDef(Modifiers(LAZY), TermName("x"), TypeTree(), Apply(Ident(ListModule), List(Literal(Constant(1)), Literal(Constant(2)))))), Ident(TermName("x")))
toolBox.eval(Apply(reify, List(reifee)))
val Block(List(tpeCopypaste), exprCopypaste @ ModuleDef(_, _, Template(_, _, (_ :: stats) :+ expr))) = toolBox.parse(output.toString())
output.reset()
diff --git a/test/pending/run/reify_implicits-new.check b/test/files/run/reify_implicits-new.check
index e3aeb20f6b..e3aeb20f6b 100644
--- a/test/pending/run/reify_implicits-new.check
+++ b/test/files/run/reify_implicits-new.check
diff --git a/test/pending/run/reify_implicits-new.scala b/test/files/run/reify_implicits-new.scala
index 42a1deef26..42a1deef26 100644
--- a/test/pending/run/reify_implicits-new.scala
+++ b/test/files/run/reify_implicits-new.scala
diff --git a/test/pending/run/reify_implicits-old.check b/test/files/run/reify_implicits-old.check
index e3aeb20f6b..e3aeb20f6b 100644
--- a/test/pending/run/reify_implicits-old.check
+++ b/test/files/run/reify_implicits-old.check
diff --git a/test/pending/run/reify_implicits-old.scala b/test/files/run/reify_implicits-old.scala
index 8ff256d2d4..8ff256d2d4 100644
--- a/test/pending/run/reify_implicits-old.scala
+++ b/test/files/run/reify_implicits-old.scala
diff --git a/test/files/run/reify_newimpl_11.check b/test/files/run/reify_newimpl_11.check
index 2f5cb581e6..c019c6db2d 100644
--- a/test/files/run/reify_newimpl_11.check
+++ b/test/files/run/reify_newimpl_11.check
@@ -1,2 +1,4 @@
-scala.tools.reflect.ToolBoxError: reflective toolbox has failed:
-unresolved free type variables (namely: T defined by C in reify_newimpl_11.scala:6:11). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types
+scala.tools.reflect.ToolBoxError: reflective toolbox failed due to unresolved free type variables:
+ T defined by C in reify_newimpl_11.scala:6:11
+have you forgotten to use TypeTag annotations for type parameters external to a reifee?
+if you have troubles tracking free type variables, consider using -Xlog-free-types
diff --git a/test/files/run/reify_newimpl_13.check b/test/files/run/reify_newimpl_13.check
index d518cd7b84..13e3c9af1e 100644
--- a/test/files/run/reify_newimpl_13.check
+++ b/test/files/run/reify_newimpl_13.check
@@ -1,2 +1,4 @@
-scala.tools.reflect.ToolBoxError: reflective toolbox has failed:
-unresolved free type variables (namely: T defined by C in reify_newimpl_13.scala:7:13). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types
+scala.tools.reflect.ToolBoxError: reflective toolbox failed due to unresolved free type variables:
+ T defined by C in reify_newimpl_13.scala:7:13
+have you forgotten to use TypeTag annotations for type parameters external to a reifee?
+if you have troubles tracking free type variables, consider using -Xlog-free-types
diff --git a/test/files/run/reify_newimpl_19.check b/test/files/run/reify_newimpl_19.check
index 8b8652f92c..c749d4f106 100644
--- a/test/files/run/reify_newimpl_19.check
+++ b/test/files/run/reify_newimpl_19.check
@@ -1,2 +1,4 @@
-scala.tools.reflect.ToolBoxError: reflective toolbox has failed:
-unresolved free type variables (namely: T defined by C in reify_newimpl_19.scala:7:10). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types
+scala.tools.reflect.ToolBoxError: reflective toolbox failed due to unresolved free type variables:
+ T defined by C in reify_newimpl_19.scala:7:10
+have you forgotten to use TypeTag annotations for type parameters external to a reifee?
+if you have troubles tracking free type variables, consider using -Xlog-free-types
diff --git a/test/files/run/reify_newimpl_22.check b/test/files/run/reify_newimpl_22.check
index dcb3e2889b..e68fe2f6b1 100644
--- a/test/files/run/reify_newimpl_22.check
+++ b/test/files/run/reify_newimpl_22.check
@@ -19,7 +19,7 @@ scala> {
}
println(code.eval)
}
-<console>:15: free term: Ident(newTermName("x")) defined by res0 in <console>:14:21
+<console>:15: free term: Ident(TermName("x")) defined by res0 in <console>:14:21
val code = reify {
^
2
diff --git a/test/files/run/reify_newimpl_23.check b/test/files/run/reify_newimpl_23.check
index 882124657e..38c95646a8 100644
--- a/test/files/run/reify_newimpl_23.check
+++ b/test/files/run/reify_newimpl_23.check
@@ -18,7 +18,7 @@ scala> def foo[T]{
}
println(code.eval)
}
-<console>:13: free type: Ident(newTypeName("T")) defined by foo in <console>:12:16
+<console>:13: free type: Ident(TypeName("T")) defined by foo in <console>:12:16
val code = reify {
^
foo: [T]=> Unit
diff --git a/test/files/run/reify_newimpl_25.check b/test/files/run/reify_newimpl_25.check
index d1028b94c7..86f6abce02 100644
--- a/test/files/run/reify_newimpl_25.check
+++ b/test/files/run/reify_newimpl_25.check
@@ -9,7 +9,7 @@ scala> {
val tt = implicitly[TypeTag[x.type]]
println(tt)
}
-<console>:11: free term: Ident(newTermName("x")) defined by res0 in <console>:10:21
+<console>:11: free term: Ident(TermName("x")) defined by res0 in <console>:10:21
val tt = implicitly[TypeTag[x.type]]
^
TypeTag[x.type]
diff --git a/test/files/run/reify_newimpl_26.check b/test/files/run/reify_newimpl_26.check
index 347f6365aa..d3e2540de0 100644
--- a/test/files/run/reify_newimpl_26.check
+++ b/test/files/run/reify_newimpl_26.check
@@ -8,7 +8,7 @@ scala> def foo[T]{
val tt = implicitly[WeakTypeTag[List[T]]]
println(tt)
}
-<console>:9: free type: Ident(newTypeName("T")) defined by foo in <console>:7:16
+<console>:9: free type: Ident(TypeName("T")) defined by foo in <console>:7:16
val tt = implicitly[WeakTypeTag[List[T]]]
^
foo: [T]=> Unit
diff --git a/test/files/run/reify_newimpl_30.check b/test/files/run/reify_newimpl_30.check
index 29baac911e..7557c750a6 100644
--- a/test/files/run/reify_newimpl_30.check
+++ b/test/files/run/reify_newimpl_30.check
@@ -1,2 +1,4 @@
-reflective toolbox has failed:
-unresolved free type variables (namely: C defined by <local Test> in reify_newimpl_30.scala:7:11). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types
+reflective toolbox failed due to unresolved free type variables:
+ C defined by <local Test> in reify_newimpl_30.scala:7:11
+have you forgotten to use TypeTag annotations for type parameters external to a reifee?
+if you have troubles tracking free type variables, consider using -Xlog-free-types
diff --git a/test/files/run/reify_printf.scala b/test/files/run/reify_printf.scala
index 272856b962..9932a58dfa 100644
--- a/test/files/run/reify_printf.scala
+++ b/test/files/run/reify_printf.scala
@@ -23,7 +23,7 @@ object Test extends App {
*/
var i = 0
- def gensym(name: String) = { i += 1; newTermName(name + i) }
+ def gensym(name: String) = { i += 1; TermName(name + i) }
def createTempValDef( value : Tree, tpe : Type ) : (Option[Tree],Tree) = {
val local = gensym("temp")
@@ -59,10 +59,10 @@ object Test extends App {
Apply(
Select(
Select(
- Ident( newTermName("scala") )
- , newTermName("Predef")
+ Ident( TermName("scala") )
+ , TermName("Predef")
)
- , newTermName("print")
+ , TermName("print")
)
, List(ref)
): Tree
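
Aside (sketch only, assuming scala-compiler is on the classpath for the ToolBox): hand-building the same kind of Select chain with the TermName factory and evaluating it, as the reify tests above do.

import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox

object TreeSketch extends App {
  val tb = cm.mkToolBox()
  // scala.Predef.println("built with TermName") expressed as a raw tree
  val tree =
    Apply(
      Select(Select(Ident(TermName("scala")), TermName("Predef")), TermName("println")),
      List(Literal(Constant("built with TermName"))))
  tb.eval(tree)  // prints: built with TermName
}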
diff --git a/test/files/run/repl-bare-expr.check b/test/files/run/repl-bare-expr.check
index 8b6434e986..a92243c7c0 100644
--- a/test/files/run/repl-bare-expr.check
+++ b/test/files/run/repl-bare-expr.check
@@ -31,7 +31,7 @@ scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Mooo
<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
^
-defined module Cow
+defined object Cow
defined class Moo
bippy: Int
res2: Int = 105
@@ -39,7 +39,7 @@ res2: Int = 105
scala>
scala> object Bovine { var x: List[_] = null } ; case class Ruminant(x: Int) ; bippy * bippy * bippy
-defined module Bovine
+defined object Bovine
defined class Ruminant
res3: Int = 216
diff --git a/test/files/run/repl-colon-type.check b/test/files/run/repl-colon-type.check
index 56ddd74375..27be3eb67d 100644
--- a/test/files/run/repl-colon-type.check
+++ b/test/files/run/repl-colon-type.check
@@ -4,17 +4,10 @@ Type :help for more information.
scala>
scala> :type List[1, 2, 3]
-<console>:2: error: identifier expected but integer literal found.
- List[1, 2, 3]
- ^
-<console>:3: error: ']' expected but '}' found.
- }
- ^
<console>:1: error: identifier expected but integer literal found.
List[1, 2, 3]
^
-
scala> :type List(1, 2, 3)
List[Int]
@@ -45,17 +38,13 @@ scala> :type lazy val f = 5
Int
scala> :type protected lazy val f = 5
-<console>:2: error: illegal start of statement (no modifiers allowed here)
- protected lazy val f = 5
- ^
<console>:5: error: lazy value f cannot be accessed in object $iw
Access to protected value f not permitted because
- enclosing object $eval in package $line19 is not a subclass of
+ enclosing object $eval in package $line13 is not a subclass of
object $iw where target is defined
- lazy val $result = `f`
+ lazy val $result = f
^
-
scala> :type def f = 5
=> Int
@@ -223,4 +212,14 @@ PolyType(
scala>
+scala> // SI-7132 - :type doesn't understand Unit
+
+scala> :type ()
+Unit
+
+scala> :type println("side effect!")
+Unit
+
+scala>
+
scala>
diff --git a/test/files/run/repl-colon-type.scala b/test/files/run/repl-colon-type.scala
index c055b215c2..8cf81a6afe 100644
--- a/test/files/run/repl-colon-type.scala
+++ b/test/files/run/repl-colon-type.scala
@@ -26,6 +26,10 @@ object Test extends ReplTest {
|:type -v Nil.combinations _
|:type -v def f[T <: AnyVal] = List[T]().combinations _
|:type -v def f[T, U >: T](x: T, y: List[U]) = x :: y
+ |
+ |// SI-7132 - :type doesn't understand Unit
+ |:type ()
+ |:type println("side effect!")
""".stripMargin
}
diff --git a/test/files/run/repl-empty-package.check b/test/files/run/repl-empty-package.check
new file mode 100644
index 0000000000..ecf79c2c6d
--- /dev/null
+++ b/test/files/run/repl-empty-package.check
@@ -0,0 +1,7 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> println(Bippy.bippy)
+bippy!
+
+scala>
diff --git a/test/files/run/repl-empty-package/s_1.scala b/test/files/run/repl-empty-package/s_1.scala
new file mode 100644
index 0000000000..b59d16b338
--- /dev/null
+++ b/test/files/run/repl-empty-package/s_1.scala
@@ -0,0 +1,3 @@
+object Bippy {
+ def bippy = "bippy!"
+}
diff --git a/test/files/run/repl-empty-package/s_2.scala b/test/files/run/repl-empty-package/s_2.scala
new file mode 100644
index 0000000000..512e6dd382
--- /dev/null
+++ b/test/files/run/repl-empty-package/s_2.scala
@@ -0,0 +1,5 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = "println(Bippy.bippy)"
+}
diff --git a/test/files/run/repl-javap-def.scala b/test/files/run/repl-javap-def.scala
new file mode 100644
index 0000000000..dbd769613a
--- /dev/null
+++ b/test/files/run/repl-javap-def.scala
@@ -0,0 +1,17 @@
+import scala.tools.partest.JavapTest
+
+object Test extends JavapTest {
+ def code = """
+ |def f = 7
+ |:javap -public -raw f
+ """.stripMargin
+
+ // it should find f wrapped in repl skins. replstiltskin.
+ override def yah(res: Seq[String]) = {
+ // replstiltskin: what be my name?
+ val keywords = List("public", "class", "line")
+ def isLineClass(s: String) = keywords forall (s contains _)
+ def filtered = res filter isLineClass
+ 1 == filtered.size
+ }
+}
diff --git a/test/files/run/repl-javap-fun.scala b/test/files/run/repl-javap-fun.scala
new file mode 100644
index 0000000000..5c9a6b7691
--- /dev/null
+++ b/test/files/run/repl-javap-fun.scala
@@ -0,0 +1,16 @@
+import scala.tools.partest.JavapTest
+
+object Test extends JavapTest {
+ def code = """
+ |object Betty {
+ | List(1,2,3) filter (_ % 2 != 0) map (_ * 2)
+ |}
+ |:javap -fun Betty
+ """.stripMargin
+
+ // two anonfuns of Betty
+ override def yah(res: Seq[String]) = {
+ def filtered = res filter (_ contains "public final class Betty")
+ 2 == filtered.size
+ }
+}
diff --git a/test/files/run/repl-javap-mem.scala b/test/files/run/repl-javap-mem.scala
new file mode 100644
index 0000000000..8db30e835c
--- /dev/null
+++ b/test/files/run/repl-javap-mem.scala
@@ -0,0 +1,19 @@
+import scala.tools.partest.JavapTest
+
+object Test extends JavapTest {
+ def code = """
+ |object Betty {
+ | val ds = List(1,2,3) filter (_ % 2 == 0) map (_ * 3)
+ | def m(vs: List[Int]) = vs filter (_ % 2 != 0) map (_ * 2)
+ |}
+ |:javap Betty#m
+ """.stripMargin
+
+ // filter for requested method member
+ override def yah(res: Seq[String]) = {
+ // cheaply, methods end in arg list
+ val p = """.*m\(.*\);""".r
+ def filtered = res filter (_ match { case p() => true case _ => false })
+ 1 == filtered.size
+ }
+}
diff --git a/test/files/run/repl-javap-memfun.scala b/test/files/run/repl-javap-memfun.scala
new file mode 100644
index 0000000000..d2b4243c8b
--- /dev/null
+++ b/test/files/run/repl-javap-memfun.scala
@@ -0,0 +1,18 @@
+import scala.tools.partest.JavapTest
+
+object Test extends JavapTest {
+ def code = """
+ |object Betty {
+ | List(1,2,3) count (_ % 2 != 0)
+ | def f = List(1,2,3) filter (_ % 2 != 0) map (_ * 2)
+ | def g = List(1,2,3) filter (_ % 2 == 0) map (_ * 3) map (_ + 1)
+ |}
+ |:javap -fun Betty#g
+ """.stripMargin
+
+ // three anonfuns of Betty#g
+ override def yah(res: Seq[String]) = {
+ def filtered = res filter (_ contains "public final class Betty")
+ 3 == filtered.size
+ }
+}
diff --git a/test/files/run/repl-javap-more-fun.scala b/test/files/run/repl-javap-more-fun.scala
new file mode 100644
index 0000000000..e603faf75a
--- /dev/null
+++ b/test/files/run/repl-javap-more-fun.scala
@@ -0,0 +1,17 @@
+import scala.tools.partest.JavapTest
+
+object Test extends JavapTest {
+ def code = """
+ |object Betty {
+ | val ds = List(1,2,3) filter (_ % 2 == 0) map (_ * 3)
+ | def m(vs: List[Int]) = vs filter (_ % 2 != 0) map (_ * 2)
+ |}
+ |:javap -fun Betty
+ """.stripMargin
+
+ // two anonfuns of Betty
+ override def yah(res: Seq[String]) = {
+ def filtered = res filter (_ contains "public final class Betty")
+ 4 == filtered.size
+ }
+}
diff --git a/test/files/run/repl-javap-outdir-funs/foo_1.scala b/test/files/run/repl-javap-outdir-funs/foo_1.scala
new file mode 100644
index 0000000000..9b98e94733
--- /dev/null
+++ b/test/files/run/repl-javap-outdir-funs/foo_1.scala
@@ -0,0 +1,6 @@
+
+package disktest
+
+class Foo {
+ def m(vs: List[Int]) = vs map (_ + 1)
+}
diff --git a/test/files/run/repl-javap-outdir-funs/run-repl_7.scala b/test/files/run/repl-javap-outdir-funs/run-repl_7.scala
new file mode 100644
index 0000000000..dfe3dae270
--- /dev/null
+++ b/test/files/run/repl-javap-outdir-funs/run-repl_7.scala
@@ -0,0 +1,12 @@
+import scala.tools.partest.JavapTest
+
+object Test extends JavapTest {
+ def code = """
+ |:javap -fun disktest/Foo.class
+ """.stripMargin
+
+ override def yah(res: Seq[String]) = {
+ def filtered = res filter (_ contains "public final class disktest.Foo")
+ 1 == filtered.size
+ }
+}
diff --git a/test/files/run/repl-javap-outdir/foo_1.scala b/test/files/run/repl-javap-outdir/foo_1.scala
new file mode 100644
index 0000000000..9b98e94733
--- /dev/null
+++ b/test/files/run/repl-javap-outdir/foo_1.scala
@@ -0,0 +1,6 @@
+
+package disktest
+
+class Foo {
+ def m(vs: List[Int]) = vs map (_ + 1)
+}
diff --git a/test/files/run/repl-javap-outdir/run-repl_7.scala b/test/files/run/repl-javap-outdir/run-repl_7.scala
new file mode 100644
index 0000000000..dc2c5719ff
--- /dev/null
+++ b/test/files/run/repl-javap-outdir/run-repl_7.scala
@@ -0,0 +1,12 @@
+import scala.tools.partest.JavapTest
+
+object Test extends JavapTest {
+ def code = """
+ |:javap disktest/Foo.class
+ """.stripMargin
+
+ override def yah(res: Seq[String]) = {
+ def filtered = res filter (_ contains "public class disktest.Foo")
+ 1 == filtered.size
+ }
+}
diff --git a/test/files/run/repl-javap.scala b/test/files/run/repl-javap.scala
new file mode 100644
index 0000000000..7a19852d4e
--- /dev/null
+++ b/test/files/run/repl-javap.scala
@@ -0,0 +1,13 @@
+import scala.tools.partest.JavapTest
+
+object Test extends JavapTest {
+ def code = """
+ |case class Betty(i: Int) { def next = Betty(i+1) }
+ |:javap Betty
+ """.stripMargin
+
+ override def yah(res: Seq[String]) = {
+ def filtered = res filter (_ contains "public class Betty")
+ 1 == filtered.size
+ }
+}
diff --git a/test/files/run/repl-out-dir.check b/test/files/run/repl-out-dir.check
new file mode 100644
index 0000000000..a96f9ba9d9
--- /dev/null
+++ b/test/files/run/repl-out-dir.check
@@ -0,0 +1,53 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> case class Bippy(x: Int)
+defined class Bippy
+
+scala> val x = Bippy(1)
+x: Bippy = Bippy(1)
+
+scala> $intp.showDirectory
+repl-out-dir-run.obj
+ $line1
+ $eval$.class
+ $eval.class
+ $line2
+ $eval$.class
+ $eval.class
+ $read$$iw$$iw$.class
+ $read$$iw$.class
+ $read$.class
+ $read.class
+ $line3
+ $eval$.class
+ $eval.class
+ $read$$iw$$iw$.class
+ $read$$iw$$iw$Bippy$.class
+ $read$$iw$$iw$Bippy.class
+ $read$$iw$.class
+ $read$.class
+ $read.class
+ $line4
+ $eval$.class
+ $eval.class
+ $read$$iw$$iw$.class
+ $read$$iw$.class
+ $read$.class
+ $read.class
+ $line5
+ $eval$.class
+ $eval.class
+ $read$$iw$$iw$.class
+ $read$$iw$.class
+ $read$.class
+ $read.class
+ $repl_$init.class
+ Test$.class
+ Test.class
+
+scala>
+
+scala>
diff --git a/test/files/run/repl-out-dir.scala b/test/files/run/repl-out-dir.scala
new file mode 100644
index 0000000000..33c823aa2d
--- /dev/null
+++ b/test/files/run/repl-out-dir.scala
@@ -0,0 +1,13 @@
+import scala.tools.partest.ReplTest
+import scala.tools.nsc.Settings
+
+object Test extends ReplTest {
+ override def extraSettings = s"-Yrepl-outdir ${testOutput.path}"
+
+ def code = s"""
+case class Bippy(x: Int)
+val x = Bippy(1)
+$$intp.showDirectory
+ """
+
+}
diff --git a/test/files/run/repl-paste.check b/test/files/run/repl-paste.check
index d3e171fbfb..e4c407c6e8 100644
--- a/test/files/run/repl-paste.check
+++ b/test/files/run/repl-paste.check
@@ -21,7 +21,7 @@ val x = (new Dingus).y
// Exiting paste mode, now interpreting.
defined class Dingus
-defined module Dingus
+defined object Dingus
x: Int = 110
scala>
diff --git a/test/files/run/runtime.scala b/test/files/run/runtime.scala
index 2dcb41fb50..a2ac204e8a 100644
--- a/test/files/run/runtime.scala
+++ b/test/files/run/runtime.scala
@@ -125,7 +125,7 @@ object Test2Test {
object Test3Test {
- class Foo { override def equals(that: Any) = error("abort"); }
+ class Foo { override def equals(that: Any) = sys.error("abort"); }
def check(expected: Boolean, actual1: Boolean, actual2: Boolean): Unit =
Console.println(
diff --git a/test/files/run/search.check b/test/files/run/search.check
new file mode 100644
index 0000000000..a885696509
--- /dev/null
+++ b/test/files/run/search.check
@@ -0,0 +1,6 @@
+Found(2)
+Found(4)
+InsertionPoint(9)
+Found(2)
+Found(4)
+InsertionPoint(9)
diff --git a/test/files/run/search.scala b/test/files/run/search.scala
new file mode 100644
index 0000000000..ed7fed54a7
--- /dev/null
+++ b/test/files/run/search.scala
@@ -0,0 +1,14 @@
+object Test extends App {
+ import scala.collection.{LinearSeq, IndexedSeq}
+ import scala.collection.Searching.search
+
+ val ls = LinearSeq(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 13)
+ println(ls.search(3))
+ println(ls.search(5, 3, 8))
+ println(ls.search(12))
+
+ val is = IndexedSeq(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 13)
+ println(is.search(3))
+ println(is.search(5, 3, 8))
+ println(is.search(12))
+}
diff --git a/test/files/run/settings-parse.check b/test/files/run/settings-parse.check
new file mode 100644
index 0000000000..18145c9100
--- /dev/null
+++ b/test/files/run/settings-parse.check
@@ -0,0 +1,566 @@
+0) List(-cp, ) ==> Settings {
+ -d = .
+ -classpath = ""
+}
+
+1) List(-cp, , ) ==> Settings {
+ -d = .
+ -classpath = ""
+}
+
+2) List(, -cp, ) ==> Settings {
+ -d = .
+ -classpath = ""
+}
+
+3) List(-cp, , -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+4) List(-cp, , , -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+5) List(-cp, , -deprecation, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+6) List(, -cp, , -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+7) List(-cp, , -deprecation, foo.scala) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+8) List(-cp, , , -deprecation, foo.scala) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+9) List(-cp, , -deprecation, , foo.scala) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+10) List(-cp, , -deprecation, foo.scala, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+11) List(, -cp, , -deprecation, foo.scala) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+12) List(-cp, , foo.scala) ==> Settings {
+ -d = .
+ -classpath = ""
+}
+
+13) List(-cp, , , foo.scala) ==> Settings {
+ -d = .
+ -classpath = ""
+}
+
+14) List(-cp, , foo.scala, ) ==> Settings {
+ -d = .
+ -classpath = ""
+}
+
+15) List(, -cp, , foo.scala) ==> Settings {
+ -d = .
+ -classpath = ""
+}
+
+16) List(-cp, , foo.scala, -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+17) List(-cp, , , foo.scala, -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+18) List(-cp, , foo.scala, , -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+19) List(-cp, , foo.scala, -deprecation, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+20) List(, -cp, , foo.scala, -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+21) List(-deprecation, -cp, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+22) List(, -deprecation, -cp, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+23) List(-deprecation, -cp, , ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+24) List(-deprecation, , -cp, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+25) List(-deprecation, -cp, , foo.scala) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+26) List(, -deprecation, -cp, , foo.scala) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+27) List(-deprecation, -cp, , , foo.scala) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+28) List(-deprecation, -cp, , foo.scala, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+29) List(-deprecation, , -cp, , foo.scala) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+30) List(-deprecation, foo.scala, -cp, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+31) List(, -deprecation, foo.scala, -cp, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+32) List(-deprecation, , foo.scala, -cp, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+33) List(-deprecation, foo.scala, -cp, , ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+34) List(-deprecation, foo.scala, , -cp, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+35) List(foo.scala, -cp, ) ==> Settings {
+ -d = .
+ -classpath = ""
+}
+
+36) List(, foo.scala, -cp, ) ==> Settings {
+ -d = .
+ -classpath = ""
+}
+
+37) List(foo.scala, -cp, , ) ==> Settings {
+ -d = .
+ -classpath = ""
+}
+
+38) List(foo.scala, , -cp, ) ==> Settings {
+ -d = .
+ -classpath = ""
+}
+
+39) List(foo.scala, -cp, , -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+40) List(, foo.scala, -cp, , -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+41) List(foo.scala, -cp, , , -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+42) List(foo.scala, -cp, , -deprecation, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+43) List(foo.scala, , -cp, , -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+44) List(foo.scala, -deprecation, -cp, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+45) List(, foo.scala, -deprecation, -cp, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+46) List(foo.scala, , -deprecation, -cp, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+47) List(foo.scala, -deprecation, -cp, , ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+48) List(foo.scala, -deprecation, , -cp, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = ""
+}
+
+0) List(-cp, /tmp:/bippy) ==> Settings {
+ -d = .
+ -classpath = /tmp:/bippy
+}
+
+1) List(-cp, /tmp:/bippy, ) ==> Settings {
+ -d = .
+ -classpath = /tmp:/bippy
+}
+
+2) List(, -cp, /tmp:/bippy) ==> Settings {
+ -d = .
+ -classpath = /tmp:/bippy
+}
+
+3) List(-cp, /tmp:/bippy, -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+4) List(-cp, /tmp:/bippy, , -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+5) List(-cp, /tmp:/bippy, -deprecation, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+6) List(, -cp, /tmp:/bippy, -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+7) List(-cp, /tmp:/bippy, -deprecation, foo.scala) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+8) List(-cp, /tmp:/bippy, , -deprecation, foo.scala) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+9) List(-cp, /tmp:/bippy, -deprecation, , foo.scala) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+10) List(-cp, /tmp:/bippy, -deprecation, foo.scala, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+11) List(, -cp, /tmp:/bippy, -deprecation, foo.scala) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+12) List(-cp, /tmp:/bippy, foo.scala) ==> Settings {
+ -d = .
+ -classpath = /tmp:/bippy
+}
+
+13) List(-cp, /tmp:/bippy, , foo.scala) ==> Settings {
+ -d = .
+ -classpath = /tmp:/bippy
+}
+
+14) List(-cp, /tmp:/bippy, foo.scala, ) ==> Settings {
+ -d = .
+ -classpath = /tmp:/bippy
+}
+
+15) List(, -cp, /tmp:/bippy, foo.scala) ==> Settings {
+ -d = .
+ -classpath = /tmp:/bippy
+}
+
+16) List(-cp, /tmp:/bippy, foo.scala, -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+17) List(-cp, /tmp:/bippy, , foo.scala, -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+18) List(-cp, /tmp:/bippy, foo.scala, , -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+19) List(-cp, /tmp:/bippy, foo.scala, -deprecation, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+20) List(, -cp, /tmp:/bippy, foo.scala, -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+21) List(-deprecation, -cp, /tmp:/bippy) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+22) List(, -deprecation, -cp, /tmp:/bippy) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+23) List(-deprecation, -cp, /tmp:/bippy, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+24) List(-deprecation, , -cp, /tmp:/bippy) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+25) List(-deprecation, -cp, /tmp:/bippy, foo.scala) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+26) List(, -deprecation, -cp, /tmp:/bippy, foo.scala) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+27) List(-deprecation, -cp, /tmp:/bippy, , foo.scala) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+28) List(-deprecation, -cp, /tmp:/bippy, foo.scala, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+29) List(-deprecation, , -cp, /tmp:/bippy, foo.scala) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+30) List(-deprecation, foo.scala, -cp, /tmp:/bippy) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+31) List(, -deprecation, foo.scala, -cp, /tmp:/bippy) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+32) List(-deprecation, , foo.scala, -cp, /tmp:/bippy) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+33) List(-deprecation, foo.scala, -cp, /tmp:/bippy, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+34) List(-deprecation, foo.scala, , -cp, /tmp:/bippy) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+35) List(foo.scala, -cp, /tmp:/bippy) ==> Settings {
+ -d = .
+ -classpath = /tmp:/bippy
+}
+
+36) List(, foo.scala, -cp, /tmp:/bippy) ==> Settings {
+ -d = .
+ -classpath = /tmp:/bippy
+}
+
+37) List(foo.scala, -cp, /tmp:/bippy, ) ==> Settings {
+ -d = .
+ -classpath = /tmp:/bippy
+}
+
+38) List(foo.scala, , -cp, /tmp:/bippy) ==> Settings {
+ -d = .
+ -classpath = /tmp:/bippy
+}
+
+39) List(foo.scala, -cp, /tmp:/bippy, -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+40) List(, foo.scala, -cp, /tmp:/bippy, -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+41) List(foo.scala, -cp, /tmp:/bippy, , -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+42) List(foo.scala, -cp, /tmp:/bippy, -deprecation, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+43) List(foo.scala, , -cp, /tmp:/bippy, -deprecation) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+44) List(foo.scala, -deprecation, -cp, /tmp:/bippy) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+45) List(, foo.scala, -deprecation, -cp, /tmp:/bippy) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+46) List(foo.scala, , -deprecation, -cp, /tmp:/bippy) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+47) List(foo.scala, -deprecation, -cp, /tmp:/bippy, ) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
+48) List(foo.scala, -deprecation, , -cp, /tmp:/bippy) ==> Settings {
+ -d = .
+ -deprecation = true
+ -classpath = /tmp:/bippy
+}
+
diff --git a/test/files/run/settings-parse.scala b/test/files/run/settings-parse.scala
new file mode 100644
index 0000000000..2b04f55b24
--- /dev/null
+++ b/test/files/run/settings-parse.scala
@@ -0,0 +1,27 @@
+import scala.tools.nsc._
+
+object Test {
+ val tokens = List("", "-deprecation", "foo.scala")
+ val subsets = tokens.toSet.subsets.toList
+ val permutations0 = subsets.flatMap(_.toList.permutations).distinct
+
+ def runWithCp(cp: String) = {
+ val permutations = permutations0 flatMap ("-cp CPTOKEN" :: _ permutations)
+
+ for ((p, i) <- permutations.distinct.sortBy(_ mkString "").zipWithIndex) {
+ val args = p flatMap (_ split "\\s+") map (x => if (x == "CPTOKEN") cp else x)
+ val s = new settings.MutableSettings(println)
+ val (ok, residual) = s.processArguments(args, processAll = true)
+
+ val expected = args filter (_ == "foo.scala")
+ assert(residual == expected, residual)
+ assert(ok, args)
+ println(s"$i) $args ==> $s")
+ }
+ }
+
+ def main(args0: Array[String]): Unit = {
+ runWithCp("")
+ runWithCp("/tmp:/bippy")
+ }
+}
diff --git a/test/files/run/shortClass.check b/test/files/run/shortClass.check
new file mode 100644
index 0000000000..fbdb725cca
--- /dev/null
+++ b/test/files/run/shortClass.check
@@ -0,0 +1,10 @@
+bippity.bop.Foo
+bippity.bop.Foo$Bar
+bippity.bop.Foo$Bar$
+Test$$anon$1
+Test$$anon$2
+Foo
+Bar
+Bar$
+Foo with DingDongBippy
+Bar with DingDongBippy
diff --git a/test/files/run/shortClass.scala b/test/files/run/shortClass.scala
new file mode 100644
index 0000000000..b7bb016896
--- /dev/null
+++ b/test/files/run/shortClass.scala
@@ -0,0 +1,24 @@
+import scala.reflect.internal.util._
+
+package bippity {
+ trait DingDongBippy
+
+ package bop {
+ class Foo {
+ class Bar
+ object Bar
+ }
+ }
+}
+
+object Test {
+ import bippity._
+ import bop._
+
+ def main(args: Array[String]): Unit = {
+ val f = new Foo
+ val instances = List(f, new f.Bar, f.Bar, new Foo with DingDongBippy, new f.Bar with DingDongBippy)
+ instances map (_.getClass.getName) foreach println
+ instances map shortClassOfInstance foreach println
+ }
+}
diff --git a/test/files/run/showraw_aliases.check b/test/files/run/showraw_aliases.check
index aebd354031..d6a198b1cb 100644
--- a/test/files/run/showraw_aliases.check
+++ b/test/files/run/showraw_aliases.check
@@ -1,2 +1,2 @@
-Block(List(Import(Select(Select(Ident(scala), scala.reflect), scala.reflect.runtime), List(ImportSelector(newTermName("universe"), <offset>, newTermName("ru"), <offset>)))), Select(Select(Select(Select(Ident(scala), scala.reflect), scala.reflect.runtime), scala.reflect.runtime.package), [newTermName("universe") aka newTermName("ru")]))
-Block(List(Import(Select(Select(Ident(scala#<id>), scala.reflect#<id>), scala.reflect.runtime#<id>), List(ImportSelector(newTermName("universe"), <offset>, newTermName("ru"), <offset>)))), Select(Select(Select(Select(Ident(scala#<id>), scala.reflect#<id>), scala.reflect.runtime#<id>), scala.reflect.runtime.package#<id>), [newTermName("universe")#<id> aka newTermName("ru")]))
+Block(List(Import(Select(Select(Ident(scala), scala.reflect), scala.reflect.runtime), List(ImportSelector(TermName("universe"), <offset>, TermName("ru"), <offset>)))), Select(Select(Select(Select(Ident(scala), scala.reflect), scala.reflect.runtime), scala.reflect.runtime.package), [TermName("universe") aka TermName("ru")]))
+Block(List(Import(Select(Select(Ident(scala#<id>), scala.reflect#<id>), scala.reflect.runtime#<id>), List(ImportSelector(TermName("universe"), <offset>, TermName("ru"), <offset>)))), Select(Select(Select(Select(Ident(scala#<id>), scala.reflect#<id>), scala.reflect.runtime#<id>), scala.reflect.runtime.package#<id>), [TermName("universe")#<id> aka TermName("ru")]))
diff --git a/test/files/run/showraw_mods.check b/test/files/run/showraw_mods.check
index 7fca027614..3ec868542d 100644
--- a/test/files/run/showraw_mods.check
+++ b/test/files/run/showraw_mods.check
@@ -1 +1 @@
-Block(List(ClassDef(Modifiers(ABSTRACT | DEFAULTPARAM/TRAIT), newTypeName("C"), List(), Template(List(Ident(newTypeName("AnyRef"))), emptyValDef, List(DefDef(Modifiers(), newTermName("$init$"), List(), List(List()), TypeTree(), Block(List(), Literal(Constant(())))), ValDef(Modifiers(PRIVATE | LOCAL), newTermName("x"), TypeTree(), Literal(Constant(2))), ValDef(Modifiers(MUTABLE), newTermName("y"), TypeTree(), Select(This(newTypeName("C")), newTermName("x"))), ValDef(Modifiers(LAZY), newTermName("z"), TypeTree(), Select(This(newTypeName("C")), newTermName("y"))))))), Literal(Constant(())))
+Block(List(ClassDef(Modifiers(ABSTRACT | DEFAULTPARAM/TRAIT), TypeName("C"), List(), Template(List(Ident(TypeName("AnyRef"))), emptyValDef, List(DefDef(Modifiers(), TermName("$init$"), List(), List(List()), TypeTree(), Block(List(), Literal(Constant(())))), ValDef(Modifiers(PRIVATE | LOCAL), TermName("x"), TypeTree(), Literal(Constant(2))), ValDef(Modifiers(MUTABLE), TermName("y"), TypeTree(), Select(This(TypeName("C")), TermName("x"))), ValDef(Modifiers(LAZY), TermName("z"), TypeTree(), Select(This(TypeName("C")), TermName("y"))))))), Literal(Constant(())))
diff --git a/test/files/run/showraw_tree.check b/test/files/run/showraw_tree.check
index b71018d4fe..eb74bd8b2b 100644
--- a/test/files/run/showraw_tree.check
+++ b/test/files/run/showraw_tree.check
@@ -1,2 +1,2 @@
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Select(Ident(scala.Predef), newTypeName("String")), Select(Ident(scala.Predef), newTypeName("String"))))), nme.CONSTRUCTOR), List())
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Select(Ident(scala.Predef), newTypeName("String")), Select(Ident(scala.Predef), newTypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Select(Ident(scala.Predef), TypeName("String")), Select(Ident(scala.Predef), TypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Select(Ident(scala.Predef), TypeName("String")), Select(Ident(scala.Predef), TypeName("String"))))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_ids.check b/test/files/run/showraw_tree_ids.check
index 5835ffa6de..7e0149a3c1 100644
--- a/test/files/run/showraw_tree_ids.check
+++ b/test/files/run/showraw_tree_ids.check
@@ -1,2 +1,2 @@
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#<id>), List(Select(Ident(scala.Predef#<id>), newTypeName("String")), Select(Ident(scala.Predef#<id>), newTypeName("String"))))), nme.CONSTRUCTOR), List())
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#<id>), List(Select(Ident(scala.Predef#<id>), newTypeName("String")), Select(Ident(scala.Predef#<id>), newTypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#<id>), List(Select(Ident(scala.Predef#<id>), TypeName("String")), Select(Ident(scala.Predef#<id>), TypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#<id>), List(Select(Ident(scala.Predef#<id>), TypeName("String")), Select(Ident(scala.Predef#<id>), TypeName("String"))))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_kinds.check b/test/files/run/showraw_tree_kinds.check
index c4d66856d4..577f447ae4 100644
--- a/test/files/run/showraw_tree_kinds.check
+++ b/test/files/run/showraw_tree_kinds.check
@@ -1,2 +1,2 @@
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#CLS), List(Select(Ident(scala.Predef#MOD), newTypeName("String")), Select(Ident(scala.Predef#MOD), newTypeName("String"))))), nme.CONSTRUCTOR), List())
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#CLS), List(Select(Ident(scala.Predef#MOD), newTypeName("String")), Select(Ident(scala.Predef#MOD), newTypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#CLS), List(Select(Ident(scala.Predef#MOD), TypeName("String")), Select(Ident(scala.Predef#MOD), TypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#CLS), List(Select(Ident(scala.Predef#MOD), TypeName("String")), Select(Ident(scala.Predef#MOD), TypeName("String"))))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_types_ids.check b/test/files/run/showraw_tree_types_ids.check
index fccb81d8df..6a73d77436 100644
--- a/test/files/run/showraw_tree_types_ids.check
+++ b/test/files/run/showraw_tree_types_ids.check
@@ -1,12 +1,12 @@
-Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#<id>), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), newTypeName("String")#<id>)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), newTypeName("String")#<id>)))))), nme.CONSTRUCTOR#<id>), List())
-[1] TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List())))
-[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List()))))
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#<id>), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), TypeName("String")#<id>)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), TypeName("String")#<id>)))))), nme.CONSTRUCTOR#<id>), List())
+[1] TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List()))))
[3] TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List())
-[4] TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List())
+[4] TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List())
[5] SingleType(ThisType(scala#<id>), scala.Predef#<id>)
-Apply[6](Select[7](New[6](TypeTree[6]().setOriginal(AppliedTypeTree(Ident[8](scala.collection.mutable.HashMap#<id>), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), newTypeName("String")#<id>)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), newTypeName("String")#<id>)))))), nme.CONSTRUCTOR#<id>), List())
-[4] TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List())
+Apply[6](Select[7](New[6](TypeTree[6]().setOriginal(AppliedTypeTree(Ident[8](scala.collection.mutable.HashMap#<id>), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), TypeName("String")#<id>)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), TypeName("String")#<id>)))))), nme.CONSTRUCTOR#<id>), List())
+[4] TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List())
[5] SingleType(ThisType(scala#<id>), scala.Predef#<id>)
-[6] TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List())))
-[7] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List()))))
+[6] TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List())))
+[7] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), TypeName("String")#<id>, List()))))
[8] TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List())
diff --git a/test/files/run/showraw_tree_types_typed.check b/test/files/run/showraw_tree_types_typed.check
index f3e0f8c5f6..cf63ecb586 100644
--- a/test/files/run/showraw_tree_types_typed.check
+++ b/test/files/run/showraw_tree_types_typed.check
@@ -1,12 +1,12 @@
-Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), newTypeName("String"))), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), newTypeName("String"))))))), nme.CONSTRUCTOR), List())
-[1] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List())))
-[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List()))))
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), TypeName("String"))), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), TypeName("String"))))))), nme.CONSTRUCTOR), List())
+[1] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()))))
[3] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List())
-[4] TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List())
+[4] TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())
[5] SingleType(ThisType(scala), scala.Predef)
-Apply[6](Select[7](New[6](TypeTree[6]().setOriginal(AppliedTypeTree(Ident[8](scala.collection.mutable.HashMap), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), newTypeName("String"))), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), newTypeName("String"))))))), nme.CONSTRUCTOR), List())
-[4] TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List())
+Apply[6](Select[7](New[6](TypeTree[6]().setOriginal(AppliedTypeTree(Ident[8](scala.collection.mutable.HashMap), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), TypeName("String"))), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), TypeName("String"))))))), nme.CONSTRUCTOR), List())
+[4] TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())
[5] SingleType(ThisType(scala), scala.Predef)
-[6] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List())))
-[7] MethodType(List(), TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List()))))
+[6] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List())))
+[7] MethodType(List(), TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), TypeName("String"), List()))))
[8] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List())
diff --git a/test/files/run/showraw_tree_types_untyped.check b/test/files/run/showraw_tree_types_untyped.check
index b71018d4fe..eb74bd8b2b 100644
--- a/test/files/run/showraw_tree_types_untyped.check
+++ b/test/files/run/showraw_tree_types_untyped.check
@@ -1,2 +1,2 @@
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Select(Ident(scala.Predef), newTypeName("String")), Select(Ident(scala.Predef), newTypeName("String"))))), nme.CONSTRUCTOR), List())
-Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Select(Ident(scala.Predef), newTypeName("String")), Select(Ident(scala.Predef), newTypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Select(Ident(scala.Predef), TypeName("String")), Select(Ident(scala.Predef), TypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Select(Ident(scala.Predef), TypeName("String")), Select(Ident(scala.Predef), TypeName("String"))))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_ultimate.check b/test/files/run/showraw_tree_ultimate.check
index a6286ba754..63f72de50b 100644
--- a/test/files/run/showraw_tree_ultimate.check
+++ b/test/files/run/showraw_tree_ultimate.check
@@ -1,12 +1,12 @@
-Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#<id>#CLS), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE)))))), nme.CONSTRUCTOR#<id>#PCTOR), List())
-[1] TypeRef(ThisType(scala.collection.immutable#<id>#PK), scala.collection.immutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List())))
-[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#<id>#PK), scala.collection.immutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List()))))
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#<id>#CLS), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE)))))), nme.CONSTRUCTOR#<id>#PCTOR), List())
+[1] TypeRef(ThisType(scala.collection.immutable#<id>#PK), scala.collection.immutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#<id>#PK), scala.collection.immutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List()))))
[3] TypeRef(ThisType(scala.collection.immutable#<id>#PK), scala.collection.immutable.HashMap#<id>#CLS, List())
-[4] TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List())
+[4] TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List())
[5] SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD)
-Apply[6](Select[7](New[6](TypeTree[6]().setOriginal(AppliedTypeTree(Ident[8](scala.collection.mutable.HashMap#<id>#CLS), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE)))))), nme.CONSTRUCTOR#<id>#CTOR), List())
-[4] TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List())
+Apply[6](Select[7](New[6](TypeTree[6]().setOriginal(AppliedTypeTree(Ident[8](scala.collection.mutable.HashMap#<id>#CLS), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE)))))), nme.CONSTRUCTOR#<id>#CTOR), List())
+[4] TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List())
[5] SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD)
-[6] TypeRef(ThisType(scala.collection.mutable#<id>#PK), scala.collection.mutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List())))
-[7] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#<id>#PK), scala.collection.mutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List()))))
+[6] TypeRef(ThisType(scala.collection.mutable#<id>#PK), scala.collection.mutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List())))
+[7] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#<id>#PK), scala.collection.mutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), TypeName("String")#<id>#TPE, List()))))
[8] TypeRef(ThisType(scala.collection.mutable#<id>#PK), scala.collection.mutable.HashMap#<id>#CLS, List())
diff --git a/test/files/run/structural.scala b/test/files/run/structural.scala
index 36af8c4bfc..3a703d2cf1 100644
--- a/test/files/run/structural.scala
+++ b/test/files/run/structural.scala
@@ -152,7 +152,7 @@ object test2 {
object test3 {
- case class Exc extends Exception
+ case class Exc() extends Exception
object Rec {
def f = throw Exc()
diff --git a/test/files/run/t0091.check b/test/files/run/t0091.check
index 7ed6ff82de..fd3c81a4d7 100644
--- a/test/files/run/t0091.check
+++ b/test/files/run/t0091.check
@@ -1 +1,2 @@
5
+5
diff --git a/test/files/run/t0091.scala b/test/files/run/t0091.scala
index eaddde0dbf..45235eb77b 100644
--- a/test/files/run/t0091.scala
+++ b/test/files/run/t0091.scala
@@ -4,10 +4,13 @@ object C extends B {
object m extends A { def x = 5 }
}
object Test {
- // The type annotation here is necessary, otherwise
- // the compiler would reference C$m$ directly.
- def o : B = C
- def main(argv : Array[String]) : Unit = {
- println(o.m.x)
- }
+ // The type annotation here is necessary, otherwise
+ // the compiler would reference C$m$ directly.
+ def o1 : B = C
+ def o2 = C
+
+ def main(argv : Array[String]) : Unit = {
+ println(o1.m.x)
+ println(o2.m.x)
+ }
}
diff --git a/test/files/run/t1042.scala b/test/files/run/t1042.scala
index 1f39fff24a..302ff31053 100644
--- a/test/files/run/t1042.scala
+++ b/test/files/run/t1042.scala
@@ -6,7 +6,7 @@ abstract class A {
case class B() extends A {
// overloaded version is implemented, causing toString not to be implemented?
- def toString(sb: StringBuilder): StringBuilder = error("")
+ def toString(sb: StringBuilder): StringBuilder = sys.error("")
}
object Test extends App {
diff --git a/test/files/run/t107.check b/test/files/run/t107.check
new file mode 100644
index 0000000000..d00491fd7e
--- /dev/null
+++ b/test/files/run/t107.check
@@ -0,0 +1 @@
+1
diff --git a/test/files/run/t107.scala b/test/files/run/t107.scala
new file mode 100644
index 0000000000..ab1b289882
--- /dev/null
+++ b/test/files/run/t107.scala
@@ -0,0 +1,8 @@
+object Test {
+ def main(args : Array[String]) : Unit = {
+ var hash : Long = 0
+ val bytes = Array(1.toByte, 2.toByte, 3.toByte)
+ hash += bytes(0)
+ Console.println(hash)
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t1500.scala b/test/files/run/t1500.scala
index ab132b724f..6d2e7ee05f 100644
--- a/test/files/run/t1500.scala
+++ b/test/files/run/t1500.scala
@@ -21,7 +21,7 @@ object Test {
val settings = new Settings()
settings.classpath.value = System.getProperty("java.class.path")
val tool = new interpreter.IMain(settings)
- val global = tool.compiler
+ val global = tool.global
import global._
import definitions._
diff --git a/test/files/run/t1501.scala b/test/files/run/t1501.scala
index aba206bc7a..a2f7bb3a65 100644
--- a/test/files/run/t1501.scala
+++ b/test/files/run/t1501.scala
@@ -31,7 +31,7 @@ object Test {
val settings = new Settings()
settings.classpath.value = System.getProperty("java.class.path")
val tool = new interpreter.IMain(settings)
- val global = tool.compiler
+ val global = tool.global
import global._
import definitions._
diff --git a/test/files/run/t1505.scala b/test/files/run/t1505.scala
index a246e8a35b..d7feb30ce3 100644
--- a/test/files/run/t1505.scala
+++ b/test/files/run/t1505.scala
@@ -1,5 +1,3 @@
-object P extends Enumeration(0, "A", "B", "C") { val A, B, C = Value }
-
object Q extends Enumeration {
val A = Value("A")
val B = Value("B")
@@ -11,9 +9,14 @@ object R extends Enumeration {
}
object Test extends App {
- assert(P(0) == P.withName("A"))
- assert(P.C == P.withName("C"))
-
assert(Q(0) == Q.withName("A"))
assert(Q.C == Q.withName("C"))
+
+ assert(R(0) == R.withName("A"))
+ assert(R.C == R.withName("C"))
+
+ var failed = false
+ try { Q.withName("x") } catch { case _: NoSuchElementException => failed = true }
+ assert(failed)
+
}
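
For reference, a small sketch (with a hypothetical Color enumeration, not part of the patch) of the Enumeration.withName behaviour the updated test asserts: lookup by name succeeds for defined values and throws NoSuchElementException otherwise.

  object Color extends Enumeration {
    val Red, Green, Blue = Value
  }

  object WithNameSketch extends App {
    // Lookup by name returns the matching value...
    assert(Color.withName("Red") == Color.Red)
    // ...and an unknown name fails with NoSuchElementException, as asserted above.
    val failed =
      try { Color.withName("Purple"); false }
      catch { case _: NoSuchElementException => true }
    assert(failed)
  }
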
diff --git a/test/files/run/t2251.check b/test/files/run/t2251.check
new file mode 100644
index 0000000000..55ad2a5857
--- /dev/null
+++ b/test/files/run/t2251.check
@@ -0,0 +1 @@
+Set(List(List(C), Stream(D, ?)))
diff --git a/test/files/run/t2251.scala b/test/files/run/t2251.scala
new file mode 100644
index 0000000000..00c5619b49
--- /dev/null
+++ b/test/files/run/t2251.scala
@@ -0,0 +1,19 @@
+class A
+trait B[T <: B[T]] extends A
+class C extends B[C] { override def toString = "C" }
+class D extends B[D] { override def toString = "D" }
+
+class E {
+ val ys = List(List(new C), Stream(new D))
+}
+
+object Test {
+ def trav = List(List(), Stream())
+
+ def main(args: Array[String]): Unit = {
+ val f = (new E).ys _
+ var xs: Set[List[_ <: Seq[B[_]]]] = Set()
+ xs += f()
+ println(xs)
+ }
+}
diff --git a/test/files/run/t2251b.check b/test/files/run/t2251b.check
new file mode 100644
index 0000000000..42b0be457a
--- /dev/null
+++ b/test/files/run/t2251b.check
@@ -0,0 +1,11 @@
+TypeTag[List[scala.collection.immutable.LinearSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with scala.collection.AbstractSeq[B[_ >: D with C <: B[_ >: D with C <: A]]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def dropRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]}; def dropRight(n: Int): scala.collection.immutable.LinearSeq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def dropRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]}; def takeRight(n: Int): scala.collection.immutable.LinearSeq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def dropRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]}; def drop(n: Int): scala.collection.immutable.LinearSeq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def dropRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def 
take(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]}; def take(n: Int): scala.collection.immutable.LinearSeq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def dropRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]}; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def dropRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]}; def splitAt(n: Int): (scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A], scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A])}]]
+TypeTag[List[scala.collection.immutable.Iterable[B[_ >: F with E with D with C <: B[_ >: F with E with D with C <: A]]] with F with Int => Any]]
+TypeTag[List[scala.collection.immutable.Seq[B[_ >: D with C <: B[_ >: D with C <: A]]] with scala.collection.AbstractSeq[B[_ >: D with C <: B[_ >: D with C <: A]]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def init: scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]}; def takeRight(n: Int): scala.collection.immutable.Seq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def init: scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]}; def drop(n: Int): scala.collection.immutable.Seq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def init: scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]}; def take(n: Int): scala.collection.immutable.Seq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def init: scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]}; def 
slice(from: Int,until: Int): scala.collection.immutable.Seq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def init: scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]}; def splitAt(n: Int): (scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A], scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]); def init: scala.collection.immutable.Seq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def init: scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]}}]]
+TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G with F <: A]]]]]
+TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G with F <: A]]]]]
+TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G with F <: A]]]]]
+TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]]
+TypeTag[List[scala.collection.Map[_ >: F with C <: B[_ >: F with C <: B[_ >: F with C <: A]], B[_ >: G with D <: B[_ >: G with D <: A]]]]]
+TypeTag[List[scala.collection.AbstractSeq[B[_ >: G with F <: B[_ >: G with F <: A]]] with scala.collection.LinearSeq[B[_ >: G with F <: B[_ >: G with F <: A]]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.AbstractSeq with scala.collection.LinearSeq]; def dropRight(n: Int): scala.collection.AbstractSeq[B[_ >: G with F <: A]] with scala.collection.LinearSeq[B[_ >: G with F <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.AbstractSeq with scala.collection.LinearSeq]; def dropRight(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def drop(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def take(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def slice(from: Int,until: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]}; def drop(n: Int): scala.collection.AbstractSeq[B[_ >: G with F <: A]] with scala.collection.LinearSeq[B[_ >: G with F <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.AbstractSeq with scala.collection.LinearSeq]; def dropRight(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def drop(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def take(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def slice(from: Int,until: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]}; def take(n: Int): scala.collection.AbstractSeq[B[_ >: G with F <: A]] with scala.collection.LinearSeq[B[_ >: G with F <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.AbstractSeq with scala.collection.LinearSeq]; def dropRight(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def drop(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def take(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def slice(from: Int,until: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]}; def slice(from: Int,until: Int): scala.collection.AbstractSeq[B[_ >: G with F <: A]] with scala.collection.LinearSeq[B[_ >: G with F <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.AbstractSeq with scala.collection.LinearSeq]; def dropRight(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def drop(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def take(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def slice(from: Int,until: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]}}]]
+TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]]
+TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]]
diff --git a/test/files/run/t2251b.scala b/test/files/run/t2251b.scala
new file mode 100644
index 0000000000..b67b3aec1e
--- /dev/null
+++ b/test/files/run/t2251b.scala
@@ -0,0 +1,48 @@
+class A
+trait B[T <: B[T]] extends A
+class B1[T <: B1[T]] extends B[T]
+class C extends B[C] { override def toString = "C" }
+class D extends B[D] { override def toString = "D" }
+class E extends B[E] { override def toString = "E" }
+class F extends B[F] { override def toString = "F" }
+class G extends B1[G] { override def toString = "G" }
+
+object Test {
+ import scala.collection.{ mutable, immutable }
+ import scala.collection.immutable.{ Vector }
+ import scala.reflect.runtime.universe._
+ def what[T: TypeTag](x: T) = println(typeTag[T])
+
+ def main(args: Array[String]): Unit = {
+ what(List(List(new C), Stream(new D)))
+ what(List(List(new C), Stream(new D), Vector(new E), Set(new F)))
+ what(List(immutable.Vector(new C), Stream(new D)))
+ what(List(collection.Set(new F), mutable.Set(new G)))
+ what(List(collection.Set(new F), immutable.Set(new G)))
+ what(List(mutable.Set(new F), immutable.Set(new G)))
+ what(List(mutable.Seq(new F), immutable.Seq(new G)))
+ what(List(mutable.Map(new C -> new D), immutable.Map(new F -> new G)))
+ what(List(mutable.MutableList(new F), immutable.List(new G)))
+ what(List(mutable.Seq(new F), collection.Seq(new G)))
+ what(List(mutable.LinearSeq(new F), collection.IndexedSeq(new G)))
+ }
+}
+
+
+// class D extends B[D] { override def toString = "D" }
+
+
+// class E {
+// val ys = List(List(new C), Stream(new D))
+// }
+
+// object Test {
+// def trav = List(List(), Stream())
+
+// def main(args: Array[String]): Unit = {
+// val f = (new E).ys _
+// var xs: Set[List[_ <: Seq[B[_]]]] = Set()
+// xs += f()
+// println(xs)
+// }
+// }
diff --git a/test/pending/run/t2318.check b/test/files/run/t2318.check
index a486f1ac47..a486f1ac47 100644
--- a/test/pending/run/t2318.check
+++ b/test/files/run/t2318.check
diff --git a/test/pending/run/t2318.scala b/test/files/run/t2318.scala
index e42cbb9680..47d083eb9d 100644
--- a/test/pending/run/t2318.scala
+++ b/test/files/run/t2318.scala
@@ -7,7 +7,8 @@ object Test {
override def checkPermission(perm: Permission) = perm match {
case _: java.lang.RuntimePermission => ()
case _: java.io.FilePermission => ()
- case x: java.security.AccessControlException if x.getName contains ".networkaddress." => () // generality ftw
+ case x: java.security.SecurityPermission if x.getName contains ".networkaddress." => () // generality ftw
+ case x: java.util.PropertyPermission if x.getName == "sun.net.inetaddr.ttl" => ()
case _ => super.checkPermission(perm)
}
}
diff --git a/test/files/run/t2577.check b/test/files/run/t2577.check
new file mode 100644
index 0000000000..4a584e4989
--- /dev/null
+++ b/test/files/run/t2577.check
@@ -0,0 +1 @@
+Nothing
diff --git a/test/files/run/t2577.scala b/test/files/run/t2577.scala
new file mode 100644
index 0000000000..6d836a3996
--- /dev/null
+++ b/test/files/run/t2577.scala
@@ -0,0 +1,17 @@
+case class annot[T]() extends scala.annotation.StaticAnnotation
+
+// type inference should infer @annot[Nothing] instead of @annot[T]
+// note the T is not in scope here!
+class Foo[@annot U]
+
+object Test {
+ import scala.reflect.runtime.universe._
+ val x = new Foo
+
+ def main(args: Array[String]): Unit = {
+ val targ = typeOf[x.type].widen match {
+ case TypeRef(_, _, arg :: _) => arg
+ }
+ println(targ)
+ }
+}
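
The comment in this test relies on a general inference rule; here is a tiny sketch, with hypothetical names, of the same behaviour outside annotations: a type parameter that is otherwise unconstrained is solved to its lower bound, Nothing.

  object NothingInferenceSketch extends App {
    def empty[T](): List[T] = Nil
    val xs = empty()              // T is unconstrained, so it is inferred as Nothing
    val check: List[Nothing] = xs // compiles only because xs is a List[Nothing]
    println(xs)
  }
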
diff --git a/test/files/run/t2873.check b/test/files/run/t2873.check
index 9198280f61..209b679c07 100644
--- a/test/files/run/t2873.check
+++ b/test/files/run/t2873.check
@@ -1 +1 @@
-scala.collection.immutable.RedBlack<A>.Empty$
+RedBlack<A>.Empty$
diff --git a/test/files/run/t2873.scala b/test/files/run/t2873.scala
index 8d48a8dbb4..3a3cc59b46 100644
--- a/test/files/run/t2873.scala
+++ b/test/files/run/t2873.scala
@@ -1,5 +1,10 @@
+abstract class RedBlack[A] extends Serializable {
+ abstract class Tree[+B] extends Serializable
+ case object Empty extends Tree[Nothing]
+}
+
object Test {
def main(args: Array[String]): Unit = {
- println(classOf[scala.collection.immutable.RedBlack[_]].getMethod("Empty").getGenericReturnType)
+ println(classOf[RedBlack[_]].getMethod("Empty").getGenericReturnType)
}
}
diff --git a/test/files/run/t2886.check b/test/files/run/t2886.check
index a70f9935ed..61e36948bd 100644
--- a/test/files/run/t2886.check
+++ b/test/files/run/t2886.check
@@ -1,5 +1,5 @@
((x: Predef.String) => {
- val x$1 = x;
- val x$2 = x;
+ <artifact> val x$1 = x;
+ <artifact> val x$2 = x;
Test.this.test(x$2, x$1)
})
diff --git a/test/files/run/t3038d.scala b/test/files/run/t3038d.scala
index 6cd2d83776..9550165235 100644
--- a/test/files/run/t3038d.scala
+++ b/test/files/run/t3038d.scala
@@ -16,9 +16,7 @@ trait Foo {
}
}
-
-@serializable
-class Bar extends Foo {
+class Bar extends Foo with Serializable {
@transient protected var first: Any = null
def size = a
@transient var second: Any = null
diff --git a/test/files/run/t3667.check b/test/files/run/t3667.check
index bbe5d1bc48..6375c88997 100644
--- a/test/files/run/t3667.check
+++ b/test/files/run/t3667.check
@@ -1,6 +1,3 @@
-1
-2
-3
4
2
3
diff --git a/test/files/run/t3667.scala b/test/files/run/t3667.scala
index f30d57ce3a..ada09d5886 100644
--- a/test/files/run/t3667.scala
+++ b/test/files/run/t3667.scala
@@ -1,27 +1,9 @@
object Test {
def main(args: Array[String]) {
- val o1 = new Outer1
- val o2 = new Outer2
- val o3 = new Outer3
val o4 = new Outer4
val o5 = new Outer5
val o6 = new Outer6
- println(1)
- ser(new o1.Inner(1))
- o1.Inner // make sure the Inner$module field of the Outer1 instance is initialized!
- ser(new o1.Inner(1))
-
- println(2)
- ser(new o2.Inner(1))
- o2.Inner
- ser(new o2.Inner(1))
-
- println(3)
- ser(new o3.Inner(1))
- o3.Inner
- ser(new o3.Inner(1))
-
println(4)
ser(new o4.Inner(1))
o4.Inner
@@ -54,23 +36,6 @@ object Test {
}
-@serializable
-class Outer1 {
- @serializable
- class Inner(x: Int = 1)
-}
-
-@serializable
-class Outer2 {
- case class Inner(x: Int = 1)
-}
-
-@serializable
-class Outer3 {
- case class Inner(x: Int)
-}
-
-
class Outer4 extends Serializable {
class Inner(x: Int = 1) extends Serializable
}
diff --git a/test/files/run/t3835.scala b/test/files/run/t3835.scala
index c120a61f6e..766b6ddc2e 100644
--- a/test/files/run/t3835.scala
+++ b/test/files/run/t3835.scala
@@ -1,6 +1,6 @@
object Test extends App {
// work around optimizer bug SI-5672 -- generates wrong bytecode for switches in arguments
- // virtpatmat happily emits a switch for a one-case switch, whereas -Xoldpatmat did not
+ // virtpatmat happily emits a switch for a one-case switch
// this is not the focus of this test, hence the temporary workaround
def a = (1, 2, 3) match { case (r, \u03b8, \u03c6) => r + \u03b8 + \u03c6 }
println(a)
diff --git a/test/pending/run/t3897.check b/test/files/run/t3897.check
index 244b83716f..244b83716f 100644
--- a/test/pending/run/t3897.check
+++ b/test/files/run/t3897.check
diff --git a/test/pending/run/t3897/J_2.java b/test/files/run/t3897/J_2.java
index 178412dc92..178412dc92 100644
--- a/test/pending/run/t3897/J_2.java
+++ b/test/files/run/t3897/J_2.java
diff --git a/test/pending/run/t3897/a_1.scala b/test/files/run/t3897/a_1.scala
index 4da959e2ac..4da959e2ac 100644
--- a/test/pending/run/t3897/a_1.scala
+++ b/test/files/run/t3897/a_1.scala
diff --git a/test/pending/run/t3897/a_2.scala b/test/files/run/t3897/a_2.scala
index 4d9e59ef05..4d9e59ef05 100644
--- a/test/pending/run/t3897/a_2.scala
+++ b/test/files/run/t3897/a_2.scala
diff --git a/test/files/run/t4023.check b/test/files/run/t4023.check
new file mode 100644
index 0000000000..05f867c397
--- /dev/null
+++ b/test/files/run/t4023.check
@@ -0,0 +1,21 @@
+Try 1: (6 classes)
+class Test$C$B1
+class Test$C$B2
+class Test$C$B3$
+class Test$C$B4$
+class Test$C$B5$
+class Test$C$B6$
+Try 2: (6 classes)
+class Test$C$B1
+class Test$C$B2
+class Test$C$B3$
+class Test$C$B4$
+class Test$C$B5$
+class Test$C$B6$
+Try 3: (6 classes)
+class Test$C$B1
+class Test$C$B2
+class Test$C$B3$
+class Test$C$B4$
+class Test$C$B5$
+class Test$C$B6$
diff --git a/test/files/run/t4023.scala b/test/files/run/t4023.scala
new file mode 100644
index 0000000000..4846fa31b4
--- /dev/null
+++ b/test/files/run/t4023.scala
@@ -0,0 +1,23 @@
+object Test {
+ object C {
+ class B1
+ private class B2
+ object B3
+ private object B4
+ object B5 extends B1
+ private object B6 extends B2
+
+ val valuesTry1 = this.getClass.getDeclaredClasses
+ val valuesTry2 = C.getClass.getDeclaredClasses
+ val valuesTry3 = getClass.getDeclaredClasses
+ }
+
+ def main(args: Array[String]) {
+ println("Try 1: (" + C.valuesTry1.length + " classes)")
+ C.valuesTry1.foreach(println)
+ println("Try 2: (" + C.valuesTry2.length + " classes)")
+ C.valuesTry2.foreach(println)
+ println("Try 3: (" + C.valuesTry3.length + " classes)")
+ C.valuesTry3.foreach(println)
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t4047.scala b/test/files/run/t4047.scala
index cd42a8b4df..08989bd278 100644
--- a/test/files/run/t4047.scala
+++ b/test/files/run/t4047.scala
@@ -18,7 +18,7 @@ class D extends Bar[Unit]{
def foo = println("Unit: called D.foo")
}
-object Test extends Application {
+object Test extends App {
val a: Foo[Unit] = new A
a.foo
a.foo
diff --git a/test/files/pos/t4351.check b/test/files/run/t4351.check
index cb5d407e13..cb5d407e13 100644
--- a/test/files/pos/t4351.check
+++ b/test/files/run/t4351.check
diff --git a/test/files/pos/t4351.scala b/test/files/run/t4351.scala
index 2d57588793..d954d748b7 100644
--- a/test/files/pos/t4351.scala
+++ b/test/files/run/t4351.scala
@@ -1,7 +1,8 @@
object Test {
def main(args: Array[String]): Unit = {
- try new BooleanPropImpl() value
+ try new BooleanPropImpl().value
catch {
+ // was: StackOverflowError
case e: RuntimeException => println("runtime exception")
}
}
diff --git a/test/files/run/t4415.scala b/test/files/run/t4415.scala
index f96031d650..caf1609b9e 100644
--- a/test/files/run/t4415.scala
+++ b/test/files/run/t4415.scala
@@ -39,7 +39,7 @@ class SecondProperty extends TopProperty
class SubclassSecondProperty extends StandardProperty
trait MyProp[T]
-case class MyPropImpl[T] extends MyProp[T]
+case class MyPropImpl[T]() extends MyProp[T]
object SubclassMatch {
diff --git a/test/files/run/t4537.check b/test/files/run/t4537.check
new file mode 100644
index 0000000000..63739ca64a
--- /dev/null
+++ b/test/files/run/t4537.check
@@ -0,0 +1 @@
+b.Settings
diff --git a/test/files/neg/t4537/a.scala b/test/files/run/t4537/a.scala
index 65e183c5f8..125e223e13 100644
--- a/test/files/neg/t4537/a.scala
+++ b/test/files/run/t4537/a.scala
@@ -1,5 +1,5 @@
package a
private[a] object Settings {
- val X = 0
-} \ No newline at end of file
+ val X = "a.Settings"
+}
diff --git a/test/files/neg/t4537/b.scala b/test/files/run/t4537/b.scala
index bb9dd4e15a..c709d49b04 100644
--- a/test/files/neg/t4537/b.scala
+++ b/test/files/run/t4537/b.scala
@@ -1,5 +1,5 @@
package b
object Settings {
- val Y = 0
-} \ No newline at end of file
+ val Y = "b.Settings"
+}
diff --git a/test/files/run/t4537/c.scala b/test/files/run/t4537/c.scala
new file mode 100644
index 0000000000..ee05d4bbfb
--- /dev/null
+++ b/test/files/run/t4537/c.scala
@@ -0,0 +1,8 @@
+package b
+package c
+
+import a._
+
+object Unambiguous {
+ println(Settings.Y)
+}
diff --git a/test/files/run/t4537/d.scala b/test/files/run/t4537/d.scala
new file mode 100644
index 0000000000..dd1d2045ed
--- /dev/null
+++ b/test/files/run/t4537/d.scala
@@ -0,0 +1,6 @@
+import a._
+import b._
+
+object Test extends App {
+ println(Settings.Y)
+}
diff --git a/test/files/run/t4671.check b/test/files/run/t4671.check
index 4699818cd4..d4f8af480a 100644
--- a/test/files/run/t4671.check
+++ b/test/files/run/t4671.check
@@ -2,7 +2,7 @@ Type in expressions to have them evaluated.
Type :help for more information.
scala> object o { val file = sys.props("partest.cwd") + "/t4671.scala" }
-defined module o
+defined object o
scala> val s = scala.io.Source.fromFile(o.file)
s: scala.io.BufferedSource = non-empty iterator
diff --git a/test/files/run/t4729/S_2.scala b/test/files/run/t4729/S_2.scala
index e34e3d34d4..a80afb0257 100644
--- a/test/files/run/t4729/S_2.scala
+++ b/test/files/run/t4729/S_2.scala
@@ -20,7 +20,7 @@ object Test {
(new ScalaVarArgs).method("1", "2")
(new ScalaVarArgs: J_1).method("1", "2")
- //[4] Not Ok -- error when assigning anonymous class to a explictly typed val
+ //[4] Not Ok -- error when assigning anonymous class to an explicitly typed val
// Compiler error: object creation impossible, since method method in trait VarArgs of type (s: <repeated...>[java.lang.String])Unit is not defined
val tagged: J_1 = new J_1 {
def method(s: String*) { println(s) }
diff --git a/test/files/run/t4859.check b/test/files/run/t4859.check
new file mode 100644
index 0000000000..d329744ca0
--- /dev/null
+++ b/test/files/run/t4859.check
@@ -0,0 +1,8 @@
+Inner
+Inner.i
+About to reference Inner.i
+Outer
+Inner.i
+About to reference O.N
+About to reference O.N
+About to reference O.N.apply()
diff --git a/test/files/run/t4859.scala b/test/files/run/t4859.scala
new file mode 100644
index 0000000000..3c20cea983
--- /dev/null
+++ b/test/files/run/t4859.scala
@@ -0,0 +1,29 @@
+object O {
+ case class N()
+ object P
+}
+
+object Outer {
+ println("Outer")
+ object Inner {
+ println("Inner")
+ def i {
+ println("Inner.i")
+ }
+ }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ Outer.Inner.i // we still don't initialize Outer here (but should we?)
+
+ {println("About to reference Inner.i"); Outer}.Inner.i // Outer will be initialized.
+
+ {println("About to reference O.N" ); O}.N
+
+ {println("About to reference O.N" ); O}.N
+
+ {println("About to reference O.N.apply()"); O}.N.apply()
+ }
+}
+
diff --git a/test/files/run/t4935.flags b/test/files/run/t4935.flags
index ac14fe5dbd..49d036a887 100644
--- a/test/files/run/t4935.flags
+++ b/test/files/run/t4935.flags
@@ -1 +1 @@
--optimize
+-optimize
diff --git a/test/files/run/t4996.check b/test/files/run/t4996.check
new file mode 100644
index 0000000000..8d45b413c9
--- /dev/null
+++ b/test/files/run/t4996.check
@@ -0,0 +1,4 @@
+B.foo
+M.foo
+B.foo
+M.foo \ No newline at end of file
diff --git a/test/files/run/t4996.scala b/test/files/run/t4996.scala
new file mode 100644
index 0000000000..8e7636aaac
--- /dev/null
+++ b/test/files/run/t4996.scala
@@ -0,0 +1,47 @@
+
+
+
+
+
+
+trait A[@specialized(Int) T] {
+ def foo(t: T)
+}
+
+
+trait B extends A[Int] {
+ def foo(t: Int) {
+ println("B.foo")
+ }
+}
+
+
+trait M extends B {
+ abstract override def foo(t: Int) {
+ super.foo(t)
+ println("M.foo")
+ }
+}
+
+
+object C extends B with M
+
+
+object D extends B {
+ override def foo(t: Int) {
+ super.foo(t)
+ println("M.foo")
+ }
+}
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ D.foo(42) // OK, prints B.foo M.foo
+ C.foo(42) // was StackOverflowError
+ }
+
+}
+
+
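
The fix itself concerns @specialized, but the composition in the test is the ordinary stackable-trait pattern; a minimal sketch without specialization, using hypothetical Greeter names, of the same abstract override / super chain:

  trait Greeter { def greet(name: String): Unit }

  trait BaseGreeter extends Greeter {
    def greet(name: String) { println(s"Hello, $name") }
  }

  trait LoudGreeter extends BaseGreeter {
    // abstract override: super refers to the next trait in linearization order,
    // here BaseGreeter, so both greetings are printed.
    abstract override def greet(name: String) {
      super.greet(name)
      println(s"HELLO, $name!")
    }
  }

  object GreeterDemo extends BaseGreeter with LoudGreeter
  // GreeterDemo.greet("scala") prints "Hello, scala" and then "HELLO, scala!"
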
diff --git a/test/files/run/t5064.check b/test/files/run/t5064.check
index 077006abd9..61ccfd16e7 100644
--- a/test/files/run/t5064.check
+++ b/test/files/run/t5064.check
@@ -1,6 +1,6 @@
-[12] T5064.super.<init>()
-[12] T5064.super.<init>
-[12] this
+[53] T5064.super.<init>()
+[53] T5064.super.<init>
+[53] this
[16:23] immutable.this.List.apply(scala.this.Predef.wrapIntArray(Array[Int]{1}))
[16:20] immutable.this.List.apply
<16:20> immutable.this.List
diff --git a/test/files/run/t5225_2.check b/test/files/run/t5225_2.check
index 5faa365bce..477ea4eb6d 100644
--- a/test/files/run/t5225_2.check
+++ b/test/files/run/t5225_2.check
@@ -1,4 +1,4 @@
{
- def foo(@new `package`.cloneable() x: Int) = "";
+ def foo(@new elidable(0) x: Int) = "";
()
}
diff --git a/test/files/run/t5225_2.scala b/test/files/run/t5225_2.scala
index d1b607499c..cf0f23a5c8 100644
--- a/test/files/run/t5225_2.scala
+++ b/test/files/run/t5225_2.scala
@@ -1,6 +1,6 @@
import scala.reflect.runtime.universe._
object Test extends App {
- val tree = reify{def foo(@cloneable x: Int) = ""}.tree
+ val tree = reify{def foo(@annotation.elidable(0) x: Int) = ""}.tree
println(tree.toString)
} \ No newline at end of file
diff --git a/test/pending/run/t5293-map.scala b/test/files/run/t5293-map.scala
index 2707aed07e..2707aed07e 100644
--- a/test/pending/run/t5293-map.scala
+++ b/test/files/run/t5293-map.scala
diff --git a/test/pending/run/t5293.scala b/test/files/run/t5293.scala
index 01ead45d2a..01ead45d2a 100644
--- a/test/pending/run/t5293.scala
+++ b/test/files/run/t5293.scala
diff --git a/test/files/run/t5353.check b/test/files/run/t5353.check
new file mode 100644
index 0000000000..a2906793ed
--- /dev/null
+++ b/test/files/run/t5353.check
@@ -0,0 +1,2 @@
+1
+[Ljava.lang.Object; cannot be cast to [Ljava.lang.String;
diff --git a/test/files/run/t5353.scala b/test/files/run/t5353.scala
new file mode 100644
index 0000000000..5208fe527f
--- /dev/null
+++ b/test/files/run/t5353.scala
@@ -0,0 +1,9 @@
+object Test extends App {
+ def f(x: Boolean) = if (x) Array("abc") else Array()
+ try {
+ println(f(true).length)
+ println(f(false).length)
+ } catch {
+ case ex: Throwable => println(ex.getMessage)
+ }
+}
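The two branches of f in t5353 have types Array[String] and Array[Nothing]; their least upper bound is an existential array type, and the empty branch is backed by an Object[] at runtime, which is what produces the ClassCastException recorded in the check file. A sketch of the usual workaround (hypothetical ArrayLubDemo, not part of the test) is to pin the element type so both branches stay at Array[String]:

object ArrayLubDemo extends App {
  def safe(x: Boolean): Array[String] =
    if (x) Array("abc") else Array.empty[String]

  println(safe(true).length)  // 1
  println(safe(false).length) // 0, no ClassCastException
}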
diff --git a/test/files/run/t5418.check b/test/files/run/t5418.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/run/t5418.check
diff --git a/test/pending/run/t5418.scala b/test/files/run/t5418.scala
index e3cb20cf82..e3cb20cf82 100644
--- a/test/pending/run/t5418.scala
+++ b/test/files/run/t5418.scala
diff --git a/test/files/run/t5418b.check b/test/files/run/t5418b.check
index 48d82a2aae..f036a4be84 100644
--- a/test/files/run/t5418b.check
+++ b/test/files/run/t5418b.check
@@ -1,2 +1,2 @@
new Object().getClass()
-TypeRef(ThisType(java.lang), java.lang.Class, List(TypeRef(NoPrefix, newTypeName("?0"), List())))
+TypeRef(ThisType(java.lang), java.lang.Class, List(TypeRef(NoPrefix, TypeName("?0"), List())))
diff --git a/test/files/run/t5603.check b/test/files/run/t5603.check
index 5127d3c1c7..3b2eb55313 100644
--- a/test/files/run/t5603.check
+++ b/test/files/run/t5603.check
@@ -12,7 +12,7 @@
[95:101]<paramaccessor> private[this] val i: [98:101]Int = _;
<119:139>def <init>([95]i: [98]Int) = <119:139>{
<119:139>val nameElse = <134:139>"Bob";
- [94][94][94]super.<init>();
+ [NoPosition][NoPosition][NoPosition]super.<init>();
[94]()
};
[168:184]val name = [179:184]"avc";
@@ -20,7 +20,7 @@
};
[215:241]object Test extends [227:241][235:238]App {
[227]def <init>() = [227]{
- [227][227][227]super.<init>();
+ [NoPosition][NoPosition][NoPosition]super.<init>();
[227]()
};
[NoPosition]<empty>
diff --git a/test/files/run/t5603.scala b/test/files/run/t5603.scala
index 60dfd01fee..8c8038a602 100644
--- a/test/files/run/t5603.scala
+++ b/test/files/run/t5603.scala
@@ -36,7 +36,8 @@ object Test extends DirectTest {
val settings = new Settings()
settings.Xprintpos.value = true
+ settings.Yrangepos.value = true
val command = new CompilerCommand((CommandLineParser tokenize extraSettings) ++ args.toList, settings)
- new Global(command.settings, new ConsoleReporter(settings)) with interactive.RangePositions
+ Global(command.settings, new ConsoleReporter(settings))
}
}
diff --git a/test/pending/run/t5610a.check b/test/files/run/t5610a.check
index 2aa46b3b91..2aa46b3b91 100644
--- a/test/pending/run/t5610a.check
+++ b/test/files/run/t5610a.check
diff --git a/test/pending/run/t5610a.scala b/test/files/run/t5610a.scala
index f20b295762..f20b295762 100644
--- a/test/pending/run/t5610a.scala
+++ b/test/files/run/t5610a.scala
diff --git a/test/files/run/t5655.check b/test/files/run/t5655.check
index 43ebd50e7a..1103b0e36f 100644
--- a/test/files/run/t5655.check
+++ b/test/files/run/t5655.check
@@ -4,7 +4,7 @@ Type :help for more information.
scala>
scala> object x { def x={} }
-defined module x
+defined object x
scala> import x._
import x._
diff --git a/test/files/run/t5879.check b/test/files/run/t5879.check
index b6cbda35a7..4bdf3f5fcf 100644
--- a/test/files/run/t5879.check
+++ b/test/files/run/t5879.check
@@ -1,16 +1,8 @@
Map(1 -> 1)
1
-Map(1 -> 1)
-1
-(1,1)
-Map(1 -> 1)
-1
(1,1)
Map(1 -> 1)
1
(1,2)
Map(1 -> 2)
2
-(1,2)
-Map(1 -> 2)
-2 \ No newline at end of file
diff --git a/test/files/run/t5879.scala b/test/files/run/t5879.scala
index e1c07fc4c2..18dd94289d 100644
--- a/test/files/run/t5879.scala
+++ b/test/files/run/t5879.scala
@@ -17,10 +17,6 @@ object Test {
val r = a.merged(b)(null)
println(r)
println(r(1))
-
- val rold = a.merge(b)
- println(rold)
- println(rold(1))
}
def resolveFirst() {
@@ -34,10 +30,6 @@ object Test {
val r = a.merged(b) { collision }
println(r)
println(r(1))
-
- val rold = a.merge(b, collision)
- println(rold)
- println(rold(1))
}
def resolveSecond() {
@@ -51,10 +43,6 @@ object Test {
val r = a.merged(b) { collision }
println(r)
println(r(1))
-
- val rold = a.merge(b, collision)
- println(rold)
- println(rold(1))
}
def resolveMany() {
@@ -66,9 +54,6 @@ object Test {
val r = a.merged(b) { collision }
for ((k, v) <- r) assert(v == 100 + 2 * k, (k, v))
-
- val rold = a.merge(b, collision)
- for ((k, v) <- r) assert(v == 100 + 2 * k, (k, v))
}
}
diff --git a/test/files/run/t6028.check b/test/files/run/t6028.check
index 94013efd36..67c30e35c6 100644
--- a/test/files/run/t6028.check
+++ b/test/files/run/t6028.check
@@ -31,14 +31,14 @@ package <empty> {
};
final def apply(): Int = $anonfun$foo$1.this.apply$mcI$sp();
<specialized> def apply$mcI$sp(): Int = $anonfun$foo$1.this.$outer.T$$classParam.+($anonfun$foo$1.this.$outer.field()).+($anonfun$foo$1.this.methodParam$1).+($anonfun$foo$1.this.methodLocal$1);
- <synthetic> <paramaccessor> private[this] val $outer: T = _;
- <synthetic> <stable> def T$$anonfun$$$outer(): T = $anonfun$foo$1.this.$outer;
- final <bridge> def apply(): Object = scala.Int.box($anonfun$foo$1.this.apply());
+ <synthetic> <paramaccessor> <artifact> private[this] val $outer: T = _;
+ <synthetic> <stable> <artifact> def T$$anonfun$$$outer(): T = $anonfun$foo$1.this.$outer;
+ final <bridge> <artifact> def apply(): Object = scala.Int.box($anonfun$foo$1.this.apply());
<synthetic> <paramaccessor> private[this] val methodParam$1: Int = _;
<synthetic> <paramaccessor> private[this] val methodLocal$1: Int = _
};
abstract trait MethodLocalTrait$1 extends Object {
- <synthetic> <stable> def T$MethodLocalTrait$$$outer(): T
+ <synthetic> <stable> <artifact> def T$MethodLocalTrait$$$outer(): T
};
object MethodLocalObject$2 extends Object with T#MethodLocalTrait$1 {
def <init>($outer: T, barParam$1: Int): T#MethodLocalObject$2.type = {
@@ -46,9 +46,9 @@ package <empty> {
MethodLocalObject$2.this.$asInstanceOf[T#MethodLocalTrait$1$class]()./*MethodLocalTrait$1$class*/$init$(barParam$1);
()
};
- <synthetic> <paramaccessor> private[this] val $outer: T = _;
- <synthetic> <stable> def T$MethodLocalObject$$$outer(): T = MethodLocalObject$2.this.$outer;
- <synthetic> <stable> def T$MethodLocalTrait$$$outer(): T = MethodLocalObject$2.this.$outer
+ <synthetic> <paramaccessor> <artifact> private[this] val $outer: T = _;
+ <synthetic> <stable> <artifact> def T$MethodLocalObject$$$outer(): T = MethodLocalObject$2.this.$outer;
+ <synthetic> <stable> <artifact> def T$MethodLocalTrait$$$outer(): T = MethodLocalObject$2.this.$outer
};
final <stable> private[this] def MethodLocalObject$1(barParam$1: Int, MethodLocalObject$module$1: runtime.VolatileObjectRef): T#MethodLocalObject$2.type = {
MethodLocalObject$module$1.elem = new T#MethodLocalObject$2.type(T.this, barParam$1);
@@ -69,9 +69,9 @@ package <empty> {
<specialized> def apply$mcV$sp(): Unit = try {
$anonfun$tryy$1.this.tryyLocal$1.elem = $anonfun$tryy$1.this.tryyParam$1
} finally ();
- <synthetic> <paramaccessor> private[this] val $outer: T = _;
- <synthetic> <stable> def T$$anonfun$$$outer(): T = $anonfun$tryy$1.this.$outer;
- final <bridge> def apply(): Object = {
+ <synthetic> <paramaccessor> <artifact> private[this] val $outer: T = _;
+ <synthetic> <stable> <artifact> def T$$anonfun$$$outer(): T = $anonfun$tryy$1.this.$outer;
+ final <bridge> <artifact> def apply(): Object = {
$anonfun$tryy$1.this.apply();
scala.runtime.BoxedUnit.UNIT
};
diff --git a/test/files/run/t6064.scala b/test/files/run/t6064.scala
new file mode 100644
index 0000000000..fc184dd92d
--- /dev/null
+++ b/test/files/run/t6064.scala
@@ -0,0 +1,9 @@
+object Test extends App {
+ assert(Option(42) contains 42)
+ assert(Some(42) contains 42)
+ assert(Option(BigInt(42)) contains 42)
+ assert(Option(42) contains BigInt(42))
+ assert(!(None contains 42))
+ assert(Some(null) contains null)
+ assert(!(Option(null) contains null))
+} \ No newline at end of file
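t6064 covers Option#contains, which compares the wrapped value with == and is always false on None. A short usage sketch (hypothetical ContainsDemo, not part of the test) highlighting the asymmetry the last two assertions pin down:

object ContainsDemo extends App {
  val some: Option[Int] = Some(3)
  println(some.contains(3))                // true
  println(some.contains(4))                // false
  println((None: Option[Int]).contains(3)) // false

  // Some(null) wraps a value, so it contains null;
  // Option(null) normalizes to None, so it contains nothing.
  println(Some(null).contains(null))   // true
  println(Option(null).contains(null)) // false
}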
diff --git a/test/files/run/t6102.flags b/test/files/run/t6102.flags
index e35535c8ea..72fe7b1aa0 100644
--- a/test/files/run/t6102.flags
+++ b/test/files/run/t6102.flags
@@ -1 +1 @@
- -Ydead-code
+ -Ydead-code -Ydebug -Xfatal-warnings
diff --git a/test/files/run/t6150.scala b/test/files/run/t6150.scala
index bd8af5d460..f3e83e1549 100644
--- a/test/files/run/t6150.scala
+++ b/test/files/run/t6150.scala
@@ -1,7 +1,3 @@
-
-
-
-
object Test {
import collection.{ immutable, mutable, generic }
def TheOneTrueCBF = collection.IndexedSeq.ReusableCBF
@@ -38,7 +34,3 @@ object Test {
check(iv.:+(4)(cbf3))
}
}
-
-
-
-
diff --git a/test/files/run/t6178.scala b/test/files/run/t6178.scala
index 0b4cf0bbf5..41e148af91 100644
--- a/test/files/run/t6178.scala
+++ b/test/files/run/t6178.scala
@@ -2,6 +2,6 @@ import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
object Test extends App {
- val plus = typeOf[java.lang.String].member(newTermName("$plus")).asMethod
+ val plus = typeOf[java.lang.String].member(TermName("$plus")).asMethod
println(cm.reflect("").reflectMethod(plus).apply("2"))
} \ No newline at end of file
diff --git a/test/files/run/t6181.scala b/test/files/run/t6181.scala
index fb23eaff63..eaa7340178 100644
--- a/test/files/run/t6181.scala
+++ b/test/files/run/t6181.scala
@@ -3,6 +3,6 @@ import scala.reflect.runtime.{currentMirror => cm}
object Test extends App {
class C { def test(x: => Int) = println(x) }
- val mm = cm.reflect(new C).reflectMethod(typeOf[C].member(newTermName("test")).asMethod)
+ val mm = cm.reflect(new C).reflectMethod(typeOf[C].member(TermName("test")).asMethod)
mm(2)
} \ No newline at end of file
diff --git a/test/files/run/t6187.check b/test/files/run/t6187.check
index c0ca02923b..621306b2ef 100644
--- a/test/files/run/t6187.check
+++ b/test/files/run/t6187.check
@@ -12,7 +12,7 @@ scala> def macroImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[List[T]
macroImpl: [T](c: scala.reflect.macros.Context)(t: c.Expr[T])(implicit evidence$1: c.WeakTypeTag[T])c.Expr[List[T]]
scala> def demo[T](t: T): List[T] = macro macroImpl[T]
-demo: [T](t: T)List[T]
+defined term macro demo: [T](t: T)List[T]
scala> def m[T](t: T): List[List[T]] =
demo( List((t,true)) collect { case (x,true) => x } )
diff --git a/test/files/run/t6199-mirror.scala b/test/files/run/t6199-mirror.scala
index 772a384542..3fda56bf7c 100644
--- a/test/files/run/t6199-mirror.scala
+++ b/test/files/run/t6199-mirror.scala
@@ -3,5 +3,5 @@ import scala.reflect.runtime.{currentMirror => cm}
object Test extends App {
class C { def foo = () }
- println(cm.reflect(new C).reflectMethod(typeOf[C].member(newTermName("foo")).asMethod)())
+ println(cm.reflect(new C).reflectMethod(typeOf[C].member(TermName("foo")).asMethod)())
} \ No newline at end of file
diff --git a/test/files/run/t6223.check b/test/files/run/t6223.check
index 90ec019407..4a09d1930f 100644
--- a/test/files/run/t6223.check
+++ b/test/files/run/t6223.check
@@ -1,4 +1,4 @@
bar
-bar$mcI$sp
bar$mIc$sp
bar$mIcI$sp
+bar$mcI$sp
diff --git a/test/files/run/t6223.scala b/test/files/run/t6223.scala
index 4ab7c832e6..fb176e32e6 100644
--- a/test/files/run/t6223.scala
+++ b/test/files/run/t6223.scala
@@ -5,7 +5,7 @@ class Foo[@specialized(Int) A](a:A) {
object Test {
def main(args:Array[String]) {
val f = new Foo(333)
- val ms = f.getClass().getDeclaredMethods()
+ val ms = f.getClass().getDeclaredMethods().sortBy(_.getName)
ms.foreach(m => println(m.getName))
}
}
diff --git a/test/files/run/t6288.check b/test/files/run/t6288.check
index e6467edc95..4465b6302c 100644
--- a/test/files/run/t6288.check
+++ b/test/files/run/t6288.check
@@ -1,8 +1,8 @@
[[syntax trees at end of patmat]] // newSource1
[7]package [7]<empty> {
[7]object Case3 extends [13][106]scala.AnyRef {
- [13]def <init>(): [13]Case3.type = [13]{
- [13][13][13]Case3.super.<init>();
+ [106]def <init>(): [13]Case3.type = [106]{
+ [106][106][106]Case3.super.<init>();
[13]()
};
[21]def unapply([29]z: [32]<type: [32]scala.Any>): [21]Option[Int] = [56][52][52]scala.Some.apply[[52]Int]([58]-1);
@@ -24,8 +24,8 @@
}
};
[113]object Case4 extends [119][217]scala.AnyRef {
- [119]def <init>(): [119]Case4.type = [119]{
- [119][119][119]Case4.super.<init>();
+ [217]def <init>(): [119]Case4.type = [217]{
+ [217][217][217]Case4.super.<init>();
[119]()
};
[127]def unapplySeq([138]z: [141]<type: [141]scala.Any>): [127]Option[List[Int]] = [167]scala.None;
@@ -50,8 +50,8 @@
}
};
[224]object Case5 extends [230][312]scala.AnyRef {
- [230]def <init>(): [230]Case5.type = [230]{
- [230][230][230]Case5.super.<init>();
+ [312]def <init>(): [230]Case5.type = [312]{
+ [312][312][312]Case5.super.<init>();
[230]()
};
[238]def unapply([246]z: [249]<type: [249]scala.Any>): [238]Boolean = [265]true;
@@ -60,7 +60,7 @@
[273]case5()[293]{
[293]<synthetic> val o7: [293]Option[List[Int]] = [293][293]Case4.unapplySeq([293]x1);
[293]if ([293]o7.isEmpty.unary_!)
- [293]if ([293][293][293][293]o7.get.!=([293]null).&&([293][293][293][293]o7.get.lengthCompare([293]0).==([195]0)))
+ [293]if ([293][293][293][293]o7.get.!=([293]null).&&([293][293][293][293]o7.get.lengthCompare([293]0).==([293]0)))
[304][304]matchEnd4([304]())
else
[293][293]case6()
diff --git a/test/files/run/t6288b-jump-position.check b/test/files/run/t6288b-jump-position.check
index 83ba810958..ece88b18f0 100644
--- a/test/files/run/t6288b-jump-position.check
+++ b/test/files/run/t6288b-jump-position.check
@@ -65,9 +65,9 @@ object Case3 extends Object {
blocks: [1]
1:
- 1 THIS(Case3)
- 1 CALL_METHOD java.lang.Object.<init> (super())
- 1 RETURN(UNIT)
+ 12 THIS(Case3)
+ 12 CALL_METHOD java.lang.Object.<init> (super())
+ 12 RETURN(UNIT)
}
Exception handlers:
diff --git a/test/files/run/t6329_repl.check b/test/files/run/t6329_repl.check
index 693263a5c2..3480bbdd0b 100644
--- a/test/files/run/t6329_repl.check
+++ b/test/files/run/t6329_repl.check
@@ -3,11 +3,37 @@ Type :help for more information.
scala>
-scala> classManifest[List[_]]
+scala> import scala.reflect.classTag
+import scala.reflect.classTag
+
+scala> classManifest[scala.List[_]]
warning: there were 1 deprecation warning(s); re-run with -deprecation for details
-res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[Any]
+res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[<?>]
-scala> scala.reflect.classTag[List[_]]
+scala> classTag[scala.List[_]]
res1: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List
+scala> classManifest[scala.collection.immutable.List[_]]
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+res2: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[<?>]
+
+scala> classTag[scala.collection.immutable.List[_]]
+res3: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List
+
+scala> classManifest[Predef.Set[_]]
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+res4: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set[<?>]
+
+scala> classTag[Predef.Set[_]]
+res5: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set
+
+scala> classManifest[scala.collection.immutable.Set[_]]
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+res6: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set[<?>]
+
+scala> classTag[scala.collection.immutable.Set[_]]
+res7: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set
+
+scala>
+
scala>
diff --git a/test/files/run/t6329_repl.scala b/test/files/run/t6329_repl.scala
index add6d64962..f210d6512c 100644
--- a/test/files/run/t6329_repl.scala
+++ b/test/files/run/t6329_repl.scala
@@ -2,7 +2,14 @@ import scala.tools.partest.ReplTest
object Test extends ReplTest {
def code = """
- |classManifest[List[_]]
- |scala.reflect.classTag[List[_]]
- |""".stripMargin
+ |import scala.reflect.classTag
+ |classManifest[scala.List[_]]
+ |classTag[scala.List[_]]
+ |classManifest[scala.collection.immutable.List[_]]
+ |classTag[scala.collection.immutable.List[_]]
+ |classManifest[Predef.Set[_]]
+ |classTag[Predef.Set[_]]
+ |classManifest[scala.collection.immutable.Set[_]]
+ |classTag[scala.collection.immutable.Set[_]]
+ """.stripMargin
}
diff --git a/test/files/run/t6329_vanilla.check b/test/files/run/t6329_vanilla.check
index 8282afaeba..ad8f4b5c77 100644
--- a/test/files/run/t6329_vanilla.check
+++ b/test/files/run/t6329_vanilla.check
@@ -1,2 +1,8 @@
-scala.collection.immutable.List[Any]
+scala.collection.immutable.List[<?>]
scala.collection.immutable.List
+scala.collection.immutable.List[<?>]
+scala.collection.immutable.List
+scala.collection.immutable.Set[<?>]
+scala.collection.immutable.Set
+scala.collection.immutable.Set[<?>]
+scala.collection.immutable.Set
diff --git a/test/files/run/t6329_vanilla.scala b/test/files/run/t6329_vanilla.scala
index a31cd5c72e..f2d843896d 100644
--- a/test/files/run/t6329_vanilla.scala
+++ b/test/files/run/t6329_vanilla.scala
@@ -1,4 +1,12 @@
+import scala.reflect.classTag
+
object Test extends App {
- println(classManifest[List[_]])
- println(scala.reflect.classTag[List[_]])
-} \ No newline at end of file
+ println(classManifest[scala.List[_]])
+ println(classTag[scala.List[_]])
+ println(classManifest[scala.collection.immutable.List[_]])
+ println(classTag[scala.collection.immutable.List[_]])
+ println(classManifest[Predef.Set[_]])
+ println(classTag[Predef.Set[_]])
+ println(classManifest[scala.collection.immutable.Set[_]])
+ println(classTag[scala.collection.immutable.Set[_]])
+}
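The t6329 updates replace the deprecated classManifest with scala.reflect.classTag. A ClassTag only carries the runtime class, so printing one shows the erased class with no element type, as in the check files above. A minimal sketch (hypothetical TagDemo and singleton, not part of the test) of why the tag is still useful:

import scala.reflect.{ClassTag, classTag}

object TagDemo extends App {
  println(classTag[List[_]]) // scala.collection.immutable.List

  // A ClassTag in scope is what lets generic code build an Array[T].
  def singleton[T: ClassTag](x: T): Array[T] = Array(x)
  println(singleton("a").length) // 1
}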
diff --git a/test/files/run/t6381.check b/test/files/run/t6381.check
new file mode 100644
index 0000000000..5070b67e46
--- /dev/null
+++ b/test/files/run/t6381.check
@@ -0,0 +1,17 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> import language.experimental.macros
+import language.experimental.macros
+
+scala> def pos_impl(c: reflect.macros.Context): c.Expr[String] =
+ c.literal(c.enclosingPosition.getClass.toString)
+pos_impl: (c: scala.reflect.macros.Context)c.Expr[String]
+
+scala> def pos = macro pos_impl
+defined term macro pos: String
+
+scala> pos
+res0: String = class scala.reflect.internal.util.RangePosition
+
+scala>
diff --git a/test/files/run/t6381.scala b/test/files/run/t6381.scala
new file mode 100644
index 0000000000..859ec3cb30
--- /dev/null
+++ b/test/files/run/t6381.scala
@@ -0,0 +1,13 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ |import language.experimental.macros
+ |def pos_impl(c: reflect.macros.Context): c.Expr[String] =
+ | c.literal(c.enclosingPosition.getClass.toString)
+ |def pos = macro pos_impl
+ |pos
+ |""".stripMargin.trim
+
+ override def extraSettings: String = "-Yrangepos"
+}
diff --git a/test/files/run/t6392b.check b/test/files/run/t6392b.check
index e9c7ecaa34..2afc48495f 100644
--- a/test/files/run/t6392b.check
+++ b/test/files/run/t6392b.check
@@ -1 +1 @@
-ModuleDef(Modifiers(), newTermName("C"), Template(List(Select(Ident(scala#PK), newTypeName("AnyRef")#TPE)), emptyValDef, List(DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(newTypeName("C")), tpnme.EMPTY), nme.CONSTRUCTOR#PCTOR), List())), Literal(Constant(())))))))
+ModuleDef(Modifiers(), TermName("C"), Template(List(Select(Ident(scala#PK), TypeName("AnyRef")#TPE)), emptyValDef, List(DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(TypeName("C")), tpnme.EMPTY), nme.CONSTRUCTOR#PCTOR), List())), Literal(Constant(())))))))
diff --git a/test/files/run/t6394a/Macros_1.scala b/test/files/run/t6394a/Macros_1.scala
index 3d39d3e40a..5aa07e7f41 100644
--- a/test/files/run/t6394a/Macros_1.scala
+++ b/test/files/run/t6394a/Macros_1.scala
@@ -4,7 +4,7 @@ object Macros {
def impl(c:Context): c.Expr[Any] = {
import c.universe._
- val selfTree = This(c.enclosingClass.symbol.asModule.moduleClass)
+ val selfTree = This(c.enclosingImpl.symbol.asModule.moduleClass)
c.Expr[AnyRef](selfTree)
}
diff --git a/test/files/run/t6406-regextract.check b/test/files/run/t6406-regextract.check
new file mode 100644
index 0000000000..88c5a52eb3
--- /dev/null
+++ b/test/files/run/t6406-regextract.check
@@ -0,0 +1,4 @@
+List(1, 3)
+List(1, 3)
+List(1, 3)
+Some(2011) Some(2011)
diff --git a/test/files/run/t6406-regextract.scala b/test/files/run/t6406-regextract.scala
new file mode 100644
index 0000000000..83679a5167
--- /dev/null
+++ b/test/files/run/t6406-regextract.scala
@@ -0,0 +1,30 @@
+
+object Test extends App {
+ import util.matching._
+ import Regex._
+
+ val r = "(\\d+)".r
+ val q = """(\d)""".r
+ val ns = List("1,2","x","3,4")
+ val u = r.unanchored
+
+ val is = ns collect { case u(x) => x } map { case r(x) => x }
+ println(is)
+ // Match from same pattern
+ val js = (ns map { u findFirstMatchIn _ }).flatten map { case r(x) => x }
+ println(js)
+ // Match not from same pattern
+ val ks = (ns map { q findFirstMatchIn _ }).flatten map { case r(x) => x }
+ println(ks)
+
+ val t = "Last modified 2011-07-15"
+ val p1 = """(\d\d\d\d)-(\d\d)-(\d\d)""".r
+ val y1: Option[String] = for {
+ p1(year, month, day) <- p1 findFirstIn t
+ } yield year
+ val y2: Option[String] = for {
+ p1(year, month, day) <- p1 findFirstMatchIn t
+ } yield year
+ println(s"$y1 $y2")
+
+}
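t6406-regextract exercises Regex#unanchored and using a compiled pattern as an extractor. A compact sketch (hypothetical RegexDemo, not part of the test) of the anchored versus unanchored difference:

object RegexDemo extends App {
  val date = """(\d{4})-(\d{2})-(\d{2})""".r
  val text = "Last modified 2011-07-15"

  // An anchored pattern must match the whole string, so this falls through.
  text match {
    case date(y, m, d) => println("anchored: " + y)
    case _             => println("anchored: no match")
  }

  // The unanchored variant matches anywhere inside the string.
  val inside = date.unanchored
  text match {
    case inside(y, m, d) => println("unanchored: " + y) // unanchored: 2011
    case _               => println("unanchored: no match")
  }
}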
diff --git a/test/files/run/t6439.check b/test/files/run/t6439.check
index 178ea739f5..3f5f7dc8a4 100644
--- a/test/files/run/t6439.check
+++ b/test/files/run/t6439.check
@@ -7,7 +7,7 @@ scala> class A
defined class A
scala> object A // warn
-defined module A
+defined object A
warning: previously defined class A is not a companion to object A.
Companions must be defined together; you may wish to use :paste mode for this.
@@ -15,15 +15,15 @@ scala> trait B
defined trait B
scala> object B // warn
-defined module B
+defined object B
warning: previously defined trait B is not a companion to object B.
Companions must be defined together; you may wish to use :paste mode for this.
scala> object C
-defined module C
+defined object C
scala> object Bippy
-defined module Bippy
+defined object Bippy
scala> class C // warn
defined class C
@@ -40,13 +40,13 @@ scala> val D = 0 // no warn
D: Int = 0
scala> object E
-defined module E
+defined object E
scala> var E = 0 // no warn
E: Int = 0
scala> object F
-defined module F
+defined object F
scala> type F = Int // no warn
defined type alias F
@@ -58,7 +58,18 @@ scala> :power
** global._, definitions._ also imported **
** Try :help, :vals, power.<tab> **
-scala> intp("F") // this now works as a result of changing .typeSymbol to .typeSymbolDirect in IMain#Request#definedSymbols
+scala> object lookup {
+ import intp._
+ def apply(name: String): Symbol = types(name) orElse terms(name)
+ def types(name: String): Symbol = replScope lookup (name: TypeName) orElse getClassIfDefined(name)
+ def terms(name: String): Symbol = replScope lookup (name: TermName) orElse getModuleIfDefined(name)
+ def types[T: global.TypeTag] : Symbol = typeOf[T].typeSymbol
+ def terms[T: global.TypeTag] : Symbol = typeOf[T].termSymbol
+ def apply[T: global.TypeTag] : Symbol = typeOf[T].typeSymbol
+}
+defined object lookup
+
+scala> lookup("F") // this now works as a result of changing .typeSymbol to .typeSymbolDirect in IMain#Request#definedSymbols
res0: $r.intp.global.Symbol = type F
scala>
diff --git a/test/files/run/t6439.scala b/test/files/run/t6439.scala
index 70a2dbafaf..175a1d134f 100644
--- a/test/files/run/t6439.scala
+++ b/test/files/run/t6439.scala
@@ -1,6 +1,7 @@
import scala.tools.partest.ReplTest
object Test extends ReplTest {
+
def code = """
class A
object A // warn
@@ -17,6 +18,15 @@ var E = 0 // no warn
object F
type F = Int // no warn
:power
-intp("F") // this now works as a result of changing .typeSymbol to .typeSymbolDirect in IMain#Request#definedSymbols
+object lookup {
+ import intp._
+ def apply(name: String): Symbol = types(name) orElse terms(name)
+ def types(name: String): Symbol = replScope lookup (name: TypeName) orElse getClassIfDefined(name)
+ def terms(name: String): Symbol = replScope lookup (name: TermName) orElse getModuleIfDefined(name)
+ def types[T: global.TypeTag] : Symbol = typeOf[T].typeSymbol
+ def terms[T: global.TypeTag] : Symbol = typeOf[T].termSymbol
+ def apply[T: global.TypeTag] : Symbol = typeOf[T].typeSymbol
+}
+lookup("F") // this now works as a result of changing .typeSymbol to .typeSymbolDirect in IMain#Request#definedSymbols
"""
}
diff --git a/test/files/run/t6448.check b/test/files/run/t6448.check
new file mode 100644
index 0000000000..9401568319
--- /dev/null
+++ b/test/files/run/t6448.check
@@ -0,0 +1,32 @@
+
+=List.collect=
+f(1)
+f(2)
+List(1)
+
+=List.collectFirst=
+f(1)
+Some(1)
+
+=Option.collect=
+f(1)
+Some(1)
+
+=Option.collect=
+f(2)
+None
+
+=Stream.collect=
+f(1)
+f(2)
+List(1)
+
+=Stream.collectFirst=
+f(1)
+Some(1)
+
+=ParVector.collect=
+(ParVector(1),2)
+
+=ParArray.collect=
+(ParArray(1),2)
diff --git a/test/files/run/t6448.scala b/test/files/run/t6448.scala
new file mode 100644
index 0000000000..4d1528e500
--- /dev/null
+++ b/test/files/run/t6448.scala
@@ -0,0 +1,61 @@
+// Tests to show that various `collect` functions avoid calling
+// both `PartialFunction#isDefinedAt` and `PartialFunction#apply`.
+//
+object Test {
+ def f(i: Int) = { println("f(" + i + ")"); true }
+ class Counter {
+ var count = 0
+ def apply(i: Int) = synchronized {count += 1; true}
+ }
+
+ def testing(label: String)(body: => Any) {
+ println(s"\n=$label=")
+ println(body)
+ }
+
+ def main(args: Array[String]) {
+ testing("List.collect")(List(1, 2) collect { case x if f(x) && x < 2 => x})
+ testing("List.collectFirst")(List(1, 2) collectFirst { case x if f(x) && x < 2 => x})
+ testing("Option.collect")(Some(1) collect { case x if f(x) && x < 2 => x})
+ testing("Option.collect")(Some(2) collect { case x if f(x) && x < 2 => x})
+ testing("Stream.collect")((Stream(1, 2).collect { case x if f(x) && x < 2 => x}).toList)
+ testing("Stream.collectFirst")(Stream.continually(1) collectFirst { case x if f(x) && x < 2 => x})
+
+ import collection.parallel.ParIterable
+ import collection.parallel.immutable.ParVector
+ import collection.parallel.mutable.ParArray
+ testing("ParVector.collect") {
+ val counter = new Counter()
+ (ParVector(1, 2) collect { case x if counter(x) && x < 2 => x}, counter.synchronized(counter.count))
+ }
+
+ testing("ParArray.collect") {
+ val counter = new Counter()
+ (ParArray(1, 2) collect { case x if counter(x) && x < 2 => x}, counter.synchronized(counter.count))
+ }
+
+ object PendingTests {
+ testing("Iterator.collect")((Iterator(1, 2) collect { case x if f(x) && x < 2 => x}).toList)
+
+ testing("List.view.collect")((List(1, 2).view collect { case x if f(x) && x < 2 => x}).force)
+
+ // This would do the trick in Future.collect, but I haven't added this yet as there is a tradeoff
+ // with extra allocations to consider.
+ //
+ // pf.lift(v) match {
+ // case Some(x) => p success x
+ // case None => fail(v)
+ // }
+ testing("Future.collect") {
+ import concurrent.ExecutionContext.Implicits.global
+ import concurrent.Await
+ import concurrent.duration.Duration
+ val result = concurrent.future(1) collect { case x if f(x) => x}
+ Await.result(result, Duration.Inf)
+ }
+
+ // TODO Future.{onSuccess, onFailure, recoverWith, andThen}
+ }
+
+ }
+}
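The header comment of t6448 states the goal: collect and friends should evaluate a partial function's guard once per element instead of calling both isDefinedAt and apply, presumably by routing through PartialFunction#applyOrElse / runWith. A hedged sketch (hypothetical CollectOnceDemo with an instrumented guard, not part of the test) showing the single evaluation from the caller's side:

object CollectOnceDemo extends App {
  var guardCalls = 0
  val pf: PartialFunction[Int, Int] = {
    case x if { guardCalls += 1; x < 2 } => x * 10
  }

  println(List(1, 2, 3) collect pf) // List(10)
  println(guardCalls)               // 3 after this change: one guard evaluation per element

  // The same single-evaluation behaviour is available directly via lift,
  // which is built on applyOrElse.
  guardCalls = 0
  println(List(1, 2, 3).flatMap(pf.lift(_).toList)) // List(10)
  println(guardCalls)                               // 3
}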
diff --git a/test/files/run/t6467.scala b/test/files/run/t6467.scala
new file mode 100644
index 0000000000..dc93b69fdc
--- /dev/null
+++ b/test/files/run/t6467.scala
@@ -0,0 +1,20 @@
+
+
+
+
+import collection._
+
+
+
+object Test extends App {
+
+ def compare(s1: String, s2: String) {
+ assert(s1 == s2, s1 + "\nvs.\n" + s2)
+ }
+
+ compare(List(1, 2, 3, 4).aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, "1234")
+ compare(List(1, 2, 3, 4).par.aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, "1234")
+ compare(Seq(0 until 100: _*).aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, (0 until 100).mkString)
+ compare(Seq(0 until 100: _*).par.aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, (0 until 100).mkString)
+
+} \ No newline at end of file
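t6467 checks aggregate on both sequential and parallel collections. It takes a zero value, a seqop that folds elements into a per-partition accumulator, and a combop that merges accumulators from different partitions; on a sequential collection combop may never be invoked, but it still has to be consistent with seqop. A small numeric sketch (hypothetical AggregateDemo, not part of the test):

object AggregateDemo extends App {
  val xs = 1 to 100
  println(xs.aggregate(0)(_ + _, _ + _))     // 5050
  println(xs.par.aggregate(0)(_ + _, _ + _)) // 5050, combop merges partition results
}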
diff --git a/test/files/run/t6548.check b/test/files/run/t6548.check
new file mode 100644
index 0000000000..5dfcb12e02
--- /dev/null
+++ b/test/files/run/t6548.check
@@ -0,0 +1,2 @@
+false
+List(JavaAnnotationWithNestedEnum_1(value = VALUE))
diff --git a/test/files/run/t6548/JavaAnnotationWithNestedEnum_1.java b/test/files/run/t6548/JavaAnnotationWithNestedEnum_1.java
new file mode 100644
index 0000000000..32004de537
--- /dev/null
+++ b/test/files/run/t6548/JavaAnnotationWithNestedEnum_1.java
@@ -0,0 +1,17 @@
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Target({ElementType.ANNOTATION_TYPE, ElementType.METHOD, ElementType.FIELD,
+ ElementType.TYPE, ElementType.PARAMETER})
+@Retention(RetentionPolicy.RUNTIME)
+public @interface JavaAnnotationWithNestedEnum_1
+{
+ public Value value() default Value.VALUE;
+
+ public enum Value
+ {
+ VALUE;
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t6548/Test_2.scala b/test/files/run/t6548/Test_2.scala
new file mode 100644
index 0000000000..7200259d36
--- /dev/null
+++ b/test/files/run/t6548/Test_2.scala
@@ -0,0 +1,12 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+class Bean {
+ @JavaAnnotationWithNestedEnum_1(JavaAnnotationWithNestedEnum_1.Value.VALUE)
+ def value = 1
+}
+
+object Test extends App {
+ println(cm.staticClass("Bean").isCaseClass)
+ println(typeOf[Bean].declaration(TermName("value")).annotations)
+}
diff --git a/test/files/run/t6572/bar_1.scala b/test/files/run/t6572/bar_1.scala
deleted file mode 100644
index 5518ced7af..0000000000
--- a/test/files/run/t6572/bar_1.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-package bar
-
-abstract class IntBase[V] extends Base[Int, V]
-
-class DefaultIntBase[V <: IntProvider] extends IntBase[V] {
- override protected def hashCode(key: Int) = key
-}
-
-trait IntProvider {
- def int: Int
-}
-
-abstract class Base[@specialized K, V] {
-
- protected def hashCode(key: K) = key.hashCode
-
- def get(key: K): V = throw new RuntimeException
-
-} \ No newline at end of file
diff --git a/test/files/run/t6572/foo_2.scala b/test/files/run/t6572/foo_2.scala
deleted file mode 100644
index 465f0b7c3c..0000000000
--- a/test/files/run/t6572/foo_2.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-//package foo
-
-import bar._
-
-class FooProvider extends IntProvider {
- def int = 3
-}
-
-class Wrapper(users: DefaultIntBase[FooProvider]) {
- final def user(userId: Int) = users.get(userId)
-}
-
-object Test {
- def main(args: Array[String]) {
- new Wrapper(new DefaultIntBase)
- }
-} \ No newline at end of file
diff --git a/test/files/run/t6591_1.check b/test/files/run/t6591_1.check
index b6cb6c286d..d1d448f283 100644
--- a/test/files/run/t6591_1.check
+++ b/test/files/run/t6591_1.check
@@ -1 +1 @@
-Block(List(ValDef(Modifiers(), newTermName("v"), Select(Ident(A), newTypeName("I")), Select(Ident(A), newTermName("impl")))), Ident(newTermName("v")))
+Block(List(ValDef(Modifiers(), TermName("v"), Select(Ident(A), TypeName("I")), Select(Ident(A), TermName("impl")))), Ident(TermName("v")))
diff --git a/test/files/run/t6591_2.check b/test/files/run/t6591_2.check
index b2d5797cbf..8c972ef920 100644
--- a/test/files/run/t6591_2.check
+++ b/test/files/run/t6591_2.check
@@ -1 +1 @@
-Block(List(ValDef(Modifiers(), newTermName("v"), SelectFromTypeTree(Ident(A), newTypeName("I")), Select(Apply(Select(New(Ident(A)), nme.CONSTRUCTOR), List()), newTermName("impl")))), Ident(newTermName("v")))
+Block(List(ValDef(Modifiers(), TermName("v"), SelectFromTypeTree(Ident(A), TypeName("I")), Select(Apply(Select(New(Ident(A)), nme.CONSTRUCTOR), List()), TermName("impl")))), Ident(TermName("v")))
diff --git a/test/files/run/t6591_3.check b/test/files/run/t6591_3.check
index a7b594ba65..f4592adce9 100644
--- a/test/files/run/t6591_3.check
+++ b/test/files/run/t6591_3.check
@@ -1 +1 @@
-Block(List(ValDef(Modifiers(), newTermName("v"), Select(This(newTypeName("A")), newTypeName("I")), Apply(Select(New(Select(This(newTypeName("A")), newTypeName("I"))), nme.CONSTRUCTOR), List()))), Ident(newTermName("v")))
+Block(List(ValDef(Modifiers(), TermName("v"), Select(This(TypeName("A")), TypeName("I")), Apply(Select(New(Select(This(TypeName("A")), TypeName("I"))), nme.CONSTRUCTOR), List()))), Ident(TermName("v")))
diff --git a/test/files/run/t6591_5.check b/test/files/run/t6591_5.check
index e0b6d06e6b..4ebc2236af 100644
--- a/test/files/run/t6591_5.check
+++ b/test/files/run/t6591_5.check
@@ -1 +1 @@
-Expr(Block(List(ValDef(Modifiers(), newTermName("v"), Select(Select(This(newTypeName("A")), newTermName("x")), newTypeName("I")), Select(Ident(scala.Predef), newTermName("$qmark$qmark$qmark")))), Ident(newTermName("v"))))
+Expr(Block(List(ValDef(Modifiers(), TermName("v"), Select(Select(This(TypeName("A")), TermName("x")), TypeName("I")), Select(Ident(scala.Predef), TermName("$qmark$qmark$qmark")))), Ident(TermName("v"))))
diff --git a/test/files/run/t6591_6.check b/test/files/run/t6591_6.check
index 0c4847b628..940e2026fe 100644
--- a/test/files/run/t6591_6.check
+++ b/test/files/run/t6591_6.check
@@ -1 +1 @@
-Expr(Block(List(ValDef(Modifiers(), newTermName("v"), Select(Select(Ident(newTermName("A")), newTermName("x")), newTypeName("I")), Select(Ident(scala.Predef), newTermName("$qmark$qmark$qmark")))), Ident(newTermName("v"))))
+Expr(Block(List(ValDef(Modifiers(), TermName("v"), Select(Select(Ident(TermName("A")), TermName("x")), TypeName("I")), Select(Ident(scala.Predef), TermName("$qmark$qmark$qmark")))), Ident(TermName("v"))))
diff --git a/test/files/run/t6611.scala b/test/files/run/t6611.scala
index 0947a48f90..c295368aea 100644
--- a/test/files/run/t6611.scala
+++ b/test/files/run/t6611.scala
@@ -7,55 +7,55 @@ object Test extends App {
locally {
val a = Array("1": Object)
- val a2 = Array[Object](a: _*)
+ val a2 = Array(a: _*)
assert(a ne a2)
}
locally {
val a = Array(true)
- val a2 = Array[Boolean](a: _*)
+ val a2 = Array(a: _*)
assert(a ne a2)
}
locally {
val a = Array(1: Short)
- val a2 = Array[Short](a: _*)
+ val a2 = Array(a: _*)
assert(a ne a2)
}
locally {
val a = Array(1: Byte)
- val a2 = Array[Byte](a: _*)
+ val a2 = Array(a: _*)
assert(a ne a2)
}
locally {
val a = Array(1)
- val a2 = Array[Int](a: _*)
+ val a2 = Array(a: _*)
assert(a ne a2)
}
locally {
val a = Array(1L)
- val a2 = Array[Long](a: _*)
+ val a2 = Array(a: _*)
assert(a ne a2)
}
locally {
val a = Array(1f)
- val a2 = Array[Float](a: _*)
+ val a2 = Array(a: _*)
assert(a ne a2)
}
locally {
val a = Array(1d)
- val a2 = Array[Double](a: _*)
+ val a2 = Array(a: _*)
assert(a ne a2)
}
locally {
val a = Array(())
- val a2 = Array[Unit](a: _*)
+ val a2 = Array(a: _*)
assert(a ne a2)
}
}
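t6611 now asserts that splicing an existing array into Array(...) as varargs always yields a fresh array (a ne a2), whatever the element type. A tiny sketch (hypothetical ArrayCopyDemo, not part of the test) of why that matters:

object ArrayCopyDemo extends App {
  val a  = Array(1, 2, 3)
  val a2 = Array(a: _*) // a defensive copy, not an alias
  a2(0) = 99
  println(a.toList)  // List(1, 2, 3): the original is untouched
  println(a2.toList) // List(99, 2, 3)
  println(a ne a2)   // true
}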
diff --git a/test/files/run/t6745-2.scala b/test/files/run/t6745-2.scala
new file mode 100644
index 0000000000..31ecd42bd1
--- /dev/null
+++ b/test/files/run/t6745-2.scala
@@ -0,0 +1,22 @@
+import scala.tools.nsc._
+import scala.tools.partest.CompilerTest
+import scala.collection.{ mutable, immutable, generic }
+
+object Test extends CompilerTest {
+ import global._
+ import rootMirror._
+ import definitions._
+ import global.analyzer.{Context, ImportInfo}
+
+ override def code = """
+package context {
+}
+ """
+
+ def check(source: String, unit: global.CompilationUnit) = {
+ val context: Context = global.analyzer.rootContext(unit)
+ val importInfo: ImportInfo = context.imports.head // Predef._
+ val importedSym = importInfo.importedSymbol(nme.CONSTRUCTOR)
+ assert(importedSym == NoSymbol, importedSym) // was "constructor Predef"
+ }
+}
diff --git a/test/files/run/t6860.check b/test/files/run/t6860.check
new file mode 100644
index 0000000000..c96331f540
--- /dev/null
+++ b/test/files/run/t6860.check
@@ -0,0 +1,4 @@
+Bippy[String]
+Bippy[String]
+throws[Nothing]
+throws[RuntimeException]
diff --git a/test/files/run/t6860.scala b/test/files/run/t6860.scala
new file mode 100644
index 0000000000..2dcc2a67f7
--- /dev/null
+++ b/test/files/run/t6860.scala
@@ -0,0 +1,20 @@
+class Bippy[T](val value: T) extends annotation.StaticAnnotation
+
+class A {
+ @Bippy("hi") def f1: Int = 1
+ @Bippy[String]("hi") def f2: Int = 2
+
+ @throws("what do I throw?") def f3 = throw new RuntimeException
+ @throws[RuntimeException]("that's good to know!") def f4 = throw new RuntimeException
+}
+
+object Test {
+ import scala.reflect.runtime.universe._
+
+ def main(args: Array[String]): Unit = {
+ val members = typeOf[A].declarations.toList
+ val tpes = members flatMap (_.annotations) map (_.tpe)
+
+ tpes.map(_.toString).sorted foreach println
+ }
+}
diff --git a/test/files/run/t6939.scala b/test/files/run/t6939.scala
new file mode 100644
index 0000000000..9fe721555f
--- /dev/null
+++ b/test/files/run/t6939.scala
@@ -0,0 +1,13 @@
+object Test extends App {
+ val foo = <x:foo xmlns:x="http://foo.com/"><x:bar xmlns:x="http://bar.com/"><x:baz/></x:bar></x:foo>
+ assert(foo.child.head.scope.toString == """ xmlns:x="http://bar.com/"""")
+
+ val fooDefault = <foo xmlns="http://foo.com/"><bar xmlns="http://bar.com/"><baz/></bar></foo>
+ assert(fooDefault.child.head.scope.toString == """ xmlns="http://bar.com/"""")
+
+ val foo2 = scala.xml.XML.loadString("""<x:foo xmlns:x="http://foo.com/"><x:bar xmlns:x="http://bar.com/"><x:baz/></x:bar></x:foo>""")
+ assert(foo2.child.head.scope.toString == """ xmlns:x="http://bar.com/"""")
+
+ val foo2Default = scala.xml.XML.loadString("""<foo xmlns="http://foo.com/"><bar xmlns="http://bar.com/"><baz/></bar></foo>""")
+ assert(foo2Default.child.head.scope.toString == """ xmlns="http://bar.com/"""")
+}
diff --git a/test/files/run/t6955.scala b/test/files/run/t6955.scala
index 2610acdec4..980aa420cc 100644
--- a/test/files/run/t6955.scala
+++ b/test/files/run/t6955.scala
@@ -1,5 +1,6 @@
import scala.tools.partest.IcodeTest
+// this class should compile to code that uses switches (twice)
class Switches {
type Tag = Byte
@@ -10,7 +11,8 @@ class Switches {
case _ => 0
}
- def switchOkay(i: Byte): Int = i match { // notice type of i is Byte
+ // this worked before, should keep working
+ def switchOkay(i: Byte): Int = i match {
case 1 => 1
case 2 => 2
case 3 => 3
diff --git a/test/files/run/t6989.check b/test/files/run/t6989.check
index 3a94f6e8df..8943792115 100644
--- a/test/files/run/t6989.check
+++ b/test/files/run/t6989.check
@@ -113,18 +113,6 @@ isProtected = false
isPublic = false
privateWithin = <none>
============
-sym = class $PrivateJavaClass, signature = ClassInfoType(...), owner = class JavaClass_1
-isPrivate = true
-isProtected = false
-isPublic = false
-privateWithin = <none>
-============
-sym = value this$0, signature = foo.JavaClass_1, owner = class $PrivateJavaClass
-isPrivate = false
-isProtected = false
-isPublic = false
-privateWithin = package foo
-============
sym = class $ProtectedJavaClass, signature = ClassInfoType(...), owner = class JavaClass_1
isPrivate = false
isProtected = true
@@ -143,18 +131,6 @@ isProtected = false
isPublic = false
privateWithin = package foo
============
-sym = class $ProtectedJavaClass, signature = ClassInfoType(...), owner = class JavaClass_1
-isPrivate = false
-isProtected = true
-isPublic = false
-privateWithin = package foo
-============
-sym = value this$0, signature = foo.JavaClass_1, owner = class $ProtectedJavaClass
-isPrivate = false
-isProtected = false
-isPublic = false
-privateWithin = package foo
-============
sym = class $PublicJavaClass, signature = ClassInfoType(...), owner = class JavaClass_1
isPrivate = false
isProtected = false
@@ -179,97 +155,55 @@ isProtected = false
isPublic = true
privateWithin = <none>
============
-sym = class $PublicJavaClass, signature = ClassInfoType(...), owner = class JavaClass_1
-isPrivate = false
-isProtected = false
-isPublic = true
-privateWithin = <none>
-============
-sym = constructor $PublicJavaClass, signature = (x$1: foo.JavaClass_1)JavaClass_1.this.$PublicJavaClass, owner = class $PublicJavaClass
+sym = constructor JavaClass_1, signature = ()foo.JavaClass_1, owner = class JavaClass_1
isPrivate = false
isProtected = false
isPublic = true
privateWithin = <none>
============
-sym = value this$0, signature = foo.JavaClass_1, owner = class $PublicJavaClass
-isPrivate = false
-isProtected = false
-isPublic = false
-privateWithin = package foo
-============
-sym = constructor JavaClass_1, signature = ()foo.JavaClass_1, owner = class JavaClass_1
+sym = object JavaClass_1, signature = foo.JavaClass_1.type, owner = package foo
isPrivate = false
isProtected = false
isPublic = true
privateWithin = <none>
============
-sym = class PrivateStaticJavaClass, signature = ClassInfoType(...), owner = class JavaClass_1
+sym = class PrivateStaticJavaClass, signature = ClassInfoType(...), owner = object JavaClass_1
isPrivate = true
isProtected = false
isPublic = false
privateWithin = <none>
============
-sym = object PrivateStaticJavaClass, signature = JavaClass_1.this.PrivateStaticJavaClass.type, owner = class JavaClass_1
+sym = object PrivateStaticJavaClass, signature = foo.JavaClass_1.PrivateStaticJavaClass.type, owner = object JavaClass_1
isPrivate = true
isProtected = false
isPublic = false
privateWithin = <none>
============
-sym = class ProtectedStaticJavaClass, signature = ClassInfoType(...), owner = class JavaClass_1
+sym = class ProtectedStaticJavaClass, signature = ClassInfoType(...), owner = object JavaClass_1
isPrivate = true
isProtected = false
isPublic = false
privateWithin = <none>
============
-sym = object ProtectedStaticJavaClass, signature = JavaClass_1.this.ProtectedStaticJavaClass.type, owner = class JavaClass_1
+sym = object ProtectedStaticJavaClass, signature = foo.JavaClass_1.ProtectedStaticJavaClass.type, owner = object JavaClass_1
isPrivate = true
isProtected = false
isPublic = false
privateWithin = <none>
============
-sym = class PublicStaticJavaClass, signature = ClassInfoType(...), owner = class JavaClass_1
-isPrivate = false
-isProtected = false
-isPublic = true
-privateWithin = <none>
-============
-sym = constructor PublicStaticJavaClass, signature = ()JavaClass_1.this.PublicStaticJavaClass, owner = class PublicStaticJavaClass
-isPrivate = false
-isProtected = false
-isPublic = true
-privateWithin = <none>
-============
-sym = object PublicStaticJavaClass, signature = JavaClass_1.this.PublicStaticJavaClass.type, owner = class JavaClass_1
+sym = class PublicStaticJavaClass, signature = ClassInfoType(...), owner = object JavaClass_1
isPrivate = false
isProtected = false
isPublic = true
privateWithin = <none>
============
-sym = object JavaClass_1, signature = foo.JavaClass_1.type, owner = package foo
-isPrivate = false
-isProtected = false
-isPublic = true
-privateWithin = <none>
-============
-sym = class PrivateStaticJavaClass, signature = ClassInfoType(...), owner = class JavaClass_1
-isPrivate = true
-isProtected = false
-isPublic = false
-privateWithin = <none>
-============
-sym = class ProtectedStaticJavaClass, signature = ClassInfoType(...), owner = class JavaClass_1
-isPrivate = true
-isProtected = false
-isPublic = false
-privateWithin = <none>
-============
-sym = class PublicStaticJavaClass, signature = ClassInfoType(...), owner = class JavaClass_1
+sym = constructor PublicStaticJavaClass, signature = ()foo.JavaClass_1.PublicStaticJavaClass, owner = class PublicStaticJavaClass
isPrivate = false
isProtected = false
isPublic = true
privateWithin = <none>
============
-sym = constructor PublicStaticJavaClass, signature = ()JavaClass_1.this.PublicStaticJavaClass, owner = class PublicStaticJavaClass
+sym = object PublicStaticJavaClass, signature = foo.JavaClass_1.PublicStaticJavaClass.type, owner = object JavaClass_1
isPrivate = false
isProtected = false
isPublic = true
diff --git a/test/files/run/t7015.check b/test/files/run/t7015.check
new file mode 100644
index 0000000000..7651fe06b0
--- /dev/null
+++ b/test/files/run/t7015.check
@@ -0,0 +1,11 @@
+Method returns Null type: null
+Method takes non Null type: null
+call through method null
+call through bridge null
+fetch field: null
+fetch field on companion: null
+fetch local: null
+fetch array element: null
+method that takes object: null
+method that takes anyref: null
+method that takes any: null
diff --git a/test/files/run/t7015.scala b/test/files/run/t7015.scala
new file mode 100644
index 0000000000..37a73a9fc4
--- /dev/null
+++ b/test/files/run/t7015.scala
@@ -0,0 +1,49 @@
+object Test {
+ def main(args : Array[String]) : Unit = {
+ println(s"Method returns Null type: $f")
+ println(s"Method takes non Null type: ${g(null)}")
+
+ // pass things through the g function because it expects
+ // a string. If we haven't adapted properly then we'll
+ // get verify errors
+ val b = new B
+ println(s"call through method ${g(b.f(null))}")
+ println(s"call through bridge ${g((b: A).f(null))}")
+
+ println(s"fetch field: ${g(b.nullField)}")
+ println(s"fetch field on companion: ${g(B.nullCompanionField)}")
+
+ val x = f
+ println(s"fetch local: ${g(x)}")
+
+ val nulls = Array(f, f, f)
+ println(s"fetch array element: ${g(nulls(0))}")
+
+ println(s"method that takes object: ${q(f)}")
+ println(s"method that takes anyref: ${r(f)}")
+ println(s"method that takes any: ${s(f)}")
+ }
+
+ def f: Null = null
+
+ def g(x: String) = x
+
+ def q(x: java.lang.Object) = x
+ def r(x: AnyRef) = x
+ def s(x: Any) = x
+}
+
+abstract class A {
+ def f(x: String): String
+}
+
+class B extends A {
+ val nullField = null
+
+ // this forces a bridge method because the return type is different
+ override def f(x: String) : Null = null
+}
+
+object B {
+ val nullCompanionField = null
+} \ No newline at end of file
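t7015 verifies that expressions of type Null can flow into any reference-typed position (fields, locals, array elements, parameters, bridge methods) without verify errors, since Null is a subtype of every reference type. A minimal sketch (hypothetical NullTypeDemo, not part of the test):

object NullTypeDemo extends App {
  def nothingHere: Null = null
  def show(s: String): String = "got: " + s

  println(show(nothingHere))   // got: null
  val cells: Array[String] = Array(nothingHere, null)
  println(cells.mkString(",")) // null,null
}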
diff --git a/test/files/run/t7064-old-style-supercalls.check b/test/files/run/t7064-old-style-supercalls.check
deleted file mode 100644
index 0cfbf08886..0000000000
--- a/test/files/run/t7064-old-style-supercalls.check
+++ /dev/null
@@ -1 +0,0 @@
-2
diff --git a/test/files/run/t7064-old-style-supercalls.scala b/test/files/run/t7064-old-style-supercalls.scala
deleted file mode 100644
index cffa7b1888..0000000000
--- a/test/files/run/t7064-old-style-supercalls.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-import scala.reflect.runtime.universe._
-import Flag._
-import definitions._
-import scala.reflect.runtime.{currentMirror => cm}
-import scala.tools.reflect.ToolBox
-import scala.tools.reflect.Eval
-
-object Test extends App {
- val PARAMACCESSOR = (1L << 29).asInstanceOf[FlagSet]
-
- // these trees can be acquired by running the following incantation:
- // echo 'class C(val x: Int); class D extends C(2)' > foo.scala
- // ./scalac -Xprint:parser -Yshow-trees-stringified -Yshow-trees-compact foo.scala
-
- val c = ClassDef(
- Modifiers(), newTypeName("C"), List(),
- Template(
- List(Select(Ident(ScalaPackage), newTypeName("AnyRef"))),
- emptyValDef,
- List(
- ValDef(Modifiers(PARAMACCESSOR), newTermName("x"), Ident(newTypeName("Int")), EmptyTree),
- DefDef(
- Modifiers(),
- nme.CONSTRUCTOR,
- List(),
- List(List(ValDef(Modifiers(PARAM | PARAMACCESSOR), newTermName("x"), Ident(newTypeName("Int")), EmptyTree))),
- TypeTree(),
- Block(
- List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())),
- Literal(Constant(())))))))
- val d = ClassDef(
- Modifiers(), newTypeName("D"), List(),
- Template(
- List(Ident(newTypeName("C"))),
- emptyValDef,
- List(
- DefDef(
- Modifiers(),
- nme.CONSTRUCTOR,
- List(),
- List(List()),
- TypeTree(),
- Block(
- List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List(Literal(Constant(2))))),
- Literal(Constant(())))))))
- val result = Select(Apply(Select(New(Ident(newTypeName("D"))), nme.CONSTRUCTOR), List()), newTermName("x"))
- println(cm.mkToolBox().eval(Block(List(c, d), result)))
-} \ No newline at end of file
diff --git a/test/files/run/t7096.scala b/test/files/run/t7096.scala
index e9c0323c2e..2a93dcc571 100644
--- a/test/files/run/t7096.scala
+++ b/test/files/run/t7096.scala
@@ -23,7 +23,7 @@ class Sub extends Base {
import syms._
def check(source: String, unit: global.CompilationUnit) {
- afterTyper {
+ exitingTyper {
terms.filter(_.name.toString == "foo").foreach(sym => {
val xParam = sym.tpe.paramss.flatten.head
val annot = sym.tpe.finalResultType.annotations.head
diff --git a/test/files/run/t7120.check b/test/files/run/t7120.check
new file mode 100644
index 0000000000..45a4fb75db
--- /dev/null
+++ b/test/files/run/t7120.check
@@ -0,0 +1 @@
+8
diff --git a/test/files/run/t7120/Base_1.scala b/test/files/run/t7120/Base_1.scala
new file mode 100644
index 0000000000..be07b4f34f
--- /dev/null
+++ b/test/files/run/t7120/Base_1.scala
@@ -0,0 +1,10 @@
+// This bug doesn't depend on separate compilation; in the interests
+// of minimizing the log output while debugging this problem, I've
+// split the compilation.
+
+case class Container( v: String )
+
+trait Base[ T <: AnyRef ] {
+ type UserType = T
+ protected def defect: PartialFunction[ UserType, String ]
+}
diff --git a/test/files/run/t7120/Derived_2.scala b/test/files/run/t7120/Derived_2.scala
new file mode 100644
index 0000000000..e0de629f82
--- /dev/null
+++ b/test/files/run/t7120/Derived_2.scala
@@ -0,0 +1,9 @@
+trait Derived extends Base[ Container ] {
+ protected def defect = { case c: Container => c.v.toString }
+}
+
+// Erasure was ignoring the prefix `Derived#7001.this` when erasing
+// A1, and consequently used `Object` rather than `Container`, which
+// was only seen because that signature clashed with the bridge method.
+//
+// applyOrElse[A1 <: Derived#7001.this.UserType#7318, B1 >: String](x1: A1)
diff --git a/test/files/run/t7120/Run_3.scala b/test/files/run/t7120/Run_3.scala
new file mode 100644
index 0000000000..95e7f994ff
--- /dev/null
+++ b/test/files/run/t7120/Run_3.scala
@@ -0,0 +1,3 @@
+object Test extends Derived with App {
+ println( defect( Container( "8" ) ) )
+}
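The comment in Derived_2.scala describes the t7120 bug: erasure dropped the prefix while erasing the abstract type member, so applyOrElse was generated with an Object parameter instead of Container and clashed with its bridge method. A rough sketch of the shape that used to trigger it, under hypothetical names (Box, Handler, BoxHandler) that are not part of the test:

case class Box(v: String)

trait Handler[T <: AnyRef] {
  type User = T
  protected def handle: PartialFunction[User, String]
  def run(u: User): String = handle.applyOrElse(u, (_: User) => "unhandled")
}

trait BoxHandler extends Handler[Box] {
  // The partial function's domain is the abstract type member User,
  // which erasure must resolve through the prefix to Box, not Object.
  protected def handle = { case b: Box => b.v }
}

object HandlerDemo extends BoxHandler with App {
  println(run(Box("8"))) // 8
}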
diff --git a/test/files/run/t7120b.check b/test/files/run/t7120b.check
new file mode 100644
index 0000000000..aa2f5e7c9f
--- /dev/null
+++ b/test/files/run/t7120b.check
@@ -0,0 +1,2 @@
+public int C$D.foo(java.lang.String)
+public int C$D.foo(java.lang.String)
diff --git a/test/files/run/t7120b.scala b/test/files/run/t7120b.scala
new file mode 100644
index 0000000000..9f6591aa06
--- /dev/null
+++ b/test/files/run/t7120b.scala
@@ -0,0 +1,27 @@
+trait Base[A] { type B = A; }
+class C extends Base[String] {
+ class D {
+ def foo[B1 <: B](b: B1) = 0
+ }
+}
+
+trait BaseHK[M[_], A] { type B = M[A]; }
+object BaseHK { type Id[X] = X }
+class CHK extends BaseHK[BaseHK.Id, String] {
+ class D {
+ def foo[B1 <: B](b: B1) = 0
+ }
+}
+
+
+object Test extends App {
+ val c = new C
+ val d = new c.D()
+ val meth = d.getClass.getMethods.find(_.getName == "foo").get
+ println(meth)
+
+ val chk = new CHK
+ val dhk = new chk.D()
+ val methhk = d.getClass.getMethods.find(_.getName == "foo").get
+ println(methhk)
+}
diff --git a/test/files/run/t7181.check b/test/files/run/t7181.check
new file mode 100644
index 0000000000..e4b8e30dfe
--- /dev/null
+++ b/test/files/run/t7181.check
@@ -0,0 +1,23 @@
+normal exit MainNormalExit
+finally MainNormalExit
+normal flow MainNormalExit
+
+return MainReturn
+finally MainReturn
+
+uncaught exception MainUncaughtException
+finally MainUncaughtException
+
+caught exception ExceptionNormalExit
+normal exit ExceptionNormalExit
+finally ExceptionNormalExit
+normal flow ExceptionNormalExit
+
+caught exception ExceptionReturn
+return ExceptionReturn
+finally ExceptionReturn
+
+caught exception ExceptionUncaughtException
+uncaught exception ExceptionUncaughtException
+finally ExceptionUncaughtException
+
diff --git a/test/files/run/t7181.scala b/test/files/run/t7181.scala
new file mode 100644
index 0000000000..a055e43481
--- /dev/null
+++ b/test/files/run/t7181.scala
@@ -0,0 +1,78 @@
+sealed abstract class Action
+// exit the try body normally
+case object MainNormalExit extends Action
+// exit the try body with a 'return'
+case object MainReturn extends Action
+// exit the try body with an uncaught exception
+case object MainUncaughtException extends Action
+// exit the try body with a caught exception and exit the exception handler normally
+case object ExceptionNormalExit extends Action
+// exit the try body with a caught exception and exit the exception handler with a 'return'
+case object ExceptionReturn extends Action
+// exit the try body with a caught exception and exit the exception handler with an uncaught exception
+case object ExceptionUncaughtException extends Action
+
+case class UncaughtException(action: Action) extends RuntimeException
+case class CaughtException(action: Action) extends RuntimeException
+
+object Test extends App {
+ def test(action: Action, expectException: Boolean = false) {
+ var gotException = false
+ val result = try
+ driver(action)
+ catch {
+ case UncaughtException(a) =>
+ gotException = true
+ a
+ }
+ if (gotException) assert(expectException, "Got unexpected exception")
+ else assert(!expectException, "Did not get expected exception")
+
+ assert(result == action, s"Expected $action but got $result")
+ println()
+ }
+
+ def driver(action: Action): Action = {
+ val result = try {
+ action match {
+ case MainNormalExit =>
+ println(s"normal exit $action")
+ action
+ case MainReturn =>
+ println(s"return $action")
+ return action
+ case MainUncaughtException =>
+ println(s"uncaught exception $action")
+ throw UncaughtException(action)
+ case _ =>
+ println(s"caught exception $action")
+ throw CaughtException(action)
+ }
+ } catch {
+ case CaughtException(action) => action match {
+ case ExceptionNormalExit =>
+ println(s"normal exit $action")
+ action
+ case ExceptionReturn =>
+ println(s"return $action")
+ return action
+ case ExceptionUncaughtException =>
+ println(s"uncaught exception $action")
+ throw UncaughtException(action)
+ case _ =>
+ sys.error(s"unexpected $action in exception handler")
+ }
+ } finally {
+ println(s"finally $action")
+ }
+ println(s"normal flow $action")
+ result
+ }
+
+ test(MainNormalExit)
+ test(MainReturn)
+ test(MainUncaughtException, true)
+ test(ExceptionNormalExit)
+ test(ExceptionReturn)
+ test(ExceptionUncaughtException, true)
+}
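t7181 enumerates every exit path from a try body and its exception handler, checking that the finally block runs on each of them, including return and uncaught exceptions. A minimal sketch (hypothetical FinallyDemo, not part of the test) of the least obvious case, a return from inside try:

object FinallyDemo extends App {
  def exitWithReturn(): Int =
    try {
      println("in try")
      return 1 // finally still runs before the method actually returns
    } finally {
      println("in finally")
    }

  println(exitWithReturn()) // prints: in try, in finally, 1
}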
diff --git a/test/files/run/t7185.check b/test/files/run/t7185.check
index 455c1aa3b7..f95910ba5f 100644
--- a/test/files/run/t7185.check
+++ b/test/files/run/t7185.check
@@ -10,7 +10,7 @@ scala> import scala.reflect.runtime.universe._
import scala.reflect.runtime.universe._
scala> object O { def apply() = 0 }
-defined module O
+defined object O
scala> val ORef = reify { O }.tree
ORef: reflect.runtime.universe.Tree = $read.O
diff --git a/test/files/run/t7231.check b/test/files/run/t7231.check
new file mode 100644
index 0000000000..c1e4b6c175
--- /dev/null
+++ b/test/files/run/t7231.check
@@ -0,0 +1,2 @@
+null
+null
diff --git a/test/files/run/t7231.scala b/test/files/run/t7231.scala
new file mode 100644
index 0000000000..7d6bc81f3f
--- /dev/null
+++ b/test/files/run/t7231.scala
@@ -0,0 +1,11 @@
+object Test extends App {
+ val bar: Null = null
+
+ def foo(x: Array[Int]) = x
+ def baz(x: String) = x
+
+ // first line was failing
+ println(foo(bar))
+ // this line worked but good to have a double check
+ println(baz(bar))
+} \ No newline at end of file
diff --git a/test/files/run/tailcalls.scala b/test/files/run/tailcalls.scala
index 04a1a8ba19..7d06a7e69d 100644
--- a/test/files/run/tailcalls.scala
+++ b/test/files/run/tailcalls.scala
@@ -194,10 +194,10 @@ object FancyTailCalls {
}
object PolyObject extends App {
- def tramp[A](x: Int): Int =
+ def tramp[A](x: Int): Int =
if (x > 0)
tramp[A](x - 1)
- else
+ else
0
}
@@ -233,7 +233,7 @@ class NonTailCall {
if (n == 0) 0
else f2(n - 1)
}
-
+
}
//############################################################################
@@ -273,7 +273,7 @@ object Test {
}
println
}
-
+
def check_overflow(name: String, closure: => Int) {
print("test " + name)
try {
@@ -295,7 +295,7 @@ object Test {
while (!stop) {
try {
calibrator.f(n, n);
- if (n >= Int.MaxValue / 2) error("calibration failure");
+ if (n >= Int.MaxValue / 2) sys.error("calibration failure");
n = 2 * n;
} catch {
case exception: compat.Platform.StackOverflowError => stop = true
@@ -367,7 +367,7 @@ object Test {
check_success("TailCall.g3", TailCall.g3(max, max, Nil), 0)
check_success("TailCall.h1", TailCall.h1(max, max ), 0)
println
-
+
val NonTailCall = new NonTailCall
check_success("NonTailCall.f1", NonTailCall.f1(2), 0)
check_overflow("NonTailCall.f2", NonTailCall.f2(max))
@@ -382,17 +382,17 @@ object Test {
}
// testing explicit tailcalls.
-
+
import scala.util.control.TailCalls._
def isEven(xs: List[Int]): TailRec[Boolean] =
if (xs.isEmpty) done(true) else tailcall(isOdd(xs.tail))
def isOdd(xs: List[Int]): TailRec[Boolean] =
- if (xs.isEmpty) done(false) else tailcall(isEven(xs.tail))
+ if (xs.isEmpty) done(false) else tailcall(isEven(xs.tail))
assert(isEven((1 to 100000).toList).result)
-
+
}
//############################################################################
diff --git a/test/files/run/test-cpp.scala b/test/files/run/test-cpp.scala
index 5b3bc7b746..f9fa85c4d0 100644
--- a/test/files/run/test-cpp.scala
+++ b/test/files/run/test-cpp.scala
@@ -3,7 +3,7 @@
* in the copy-propagation performed before ClosureElimination.
*
* In the general case, the local variable 'l' is connected through
- * a alias chain with other local variables and at the end of the
+ * an alias chain with other local variables and at the end of the
* alias chain there may be a Value, call it 'v'.
*
* If 'v' is cheaper to access (it is a Deref(This) or Const(_)), then
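A brief source-level sketch of the alias-chain rewrite the comment above describes; the names 'before', 'after', 'v', 'm' and 'l' are illustrative, not taken from the test.

object CopyPropagationSketch {
  // Before the optimization: uses of 'l' reach the constant only through
  // an alias chain l -> m -> v, where 'v' is a Const and cheap to access.
  def before(): Int = {
    val v = 42
    val m = v
    val l = m
    l + 1
  }

  // After copy propagation the use of 'l' is replaced by the value at the
  // end of the chain, and the intermediate locals become dead code.
  def after(): Int = 42 + 1
}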
diff --git a/test/files/run/toolbox_typecheck_implicitsdisabled.check b/test/files/run/toolbox_typecheck_implicitsdisabled.check
index db64e118ca..009ba651fe 100644
--- a/test/files/run/toolbox_typecheck_implicitsdisabled.check
+++ b/test/files/run/toolbox_typecheck_implicitsdisabled.check
@@ -1,5 +1,5 @@
{
import scala.Predef._;
- scala.Predef.any2ArrowAssoc[Int](1).->[Int](2)
+ scala.Predef.ArrowAssoc[Int](1).->[Int](2)
}
scala.tools.reflect.ToolBoxError: reflective typecheck has failed: value -> is not a member of Int
diff --git a/test/files/run/toolbox_typecheck_implicitsdisabled.scala b/test/files/run/toolbox_typecheck_implicitsdisabled.scala
index 8a3d433142..95a7056279 100644
--- a/test/files/run/toolbox_typecheck_implicitsdisabled.scala
+++ b/test/files/run/toolbox_typecheck_implicitsdisabled.scala
@@ -7,16 +7,16 @@ object Test extends App {
val toolbox = cm.mkToolBox()
val tree1 = Block(List(
- Import(Select(Ident(newTermName("scala")), newTermName("Predef")), List(ImportSelector(nme.WILDCARD, -1, null, -1)))),
- Apply(Select(Literal(Constant(1)), newTermName("$minus$greater")), List(Literal(Constant(2))))
+ Import(Select(Ident(TermName("scala")), TermName("Predef")), List(ImportSelector(nme.WILDCARD, -1, null, -1)))),
+ Apply(Select(Literal(Constant(1)), TermName("$minus$greater")), List(Literal(Constant(2))))
)
val ttree1 = toolbox.typeCheck(tree1, withImplicitViewsDisabled = false)
println(ttree1)
try {
val tree2 = Block(List(
- Import(Select(Ident(newTermName("scala")), newTermName("Predef")), List(ImportSelector(nme.WILDCARD, -1, null, -1)))),
- Apply(Select(Literal(Constant(1)), newTermName("$minus$greater")), List(Literal(Constant(2))))
+ Import(Select(Ident(TermName("scala")), TermName("Predef")), List(ImportSelector(nme.WILDCARD, -1, null, -1)))),
+ Apply(Select(Literal(Constant(1)), TermName("$minus$greater")), List(Literal(Constant(2))))
)
val ttree2 = toolbox.typeCheck(tree2, withImplicitViewsDisabled = true)
println(ttree2)
diff --git a/test/files/run/toolbox_typecheck_macrosdisabled.scala b/test/files/run/toolbox_typecheck_macrosdisabled.scala
index 51eb63f294..1f7fda8575 100644
--- a/test/files/run/toolbox_typecheck_macrosdisabled.scala
+++ b/test/files/run/toolbox_typecheck_macrosdisabled.scala
@@ -15,11 +15,11 @@ object Test extends App {
val ru = build.newFreeTerm("ru", scala.reflect.runtime.universe)
build.setTypeSignature(ru, rutpe)
- val tree1 = Apply(Select(Ident(ru), newTermName("reify")), List(Literal(Constant(2))))
+ val tree1 = Apply(Select(Ident(ru), TermName("reify")), List(Literal(Constant(2))))
val ttree1 = toolbox.typeCheck(tree1, withMacrosDisabled = false)
println(ttree1)
- val tree2 = Apply(Select(Ident(ru), newTermName("reify")), List(Literal(Constant(2))))
+ val tree2 = Apply(Select(Ident(ru), TermName("reify")), List(Literal(Constant(2))))
val ttree2 = toolbox.typeCheck(tree2, withMacrosDisabled = true)
println(ttree2)
}
diff --git a/test/files/run/toolbox_typecheck_macrosdisabled2.check b/test/files/run/toolbox_typecheck_macrosdisabled2.check
index e7011d1ae2..bdcdb421fd 100644
--- a/test/files/run/toolbox_typecheck_macrosdisabled2.check
+++ b/test/files/run/toolbox_typecheck_macrosdisabled2.check
@@ -19,7 +19,7 @@
def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
val $u: U = $m$untyped.universe;
val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
- $u.Apply.apply($u.Select.apply($u.build.Ident($m.staticModule("scala.Array")), $u.newTermName("apply")), scala.collection.immutable.List.apply[$u.Literal]($u.Literal.apply($u.Constant.apply(2))))
+ $u.Apply.apply($u.Select.apply($u.build.Ident($m.staticModule("scala.Array")), $u.TermName.apply("apply")), scala.collection.immutable.List.apply[$u.Literal]($u.Literal.apply($u.Constant.apply(2))))
}
};
new $treecreator1()
diff --git a/test/files/run/toolbox_typecheck_macrosdisabled2.scala b/test/files/run/toolbox_typecheck_macrosdisabled2.scala
index 74fd09d9fd..7bfe3ba8a4 100644
--- a/test/files/run/toolbox_typecheck_macrosdisabled2.scala
+++ b/test/files/run/toolbox_typecheck_macrosdisabled2.scala
@@ -15,11 +15,11 @@ object Test extends App {
val ru = build.newFreeTerm("ru", scala.reflect.runtime.universe)
build.setTypeSignature(ru, rutpe)
- val tree1 = Apply(Select(Ident(ru), newTermName("reify")), List(Apply(Select(Ident(newTermName("scala")), newTermName("Array")), List(Literal(Constant(2))))))
+ val tree1 = Apply(Select(Ident(ru), TermName("reify")), List(Apply(Select(Ident(TermName("scala")), TermName("Array")), List(Literal(Constant(2))))))
val ttree1 = toolbox.typeCheck(tree1, withMacrosDisabled = false)
println(ttree1)
- val tree2 = Apply(Select(Ident(ru), newTermName("reify")), List(Apply(Select(Ident(newTermName("scala")), newTermName("Array")), List(Literal(Constant(2))))))
+ val tree2 = Apply(Select(Ident(ru), TermName("reify")), List(Apply(Select(Ident(TermName("scala")), TermName("Array")), List(Literal(Constant(2))))))
val ttree2 = toolbox.typeCheck(tree2, withMacrosDisabled = true)
println(ttree2)
}
diff --git a/test/files/run/try-2.scala b/test/files/run/try-2.scala
index 677f0b48eb..da321f2668 100644
--- a/test/files/run/try-2.scala
+++ b/test/files/run/try-2.scala
@@ -7,7 +7,7 @@
object Test {
- def tryAllUnit: Unit =
+ def tryAllUnit: Unit =
try {
throw new Error();
}
@@ -15,28 +15,28 @@ object Test {
case _ => Console.println("exception happened\n");
}
- def tryUnitAll: Unit =
+ def tryUnitAll: Unit =
try {
Console.println("Nothin");
} catch {
- case _ => error("Bad, bad, lama!");
+ case _ => sys.error("Bad, bad, lama!");
}
- def tryAllAll: Unit =
+ def tryAllAll: Unit =
try {
throw new Error();
} catch {
- case _ => error("Bad, bad, lama!");
+ case _ => sys.error("Bad, bad, lama!");
}
- def tryUnitUnit: Unit =
+ def tryUnitUnit: Unit =
try {
Console.println("Nothin");
} catch {
case _ => Console.println("Nothin");
}
- def tryIntUnit: Unit =
+ def tryIntUnit: Unit =
try {
10;
} catch {
@@ -55,7 +55,7 @@ object Test {
execute(tryAllUnit);
execute(tryUnitAll);
execute(tryAllAll);
- execute(tryUnitUnit);
+ execute(tryUnitUnit);
execute(tryIntUnit);
}
}
diff --git a/test/files/run/try.scala b/test/files/run/try.scala
index ad3d606246..e393c0b4b1 100644
--- a/test/files/run/try.scala
+++ b/test/files/run/try.scala
@@ -17,8 +17,8 @@ object Test extends AnyRef with App {
Console.println(
(try { x } catch {
case _: Error => 1;
- })
- +
+ })
+ +
(try { x } catch {
case _: Error => 1;
})
@@ -61,13 +61,13 @@ object Test extends AnyRef with App {
Console.print("1 + 1 = ");
try {
if (true)
- error("exit");
+ sys.error("exit");
1+1;
()
} catch {
case _ =>
Console.println("2");
- error("for good");
+ sys.error("for good");
}
Console.println("a");
} catch {
@@ -116,7 +116,7 @@ object Test extends AnyRef with App {
}
*/
-
+
try1;
try2;
try3;
diff --git a/test/files/run/unreachable.scala b/test/files/run/unreachable.scala
new file mode 100644
index 0000000000..50a8d88b7c
--- /dev/null
+++ b/test/files/run/unreachable.scala
@@ -0,0 +1,125 @@
+object Test extends App {
+ def unreachableNormalExit: Int = {
+ return 42
+ 0
+ }
+
+ def unreachableIf: Int = {
+ return 42
+ if (util.Random.nextInt % 2 == 0)
+ 0
+ else
+ 1
+ }
+
+ def unreachableIfBranches: Int = {
+ if (util.Random.nextInt % 2 == 0)
+ return 42
+ else
+ return 42
+
+ return 0
+ }
+
+ def unreachableOneLegIf: Int = {
+ if (util.Random.nextInt % 2 == 0)
+ return 42
+
+ return 42
+ }
+
+ def unreachableLeftBranch: Int = {
+ val result = if (util.Random.nextInt % 2 == 0)
+ return 42
+ else
+ 42
+
+ return result
+ }
+
+ def unreachableRightBranch: Int = {
+ val result = if (util.Random.nextInt % 2 == 0)
+ 42
+ else
+ return 42
+
+ return result
+ }
+
+ def unreachableTryCatchFinally: Int = {
+ return 42
+ try {
+ return 0
+ } catch {
+ case x: Throwable => return 1
+ } finally {
+ return 2
+ }
+ return 3
+ }
+
+ def unreachableAfterTry: Int = {
+ try {
+ return 42
+ } catch {
+ case x: Throwable => return 2
+ }
+ return 3
+ }
+
+ def unreachableAfterCatch: Int = {
+ try {
+ error("haha")
+ } catch {
+ case x: Throwable => return 42
+ }
+ return 3
+ }
+
+ def unreachableAfterFinally: Int = {
+ try {
+ return 1
+ } catch {
+ case x: Throwable => return 2
+ } finally {
+ return 42
+ }
+ return 3
+ }
+
+ def unreachableSwitch: Int = {
+ return 42
+ val x = util.Random.nextInt % 2
+ x match {
+ case 0 => return 0
+ case 1 => return 1
+ case -1 => return 2
+ }
+ 3
+ }
+
+ def unreachableAfterSwitch: Int = {
+ val x = util.Random.nextInt % 2
+ x match {
+ case 0 => return 42
+ case 1 => return 41 + x
+ case -1 => return 43 + x
+ }
+ 2
+ }
+
+ def check(f: Int) = assert(f == 42, s"Expected 42 but got $f")
+
+ check(unreachableNormalExit)
+ check(unreachableIf)
+ check(unreachableIfBranches)
+ check(unreachableOneLegIf)
+ check(unreachableLeftBranch)
+ check(unreachableRightBranch)
+ check(unreachableTryCatchFinally)
+ check(unreachableAfterTry)
+ check(unreachableAfterCatch)
+ check(unreachableAfterFinally)
+ check(unreachableSwitch)
+ check(unreachableAfterSwitch)
+} \ No newline at end of file
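A short sketch, separate from the test above, of the rule that makes unreachableTryCatchFinally and unreachableAfterFinally return 42: a 'return' executed inside a finally block supersedes any result the try or catch body was about to return. The name FinallyWinsSketch is illustrative only.

object FinallyWinsSketch extends App {
  def f(): Int = {
    try {
      return 1      // pending result...
    } finally {
      return 42     // ...discarded: the return in finally wins
    }
  }
  println(f())      // prints 42
}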
diff --git a/test/files/run/verify-ctor.scala b/test/files/run/verify-ctor.scala
index 17e4f71be5..528d038a8e 100644
--- a/test/files/run/verify-ctor.scala
+++ b/test/files/run/verify-ctor.scala
@@ -1,6 +1,6 @@
class Foo(val str: String) {
def this(arr: Array[Char]) = this({
- if (arr.length == 0) exit(1)
+ if (arr.length == 0) sys.exit(1)
new String(arr)
})
}
diff --git a/test/files/scalacheck/CheckEither.scala b/test/files/scalacheck/CheckEither.scala
index 4e8480d72e..4d0cab4693 100644
--- a/test/files/scalacheck/CheckEither.scala
+++ b/test/files/scalacheck/CheckEither.scala
@@ -8,18 +8,18 @@ import org.scalacheck.ConsoleReporter.testStatsEx
import Function.tupled
object Test extends Properties("Either") {
- implicit def arbitraryEither[X, Y](implicit xa: Arbitrary[X], ya: Arbitrary[Y]): Arbitrary[Either[X, Y]] =
+ implicit def arbitraryEither[X, Y](implicit xa: Arbitrary[X], ya: Arbitrary[Y]): Arbitrary[Either[X, Y]] =
Arbitrary[Either[X, Y]](oneOf(arbitrary[X].map(Left(_)), arbitrary[Y].map(Right(_))))
- val prop_either1 = forAll((n: Int) => Left(n).fold(x => x, b => error("fail")) == n)
+ val prop_either1 = forAll((n: Int) => Left(n).fold(x => x, b => sys.error("fail")) == n)
- val prop_either2 = forAll((n: Int) => Right(n).fold(a => error("fail"), x => x) == n)
+ val prop_either2 = forAll((n: Int) => Right(n).fold(a => sys.error("fail"), x => x) == n)
val prop_swap = forAll((e: Either[Int, Int]) => e match {
case Left(a) => e.swap.right.get == a
case Right(b) => e.swap.left.get == b
})
-
+
val prop_isLeftRight = forAll((e: Either[Int, Int]) => e.isLeft != e.isRight)
object CheckLeftProjection {
@@ -35,7 +35,7 @@ object Test extends Properties("Either") {
val prop_exists = forAll((e: Either[Int, Int]) =>
e.left.exists(_ % 2 == 0) == (e.isLeft && e.left.get % 2 == 0))
-
+
val prop_flatMapLeftIdentity = forAll((e: Either[Int, Int], n: Int, s: String) => {
def f(x: Int) = if(x % 2 == 0) Left(s) else Right(s)
Left(n).left.flatMap(f(_)) == f(n)})
@@ -115,7 +115,7 @@ object Test extends Properties("Either") {
}
val prop_Either_left = forAll((n: Int) => Left(n).left.get == n)
-
+
val prop_Either_right = forAll((n: Int) => Right(n).right.get == n)
val prop_Either_joinLeft = forAll((e: Either[Either[Int, Int], Int]) => e match {
@@ -128,12 +128,12 @@ object Test extends Properties("Either") {
case Right(ee) => e.joinRight == ee
})
- val prop_Either_reduce = forAll((e: Either[Int, Int]) =>
+ val prop_Either_reduce = forAll((e: Either[Int, Int]) =>
e.merge == (e match {
case Left(a) => a
case Right(a) => a
}))
-
+
/** Hard to believe I'm "fixing" a test to reflect B before A ... */
val prop_Either_cond = forAll((c: Boolean, a: Int, b: Int) =>
Either.cond(c, a, b) == (if(c) Right(a) else Left(b)))
@@ -168,19 +168,19 @@ object Test extends Properties("Either") {
("Right.prop_seq", CheckRightProjection.prop_seq),
("Right.prop_option", CheckRightProjection.prop_option),
("prop_Either_left", prop_Either_left),
- ("prop_Either_right", prop_Either_right),
+ ("prop_Either_right", prop_Either_right),
("prop_Either_joinLeft", prop_Either_joinLeft),
- ("prop_Either_joinRight", prop_Either_joinRight),
- ("prop_Either_reduce", prop_Either_reduce),
+ ("prop_Either_joinRight", prop_Either_joinRight),
+ ("prop_Either_reduce", prop_Either_reduce),
("prop_Either_cond", prop_Either_cond)
)
-
+
for ((label, prop) <- tests) {
property(label) = prop
}
-
+
import org.scalacheck.{ Test => STest }
-
+
def runTests() = {
STest.checkProperties(STest.Params(testCallback = ConsoleReporter(0)), this)
}
diff --git a/test/files/scalacheck/ReflectionExtractors.scala b/test/files/scalacheck/ReflectionExtractors.scala
new file mode 100644
index 0000000000..a2615feb3e
--- /dev/null
+++ b/test/files/scalacheck/ReflectionExtractors.scala
@@ -0,0 +1,52 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+
+import scala.reflect.runtime.universe._
+import Flag._
+
+object Test extends Properties("reflection extractors") {
+
+ val genFlag = oneOf(
+ TRAIT, INTERFACE, MUTABLE, MACRO, DEFERRED, ABSTRACT, FINAL, SEALED,
+ IMPLICIT, LAZY, OVERRIDE, PRIVATE, PROTECTED, LOCAL, CASE, ABSOVERRIDE,
+ BYNAMEPARAM, PARAM, COVARIANT, CONTRAVARIANT, DEFAULTPARAM, PRESUPER,
+ DEFAULTINIT
+ )
+ val genModifiers =
+ for(flag <- genFlag; privateWithin <- genName)
+ yield Modifiers(flag, privateWithin, Nil)
+ val genTermName = for(name <- arbitrary[String]) yield TermName(name)
+ val genTypeName = for(name <- arbitrary[String]) yield TypeName(name)
+ val genName = oneOf(genTermName, genTypeName)
+
+ implicit val arbTermName: Arbitrary[TermName] = Arbitrary(genTermName)
+ implicit val arbTypeName: Arbitrary[TypeName] = Arbitrary(genTypeName)
+ implicit val arbName: Arbitrary[Name] = Arbitrary(genName)
+ implicit val arbMods: Arbitrary[Modifiers] = Arbitrary(genModifiers)
+
+ property("extract term name") = forAll { (name: TermName) =>
+ val TermName(s) = name
+ s == name.toString
+ }
+
+ property("extract type name") = forAll { (name: TypeName) =>
+ val TypeName(s) = name
+ s == name.toString
+ }
+
+ property("extract term or type name") = forAll { (name: Name) =>
+ name match {
+ case TermName(s) => s == name.toString
+ case TypeName(s) => s == name.toString
+ }
+ }
+
+ property("extract modifiers") = forAll { (mods: Modifiers) =>
+ val Modifiers(flags, priv, annots) = mods
+ flags == mods.flags &&
+ priv == mods.privateWithin &&
+ annots == mods.annotations
+ }
+} \ No newline at end of file
diff --git a/test/files/scalacheck/redblack.scala b/test/files/scalacheck/redblack.scala
deleted file mode 100644
index bbc6504f58..0000000000
--- a/test/files/scalacheck/redblack.scala
+++ /dev/null
@@ -1,213 +0,0 @@
-import org.scalacheck._
-import Prop._
-import Gen._
-
-/*
-Properties of a Red & Black Tree:
-
-A node is either red or black.
-The root is black. (This rule is used in some definitions and not others. Since the
-root can always be changed from red to black but not necessarily vice-versa this
-rule has little effect on analysis.)
-All leaves are black.
-Both children of every red node are black.
-Every simple path from a given node to any of its descendant leaves contains the same number of black nodes.
-*/
-
-abstract class RedBlackTest extends Properties("RedBlack") {
- def minimumSize = 0
- def maximumSize = 5
-
- object RedBlackTest extends scala.collection.immutable.RedBlack[String] {
- def isSmaller(x: String, y: String) = x < y
- }
-
- import RedBlackTest._
-
- def nodeAt[A](tree: Tree[A], n: Int): Option[(String, A)] = if (n < tree.iterator.size && n >= 0)
- Some(tree.iterator.drop(n).next)
- else
- None
-
- def treeContains[A](tree: Tree[A], key: String) = tree.iterator.map(_._1) contains key
-
- def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Tree[Int]] =
- if (level == 0) {
- value(Empty)
- } else {
- for {
- oddOrEven <- choose(0, 2)
- tryRed = oddOrEven.sample.get % 2 == 0 // work around arbitrary[Boolean] bug
- isRed = parentIsBlack && tryRed
- nextLevel = if (isRed) level else level - 1
- left <- mkTree(nextLevel, !isRed, label + "L")
- right <- mkTree(nextLevel, !isRed, label + "R")
- } yield {
- if (isRed)
- RedTree(label + "N", 0, left, right)
- else
- BlackTree(label + "N", 0, left, right)
- }
- }
-
- def genTree = for {
- depth <- choose(minimumSize, maximumSize + 1)
- tree <- mkTree(depth)
- } yield tree
-
- type ModifyParm
- def genParm(tree: Tree[Int]): Gen[ModifyParm]
- def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int]
-
- def genInput: Gen[(Tree[Int], ModifyParm, Tree[Int])] = for {
- tree <- genTree
- parm <- genParm(tree)
- } yield (tree, parm, modify(tree, parm))
-}
-
-trait RedBlackInvariants {
- self: RedBlackTest =>
-
- import RedBlackTest._
-
- def rootIsBlack[A](t: Tree[A]) = t.isBlack
-
- def areAllLeavesBlack[A](t: Tree[A]): Boolean = t match {
- case Empty => t.isBlack
- case ne: NonEmpty[_] => List(ne.left, ne.right) forall areAllLeavesBlack
- }
-
- def areRedNodeChildrenBlack[A](t: Tree[A]): Boolean = t match {
- case RedTree(_, _, left, right) => List(left, right) forall (t => t.isBlack && areRedNodeChildrenBlack(t))
- case BlackTree(_, _, left, right) => List(left, right) forall areRedNodeChildrenBlack
- case Empty => true
- }
-
- def blackNodesToLeaves[A](t: Tree[A]): List[Int] = t match {
- case Empty => List(1)
- case BlackTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves map (_ + 1)
- case RedTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves
- }
-
- def areBlackNodesToLeavesEqual[A](t: Tree[A]): Boolean = t match {
- case Empty => true
- case ne: NonEmpty[_] =>
- (
- blackNodesToLeaves(ne).distinct.size == 1
- && areBlackNodesToLeavesEqual(ne.left)
- && areBlackNodesToLeavesEqual(ne.right)
- )
- }
-
- def orderIsPreserved[A](t: Tree[A]): Boolean =
- t.iterator zip t.iterator.drop(1) forall { case (x, y) => isSmaller(x._1, y._1) }
-
- def setup(invariant: Tree[Int] => Boolean) = forAll(genInput) { case (tree, parm, newTree) =>
- invariant(newTree)
- }
-
- property("root is black") = setup(rootIsBlack)
- property("all leaves are black") = setup(areAllLeavesBlack)
- property("children of red nodes are black") = setup(areRedNodeChildrenBlack)
- property("black nodes are balanced") = setup(areBlackNodesToLeavesEqual)
- property("ordering of keys is preserved") = setup(orderIsPreserved)
-}
-
-object TestInsert extends RedBlackTest with RedBlackInvariants {
- import RedBlackTest._
-
- override type ModifyParm = Int
- override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size + 1)
- override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = tree update (generateKey(tree, parm), 0)
-
- def generateKey(tree: Tree[Int], parm: ModifyParm): String = nodeAt(tree, parm) match {
- case Some((key, _)) => key.init.mkString + "MN"
- case None => nodeAt(tree, parm - 1) match {
- case Some((key, _)) => key.init.mkString + "RN"
- case None => "N"
- }
- }
-
- property("update adds elements") = forAll(genInput) { case (tree, parm, newTree) =>
- treeContains(newTree, generateKey(tree, parm))
- }
-}
-
-object TestModify extends RedBlackTest {
- import RedBlackTest._
-
- def newValue = 1
- override def minimumSize = 1
- override type ModifyParm = Int
- override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size)
- override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = nodeAt(tree, parm) map {
- case (key, _) => tree update (key, newValue)
- } getOrElse tree
-
- property("update modifies values") = forAll(genInput) { case (tree, parm, newTree) =>
- nodeAt(tree,parm) forall { case (key, _) =>
- newTree.iterator contains (key, newValue)
- }
- }
-}
-
-object TestDelete extends RedBlackTest with RedBlackInvariants {
- import RedBlackTest._
-
- override def minimumSize = 1
- override type ModifyParm = Int
- override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size)
- override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = nodeAt(tree, parm) map {
- case (key, _) => tree delete key
- } getOrElse tree
-
- property("delete removes elements") = forAll(genInput) { case (tree, parm, newTree) =>
- nodeAt(tree, parm) forall { case (key, _) =>
- !treeContains(newTree, key)
- }
- }
-}
-
-object TestRange extends RedBlackTest with RedBlackInvariants {
- import RedBlackTest._
-
- override type ModifyParm = (Option[Int], Option[Int])
- override def genParm(tree: Tree[Int]): Gen[ModifyParm] = for {
- from <- choose(0, tree.iterator.size)
- to <- choose(0, tree.iterator.size) suchThat (from <=)
- optionalFrom <- oneOf(Some(from), None, Some(from)) // Double Some(n) to get around a bug
- optionalTo <- oneOf(Some(to), None, Some(to)) // Double Some(n) to get around a bug
- } yield (optionalFrom, optionalTo)
-
- override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = {
- val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
- val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
- tree range (from, to)
- }
-
- property("range boundaries respected") = forAll(genInput) { case (tree, parm, newTree) =>
- val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
- val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
- ("lower boundary" |: (from forall ( key => newTree.iterator.map(_._1) forall (key <=)))) &&
- ("upper boundary" |: (to forall ( key => newTree.iterator.map(_._1) forall (key >))))
- }
-
- property("range returns all elements") = forAll(genInput) { case (tree, parm, newTree) =>
- val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
- val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
- val filteredTree = (tree.iterator
- .map(_._1)
- .filter(key => from forall (key >=))
- .filter(key => to forall (key <))
- .toList)
- filteredTree == newTree.iterator.map(_._1).toList
- }
-}
-
-object Test extends Properties("RedBlack") {
- include(TestInsert)
- include(TestModify)
- include(TestDelete)
- include(TestRange)
-}
-
diff --git a/test/flaky/pos/t2868.cmds b/test/flaky/pos/t2868.cmds
deleted file mode 100644
index ed8124a9e0..0000000000
--- a/test/flaky/pos/t2868.cmds
+++ /dev/null
@@ -1,3 +0,0 @@
-javac Jann.java Nest.java
-scalac pick_1.scala
-scalac test_2.scala
diff --git a/test/instrumented/library/scala/runtime/BoxesRunTime.java b/test/instrumented/library/scala/runtime/BoxesRunTime.java
index 172ed8ee14..673c047dfe 100644
--- a/test/instrumented/library/scala/runtime/BoxesRunTime.java
+++ b/test/instrumented/library/scala/runtime/BoxesRunTime.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/test/instrumented/library/scala/runtime/ScalaRunTime.scala b/test/instrumented/library/scala/runtime/ScalaRunTime.scala
index 5a3f83015f..d8d093e93b 100644
--- a/test/instrumented/library/scala/runtime/ScalaRunTime.scala
+++ b/test/instrumented/library/scala/runtime/ScalaRunTime.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/test/osgi/src/BasicReflection.scala b/test/osgi/src/BasicReflection.scala
index 8a0a05d531..68fedb7c83 100644
--- a/test/osgi/src/BasicReflection.scala
+++ b/test/osgi/src/BasicReflection.scala
@@ -1,10 +1,10 @@
package tools.test.osgi
package reflection
package basic
-
+
import org.junit.Assert._
import org.ops4j.pax.exam.CoreOptions._
-
+
import org.junit.Test
import org.junit.runner.RunWith
import org.ops4j.pax.exam
@@ -41,15 +41,15 @@ object M
class BasicReflectionTest extends ScalaOsgiHelper {
@Configuration
- def config(): Array[exam.Option] =
+ def config(): Array[exam.Option] =
justReflectionOptions
// Ensure Pax-exam requires C/M in our module
def dummy = {
new C
- M.toString
+ M.toString
}
-
+
@Test
def basicMirrorThroughOsgi(): Unit = {
// Note for now just assert that we can do this stuff.
@@ -57,10 +57,10 @@ class BasicReflectionTest extends ScalaOsgiHelper {
val cm = runtimeMirror(classOf[C].getClassLoader)
val im = cm.reflect(new C)
assertEquals("Unable to reflect field name!",
- "value f1",
- im.reflectField(typeOf[C].member(newTermName("f1")).asTerm).symbol.toString)
+ "value f1",
+ im.reflectField(typeOf[C].member(TermName("f1")).asTerm).symbol.toString)
assertEquals("Unable to reflect value!",
- 2,
- im.reflectField(typeOf[C].member(newTermName("f1")).asTerm).get)
+ 2,
+ im.reflectField(typeOf[C].member(TermName("f1")).asTerm).get)
}
}
diff --git a/test/partest b/test/partest
index dd57137b21..9b0ab02fdc 100755
--- a/test/partest
+++ b/test/partest
@@ -3,7 +3,7 @@
##############################################################################
# Scala test runner 2.8.0
##############################################################################
-# (c) 2002-2011 LAMP/EPFL
+# (c) 2002-2013 LAMP/EPFL
#
# This is free software; see the distribution for copying conditions.
# There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A
@@ -70,21 +70,32 @@ if $cygwin; then
else
format=windows
fi
+ if [ -n "${JAVA_HOME}" ] ; then
+ JAVA_HOME=`cygpath --$format "$JAVA_HOME"`
+ fi
+ if [ -n "${JAVACMD}" ] ; then
+ JAVACMD=`cygpath --$format "$JAVACMD"`
+ fi
SCALA_HOME=`cygpath --$format "$SCALA_HOME"`
EXT_CLASSPATH=`cygpath --path --$format "$EXT_CLASSPATH"`
fi
-# last arg wins, so if JAVA_OPTS already contains one of these options
-# the supplied argument will be used.
+# last arg wins, so if JAVA_OPTS already contains -Xmx or -Xms the
+# supplied argument will be used.
# At this writing it is reported test/partest --all requires 108m permgen.
JAVA_OPTS="-Xmx1024M -Xms64M -XX:MaxPermSize=128M $JAVA_OPTS"
+# the ant task doesn't supply any options by default,
+# so don't do that here either -- note that you may want to pass -optimise
+# to mimic what happens during nightlies
+# [ -n "$SCALAC_OPTS" ] || SCALAC_OPTS="-deprecation"
+
partestDebugStr=""
if [ ! -z "${PARTEST_DEBUG}" ] ; then
partestDebugStr="-Dpartest.debug=${PARTEST_DEBUG}"
fi
-${JAVACMD:=java} \
+"${JAVACMD:=java}" \
$JAVA_OPTS -cp "$EXT_CLASSPATH" \
${partestDebugStr} \
-Dscala.home="${SCALA_HOME}" \
diff --git a/test/partest.bat b/test/partest.bat
index b64347ce13..1806e80888 100755
--- a/test/partest.bat
+++ b/test/partest.bat
@@ -3,7 +3,7 @@
rem ##########################################################################
rem # Scala code runner 2.9.1.final
rem ##########################################################################
-rem # (c) 2002-2011 LAMP/EPFL
+rem # (c) 2002-2013 LAMP/EPFL
rem #
rem # This is free software; see the distribution for copying conditions.
rem # There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A
diff --git a/test/pending/jvm/cf-attributes.scala b/test/pending/jvm/cf-attributes.scala
index 9e0e9d95de..f4964b63b1 100644
--- a/test/pending/jvm/cf-attributes.scala
+++ b/test/pending/jvm/cf-attributes.scala
@@ -52,14 +52,14 @@ object anonymousFunctions {
}
object anonymousClasses {
- //InnerClass:
+ //InnerClass:
// public abstract #_= #_ of #_; //Foo=class anonymousClasses$Foo of class anonymousClasses$
// public abstract #_= #_ of #_; //Foo$class=class anonymousClasses$Foo$class of class anonymousClasses$
trait Foo {
def foo() { println("foo"); }
override def toString = getClass.getName
}
- //InnerClass:
+ //InnerClass:
// public final #_; //class anonymousClasses$$anon$1 of class anonymousClasses$
val x = new Foo() {
override def foo() { println("foo (overriden)"); }
@@ -88,16 +88,16 @@ trait Test1 {
trait Test2 {
@throws(classOf[Exception])
- def printInnerClasses(cls: Class[_]) {
- import java.io._, ch.epfl.lamp.fjbg._
- val fjbgContext = new FJBGContext(49, 0)
- val outDir = System.getProperty("partest.output", "cf-attributes.obj")
- val fileName = outDir+File.separator+cls.getName+".class"
- val in = new DataInputStream(new FileInputStream(fileName))
- val jclass = fjbgContext.JClass(in)
- println(jclass.getInnerClasses)
- in.close()
- }
+ // def printInnerClasses(cls: Class[_]) {
+ // import java.io._, ch.epfl.lamp.fjbg._
+ // val fjbgContext = new FJBGContext(49, 0)
+ // val outDir = System.getProperty("partest.output", "cf-attributes.obj")
+ // val fileName = outDir+File.separator+cls.getName+".class"
+ // val in = new DataInputStream(new FileInputStream(fileName))
+ // val jclass = fjbgContext.JClass(in)
+ // println(jclass.getInnerClasses)
+ // in.close()
+ // }
def printClass(name: String) {
try { printClass(Class.forName(name)) }
catch { case e: Exception => println(e) }
@@ -105,7 +105,7 @@ trait Test2 {
def printClass(cls: Class[_]) {
println("\n[[ "+cls.getName+" ]]");
try { printInnerClasses(cls) }
- catch { case e: Exception => println(e) }
+ catch { case e: Exception => println(e) }
}
}
diff --git a/test/pending/pos/overloading-boundaries.scala b/test/pending/pos/overloading-boundaries.scala
new file mode 100644
index 0000000000..d2e9fdbb12
--- /dev/null
+++ b/test/pending/pos/overloading-boundaries.scala
@@ -0,0 +1,37 @@
+package bar {
+ object bippy extends (Double => String) {
+ def apply(x: Double): String = "Double"
+ }
+}
+
+package object bar {
+ def bippy(x: Int, y: Int, z: Int) = "(Int, Int, Int)"
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(bar.bippy(5.5d))
+ println(bar.bippy(1, 2, 3))
+ }
+}
+
+/****
+
+% scalac3 a.scala
+a.scala:13: error: not enough arguments for method bippy: (x: Int, y: Int, z: Int)String.
+Unspecified value parameters y, z.
+ println(bar.bippy(5.5d))
+ ^
+one error found
+
+# Comment out the call to bar.bippy(5.5d) - compiles
+% scalac3 a.scala
+
+# Compiles only from pure source though - if classes are present, fails.
+% scalac3 a.scala
+a.scala:2: error: bippy is already defined as method bippy in package object bar
+ object bippy extends (Double => String) {
+ ^
+one error found
+
+****/
diff --git a/test/pending/pos/t1751.cmds b/test/pending/pos/t1751.cmds
deleted file mode 100644
index d4a4898ffd..0000000000
--- a/test/pending/pos/t1751.cmds
+++ /dev/null
@@ -1,3 +0,0 @@
-javac SuiteClasses.java
-scalac A2_1.scala
-scalac A1_2.scala
diff --git a/test/pending/pos/t1782.cmds b/test/pending/pos/t1782.cmds
deleted file mode 100644
index 61f3d3788e..0000000000
--- a/test/pending/pos/t1782.cmds
+++ /dev/null
@@ -1,2 +0,0 @@
-javac Ann.java Days.java ImplementedBy.java
-scalac Test_1.scala
diff --git a/test/pending/pos/t1832.scala b/test/pending/pos/t1832.scala
deleted file mode 100644
index bca863f4bd..0000000000
--- a/test/pending/pos/t1832.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-// Edit by paulp: reduced.
-trait Cloning {
- trait Foo
- def fn(g: Int => Unit): Foo
-
- implicit def mkStar(i: Int) = new { def *(a: Foo): Foo = null }
-
- val pool1 = 4 * fn { case i => i * 2 }
- val pool2 = 4 * fn { case i: Int => i * 2 }
-}
diff --git a/test/pending/pos/t294.cmds b/test/pending/pos/t294.cmds
deleted file mode 100644
index 62c9a5a068..0000000000
--- a/test/pending/pos/t294.cmds
+++ /dev/null
@@ -1,3 +0,0 @@
-javac Ann.java Ann2.java
-scalac Test_1.scala
-scalac Test_2.scala
diff --git a/test/pending/pos/t4612.scala b/test/pending/pos/t4612.scala
new file mode 100644
index 0000000000..a93c12ef01
--- /dev/null
+++ b/test/pending/pos/t4612.scala
@@ -0,0 +1,15 @@
+class CyclicReferenceCompilerBug {
+ trait Trait[A] {
+ def foo: A
+ }
+
+ class Class extends Trait[Class] {
+ def foo = new Class
+
+ trait OtherTrait extends Trait[OtherTrait] {
+ self: Class =>
+
+ def foo = new Class
+ }
+ }
+}
diff --git a/test/pending/pos/t4695/T_1.scala b/test/pending/pos/t4695/T_1.scala
new file mode 100644
index 0000000000..70fb1a7f21
--- /dev/null
+++ b/test/pending/pos/t4695/T_1.scala
@@ -0,0 +1,4 @@
+package foo
+
+class Bar { }
+package object Bar { }
diff --git a/test/pending/pos/t4695/T_2.scala b/test/pending/pos/t4695/T_2.scala
new file mode 100644
index 0000000000..70fb1a7f21
--- /dev/null
+++ b/test/pending/pos/t4695/T_2.scala
@@ -0,0 +1,4 @@
+package foo
+
+class Bar { }
+package object Bar { }
diff --git a/test/pending/pos/t4717.scala b/test/pending/pos/t4717.scala
deleted file mode 100644
index 7eaa3dd487..0000000000
--- a/test/pending/pos/t4717.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-trait Bounds[@specialized A] {
- // okay without `>: A`
- def x[B >: A]: Unit = new Bounds[B] {
- lazy val it = ??? // def or val okay
- it
- }
-} \ No newline at end of file
diff --git a/test/pending/pos/t5082.scala b/test/pending/pos/t5082.scala
new file mode 100644
index 0000000000..20a6cfc55f
--- /dev/null
+++ b/test/pending/pos/t5082.scala
@@ -0,0 +1,8 @@
+object Test {
+ sealed trait A
+ case object A1 extends A
+}
+
+trait Something[T]
+
+case class Test() extends Something[Test.A]
diff --git a/test/pending/pos/t5259.scala b/test/pending/pos/t5259.scala
deleted file mode 100644
index 317e28a9dc..0000000000
--- a/test/pending/pos/t5259.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-object DefaultArgBogusTypeMismatch {
-
- class A[T]
- class B {
- type T = this.type
- def m(implicit a : A[T] = new A[T]) = a
- }
-
- def newB = new B
- val a1 = newB.m // Bogus type mismatch
-
- val stableB = new B
- val a2 = stableB.m // OK
-}
diff --git a/test/pending/pos/t5399.scala b/test/pending/pos/t5399.scala
deleted file mode 100644
index 89caba39c1..0000000000
--- a/test/pending/pos/t5399.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-class Test {
- class A[T]
- class B[T](val a: A[T])
-
- case class CaseClass[T](x: T)
-
- def break(existB: B[_]) = CaseClass(existB.a) match { case CaseClass(_) => }
-}
diff --git a/test/pending/pos/t5459.scala b/test/pending/pos/t5459.scala
new file mode 100644
index 0000000000..971e6f896d
--- /dev/null
+++ b/test/pending/pos/t5459.scala
@@ -0,0 +1,48 @@
+trait A1
+trait A2
+trait A3
+trait L1 extends A1 with A2 with A3
+
+object Test {
+ trait T1[-A <: A1]
+ trait T2[-A >: L1]
+ trait T3[ A <: A1]
+ trait T4[ A >: L1]
+ trait T5[+A <: A1]
+ trait T6[+A >: L1]
+
+ def f1(x: T1[_]) = x
+ def f2(x: T2[_]) = x
+ def f3(x: T3[_]) = x
+ def f4(x: T4[_]) = x
+ def f5(x: T5[_]) = x
+ def f6(x: T6[_]) = x
+ // a.scala:22: error: type arguments [Any] do not conform to trait T5's type parameter bounds [+A <: A1]
+ // def f5(x: T5[_]) = x
+ // ^
+
+ def g1(x: T1[_ <: A1]) = x
+ def g2(x: T2[_ >: L1]) = x
+ def g3(x: T3[_ <: A1]) = x
+ def g4(x: T4[_ >: L1]) = x
+ def g5(x: T5[_ <: A1]) = x
+ def g6(x: T6[_ >: L1]) = x
+
+ def q1(x: T1[_ >: L1]) = x
+ def q2(x: T2[_ <: A1]) = x
+ def q3(x: T3[_ >: L1]) = x
+ def q4(x: T4[_ <: A1]) = x
+ def q5(x: T5[_ >: L1]) = x
+ def q6(x: T6[_ <: A1]) = x
+ // a.scala:41: error: type arguments [Any] do not conform to trait T5's type parameter bounds [+A <: A1]
+ // def q5(x: T5[_ >: L1]) = x
+ // ^
+ // two errors found
+
+ def h1(x: T1[_ >: L1 <: A1]) = x
+ def h2(x: T2[_ >: L1 <: A1]) = x
+ def h3(x: T3[_ >: L1 <: A1]) = x
+ def h4(x: T4[_ >: L1 <: A1]) = x
+ def h5(x: T5[_ >: L1 <: A1]) = x
+ def h6(x: T6[_ >: L1 <: A1]) = x
+}
diff --git a/test/pending/pos/t5626.scala b/test/pending/pos/t5626.scala
deleted file mode 100644
index 7ab3881827..0000000000
--- a/test/pending/pos/t5626.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-object Test {
- val blob0 = new {
- case class Foo(i : Int)
- }
- val foo0 = blob0.Foo(22)
-
- val blob1 = new {
- class Foo(i: Int)
- object Foo { def apply(i: Int): Foo = new Foo(i) }
- }
- val foo1 = blob1.Foo(22)
-}
diff --git a/test/pending/pos/t5654.scala b/test/pending/pos/t5654.scala
deleted file mode 100644
index eb711a5f37..0000000000
--- a/test/pending/pos/t5654.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-case class Bomb(a: Array[_])
-case class Bomb2(a: Array[T] forSome { type T })
-class Okay1(a: Array[_])
-case class Okay2(s: Seq[_]) \ No newline at end of file
diff --git a/test/pending/pos/t5877.scala b/test/pending/pos/t5877.scala
new file mode 100644
index 0000000000..b77605f7f2
--- /dev/null
+++ b/test/pending/pos/t5877.scala
@@ -0,0 +1,5 @@
+package foo { }
+
+package object foo {
+ implicit class Foo(val s: String) { }
+}
diff --git a/test/pending/pos/t5954/T_1.scala b/test/pending/pos/t5954/T_1.scala
new file mode 100644
index 0000000000..0064c596b6
--- /dev/null
+++ b/test/pending/pos/t5954/T_1.scala
@@ -0,0 +1,8 @@
+package p {
+ package base {
+ class X
+ }
+ package object base {
+ case class B()
+ }
+}
diff --git a/test/pending/pos/t5954/T_2.scala b/test/pending/pos/t5954/T_2.scala
new file mode 100644
index 0000000000..0064c596b6
--- /dev/null
+++ b/test/pending/pos/t5954/T_2.scala
@@ -0,0 +1,8 @@
+package p {
+ package base {
+ class X
+ }
+ package object base {
+ case class B()
+ }
+}
diff --git a/test/pending/pos/t5954/T_3.scala b/test/pending/pos/t5954/T_3.scala
new file mode 100644
index 0000000000..0064c596b6
--- /dev/null
+++ b/test/pending/pos/t5954/T_3.scala
@@ -0,0 +1,8 @@
+package p {
+ package base {
+ class X
+ }
+ package object base {
+ case class B()
+ }
+}
diff --git a/test/pending/pos/t6225.scala b/test/pending/pos/t6225.scala
new file mode 100644
index 0000000000..d7dff3c419
--- /dev/null
+++ b/test/pending/pos/t6225.scala
@@ -0,0 +1,11 @@
+package library.x {
+ class X {
+ class Foo
+ implicit val foo = new Foo
+ }
+}
+package library { package object x extends X }
+package app {
+ import library.x._
+ object App { implicitly[Foo] }
+}
diff --git a/test/pending/pos/those-kinds-are-high.scala b/test/pending/pos/those-kinds-are-high.scala
index 434e64cefb..78367cb746 100644
--- a/test/pending/pos/those-kinds-are-high.scala
+++ b/test/pending/pos/those-kinds-are-high.scala
@@ -4,18 +4,18 @@ class A {
class C1[T] extends Template[C1] with Container[T]
class C2[T] extends Template[C2] with Container[T]
-
+
/** Target expression:
* List(new C1[String], new C2[String])
*/
-
+
// Here's what would ideally be inferred.
//
// scala> :type List[Template[Container] with Container[String]](new C1[String], new C2[String])
// List[Template[Container] with Container[java.lang.String]]
//
// Here's what it does infer.
- //
+ //
// scala> :type List(new C1[String], new C2[String])
// <console>:8: error: type mismatch;
// found : C1[String]
@@ -43,11 +43,54 @@ class A {
// def fFail = List(new C1[String], new C2[String])
// ^
// two errors found
-
+
/** Working version explicitly typed.
*/
def fExplicit = List[Template[Container] with Container[String]](new C1[String], new C2[String])
-
+
// nope
def fFail = List(new C1[String], new C2[String])
}
+
+
+trait Other {
+ trait GenBar[+A]
+ trait Bar[+A] extends GenBar[A]
+ trait Templ[+A, +CC[X] <: GenBar[X]]
+
+ abstract class CC1[+A] extends Templ[A, CC1] with Bar[A]
+ abstract class CC2[+A] extends Templ[A, CC2] with Bar[A]
+
+ // Compiles
+ class A1 {
+ abstract class BarFactory[CC[X] <: Bar[X]]
+
+ def f(x: Boolean) = if (x) (null: BarFactory[CC1]) else (null: BarFactory[CC2])
+ }
+
+ // Fails - only difference is CC covariant.
+ class A2 {
+ abstract class BarFactory[+CC[X] <: Bar[X]]
+
+ def f(x: Boolean) = if (x) (null: BarFactory[CC1]) else (null: BarFactory[CC2])
+ // c.scala:23: error: kinds of the type arguments (Bar with Templ[Any,Bar]) do not conform to the expected kinds of the type parameters (type CC) in class BarFactory.
+ // Bar with Templ[Any,Bar]'s type parameters do not match type CC's expected parameters:
+ // <empty> has no type parameters, but type CC has one
+ // def f(x: Boolean) = if (x) (null: BarFactory[CC1]) else (null: BarFactory[CC2])
+ // ^
+ // one error found
+ }
+
+ // Compiles - CC contravariant.
+ class A3 {
+ abstract class BarFactory[-CC[X] <: Bar[X]] // with Templ[X, CC]]
+
+ def f(x: Boolean) = if (x) (null: BarFactory[CC1]) else (null: BarFactory[CC2])
+ // c.scala:23: error: kinds of the type arguments (Bar with Templ[Any,Bar]) do not conform to the expected kinds of the type parameters (type CC) in class BarFactory.
+ // Bar with Templ[Any,Bar]'s type parameters do not match type CC's expected parameters:
+ // <empty> has no type parameters, but type CC has one
+ // def f(x: Boolean) = if (x) (null: BarFactory[CC1]) else (null: BarFactory[CC2])
+ // ^
+ // one error found
+ }
+}
diff --git a/test/pending/pos/z1720.scala b/test/pending/pos/z1720.scala
deleted file mode 100644
index 6050f3ff88..0000000000
--- a/test/pending/pos/z1720.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package test
-
-class Thing {
- def info: Info[this.type] = InfoRepository.getInfo(this)
- def info2: Info[this.type] = {
- def self: this.type = this
- InfoRepository.getInfo(self)
- }
-}
-
-trait Info[T]
-case class InfoImpl[T](thing: T) extends Info[T]
-
-object InfoRepository {
- def getInfo(t: Thing): Info[t.type] = InfoImpl(t)
-} \ No newline at end of file
diff --git a/test/pending/run/hk-lub-fail.scala b/test/pending/run/hk-lub-fail.scala
index b58a86ee75..0ac4fdd841 100644
--- a/test/pending/run/hk-lub-fail.scala
+++ b/test/pending/run/hk-lub-fail.scala
@@ -25,12 +25,12 @@ object Test {
// A repl session to get you started.
/*
- val quux1 = EmptyPackageClass.tpe.member(newTermName("Quux1"))
- val quux2 = EmptyPackageClass.tpe.member(newTermName("Quux2"))
+ val quux1 = EmptyPackageClass.tpe.member(TermName("Quux1"))
+ val quux2 = EmptyPackageClass.tpe.member(TermName("Quux2"))
val tps = List(quux1, quux2) map (_.tpe)
- val test = EmptyPackageClass.tpe.member(newTermName("Test"))
- val f = test.tpe.member(newTypeName("F")).tpe
-
+ val test = EmptyPackageClass.tpe.member(TermName("Test"))
+ val f = test.tpe.member(TypeName("F")).tpe
+
val fn = f.normalize.asInstanceOf[ExistentialType]
val fn2 = fn.underlying.asInstanceOf[TypeRef]
*/
diff --git a/test/pending/run/macro-expand-default/Impls_1.scala b/test/pending/run/macro-expand-default/Impls_1.scala
index 7cf8d59c75..bb55f02ab2 100644
--- a/test/pending/run/macro-expand-default/Impls_1.scala
+++ b/test/pending/run/macro-expand-default/Impls_1.scala
@@ -3,8 +3,8 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx)(x: c.Expr[Int], y: c.Expr[Int]) = {
import c.universe._
- val sum = Apply(Select(x.tree, newTermName("$minus")), List(y.tree))
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(sum))
+ val sum = Apply(Select(x.tree, TermName("$minus")), List(y.tree))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(sum))
Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/pending/run/macro-expand-macro-has-context-bound/Impls_1.scala b/test/pending/run/macro-expand-macro-has-context-bound/Impls_1.scala
index be00fd0d8a..eed68280cd 100644
--- a/test/pending/run/macro-expand-macro-has-context-bound/Impls_1.scala
+++ b/test/pending/run/macro-expand-macro-has-context-bound/Impls_1.scala
@@ -3,8 +3,8 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo[U](c: Ctx)(x: c.Expr[U])(evidence: c.Expr[Numeric[U]]) = {
import c.universe._
- val plusOne = Apply(Select(evidence.tree, newTermName("plus")), List(x.tree, Literal(Constant(1))))
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(plusOne))
+ val plusOne = Apply(Select(evidence.tree, TermName("plus")), List(x.tree, Literal(Constant(1))))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(plusOne))
Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/pending/run/macro-expand-named/Impls_1.scala b/test/pending/run/macro-expand-named/Impls_1.scala
index 7cf8d59c75..bb55f02ab2 100644
--- a/test/pending/run/macro-expand-named/Impls_1.scala
+++ b/test/pending/run/macro-expand-named/Impls_1.scala
@@ -3,8 +3,8 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx)(x: c.Expr[Int], y: c.Expr[Int]) = {
import c.universe._
- val sum = Apply(Select(x.tree, newTermName("$minus")), List(y.tree))
- val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(sum))
+ val sum = Apply(Select(x.tree, TermName("$minus")), List(y.tree))
+ val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(sum))
Expr[Unit](body)
}
} \ No newline at end of file
diff --git a/test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala b/test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala
index 26de70cc12..487ac79673 100644
--- a/test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala
+++ b/test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala
@@ -4,9 +4,9 @@ object Impls {
def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
import c.universe._
Block(List(
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString)))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(V.toString))))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(T.toString)))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(V.toString))))),
Literal(Constant(())))
}
} \ No newline at end of file
diff --git a/test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala b/test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala
index 26de70cc12..487ac79673 100644
--- a/test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala
+++ b/test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala
@@ -4,9 +4,9 @@ object Impls {
def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
import c.universe._
Block(List(
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString)))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
- Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(V.toString))))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(T.toString)))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
+ Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(V.toString))))),
Literal(Constant(())))
}
} \ No newline at end of file
diff --git a/test/pending/run/macro-reify-tagless-b/Test_2.scala b/test/pending/run/macro-reify-tagless-b/Test_2.scala
index 10487b1515..09ca6ba30e 100644
--- a/test/pending/run/macro-reify-tagless-b/Test_2.scala
+++ b/test/pending/run/macro-reify-tagless-b/Test_2.scala
@@ -6,8 +6,8 @@ object Test extends App {
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect.ToolBox
val tpt = AppliedTypeTree(Ident(definitions.ListClass), List(Ident(definitions.StringClass)))
- val rhs = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant("hello world"))))
- val list = ValDef(NoMods, newTermName("list"), tpt, rhs)
- val tree = Block(list, Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Ident(list.name))))
+ val rhs = Apply(Select(Ident(TermName("Macros")), TermName("foo")), List(Literal(Constant("hello world"))))
+ val list = ValDef(NoMods, TermName("list"), tpt, rhs)
+ val tree = Block(list, Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Ident(list.name))))
println(cm.mkToolBox().eval(tree))
}
diff --git a/test/pending/run/t5427a.scala b/test/pending/run/t5427a.scala
index f7cd05158d..a7d20922db 100644
--- a/test/pending/run/t5427a.scala
+++ b/test/pending/run/t5427a.scala
@@ -4,7 +4,7 @@ object Foo { val bar = 2 }
object Test extends App {
val tpe = getType(Foo)
- val bar = tpe.nonPrivateMember(newTermName("bar"))
+ val bar = tpe.nonPrivateMember(TermName("bar"))
val value = getValue(Foo, bar)
println(value)
} \ No newline at end of file
diff --git a/test/pending/run/t5427b.scala b/test/pending/run/t5427b.scala
index e80bd12369..af1ae6ea2f 100644
--- a/test/pending/run/t5427b.scala
+++ b/test/pending/run/t5427b.scala
@@ -5,7 +5,7 @@ class Foo { val bar = 2 }
object Test extends App {
val foo = new Foo
val tpe = getType(foo)
- val bar = tpe.nonPrivateMember(newTermName("bar"))
+ val bar = tpe.nonPrivateMember(TermName("bar"))
val value = getValue(foo, bar)
println(value)
} \ No newline at end of file
diff --git a/test/pending/run/t5427c.scala b/test/pending/run/t5427c.scala
index 7095158e85..ba71803080 100644
--- a/test/pending/run/t5427c.scala
+++ b/test/pending/run/t5427c.scala
@@ -5,7 +5,7 @@ class Foo(bar: Int)
object Test extends App {
val foo = new Foo(2)
val tpe = getType(foo)
- val bar = tpe.nonPrivateMember(newTermName("bar"))
+ val bar = tpe.nonPrivateMember(TermName("bar"))
bar match {
case NoSymbol => println("no public member")
case _ => println("i'm screwed")
diff --git a/test/pending/run/t5427d.scala b/test/pending/run/t5427d.scala
index f0cc07d27e..1d37dbdde3 100644
--- a/test/pending/run/t5427d.scala
+++ b/test/pending/run/t5427d.scala
@@ -5,7 +5,7 @@ class Foo(val bar: Int)
object Test extends App {
val foo = new Foo(2)
val tpe = getType(foo)
- val bar = tpe.nonPrivateMember(newTermName("bar"))
+ val bar = tpe.nonPrivateMember(TermName("bar"))
val value = getValue(foo, bar)
println(value)
} \ No newline at end of file
diff --git a/test/postreview.py b/test/postreview.py
deleted file mode 100644
index 2e2518f7ee..0000000000
--- a/test/postreview.py
+++ /dev/null
@@ -1,2540 +0,0 @@
-#!/usr/bin/env python
-import cookielib
-import difflib
-import getpass
-import marshal
-import mimetools
-import ntpath
-import os
-import re
-import socket
-import stat
-import subprocess
-import sys
-import tempfile
-import urllib
-import urllib2
-from optparse import OptionParser
-from tempfile import mkstemp
-from urlparse import urljoin, urlparse
-
-try:
- from hashlib import md5
-except ImportError:
- # Support Python versions before 2.5.
- from md5 import md5
-
-try:
- import json
-except ImportError:
- import simplejson as json
-
-# This specific import is necessary to handle the paths for
-# cygwin enabled machines.
-if (sys.platform.startswith('win')
- or sys.platform.startswith('cygwin')):
- import ntpath as cpath
-else:
- import posixpath as cpath
-
-###
-# Default configuration -- user-settable variables follow.
-###
-
-# The following settings usually aren't needed, but if your Review
-# Board crew has specific preferences and doesn't want to express
-# them with command line switches, set them here and you're done.
-# In particular, setting the REVIEWBOARD_URL variable will allow
-# you to make it easy for people to submit reviews regardless of
-# their SCM setup.
-#
-# Note that in order for this script to work with a reviewboard site
-# that uses local paths to access a repository, the 'Mirror path'
-# in the repository setup page must be set to the remote URL of the
-# repository.
-
-#
-# Reviewboard URL.
-#
-# Set this if you wish to hard-code a default server to always use.
-# It's generally recommended to set this using your SCM repository
-# (for those that support it -- currently only SVN, Git, and Perforce).
-#
-# For example, on SVN:
-# $ svn propset reviewboard:url http://reviewboard.example.com .
-#
-# Or with Git:
-# $ git config reviewboard.url http://reviewboard.example.com
-#
-# On Perforce servers version 2008.1 and above:
-# $ p4 counter reviewboard.url http://reviewboard.example.com
-#
-# Older Perforce servers only allow numerical counters, so embedding
-# the url in the counter name is also supported:
-# $ p4 counter reviewboard.url.http:\|\|reviewboard.example.com 1
-#
-# Note that slashes are not allowed in Perforce counter names, so replace them
-# with pipe characters (they are a safe substitute as they are not used
-# unencoded in URLs). You may need to escape them when issuing the p4 counter
-# command as above.
-#
-# If this is not possible or desired, setting the value here will let
-# you get started quickly.
-#
-# For all other repositories, a .reviewboardrc file present at the top of
-# the checkout will also work. For example:
-#
-# $ cat .reviewboardrc
-# REVIEWBOARD_URL = "http://reviewboard.example.com"
-#
-REVIEWBOARD_URL = None
-
-# Default submission arguments. These are all optional; run this
-# script with --help for descriptions of each argument.
-TARGET_GROUPS = None
-TARGET_PEOPLE = None
-SUBMIT_AS = None
-PUBLISH = False
-OPEN_BROWSER = False
-
-# Debugging. For development...
-DEBUG = False
-
-###
-# End user-settable variables.
-###
-
-
-VERSION = "0.8"
-
-user_config = None
-tempfiles = []
-options = None
-
-
-class APIError(Exception):
- pass
-
-
-class RepositoryInfo:
- """
- A representation of a source code repository.
- """
- def __init__(self, path=None, base_path=None, supports_changesets=False,
- supports_parent_diffs=False):
- self.path = path
- self.base_path = base_path
- self.supports_changesets = supports_changesets
- self.supports_parent_diffs = supports_parent_diffs
- debug("repository info: %s" % self)
-
- def __str__(self):
- return "Path: %s, Base path: %s, Supports changesets: %s" % \
- (self.path, self.base_path, self.supports_changesets)
-
- def set_base_path(self, base_path):
- if not base_path.startswith('/'):
- base_path = '/' + base_path
- debug("changing repository info base_path from %s to %s" % \
- (self.base_path, base_path))
- self.base_path = base_path
-
- def find_server_repository_info(self, server):
- """
- Try to find the repository from the list of repositories on the server.
- For Subversion, this could be a repository with a different URL. For
- all other clients, this is a noop.
- """
- return self
-
-
-class SvnRepositoryInfo(RepositoryInfo):
- """
- A representation of a SVN source code repository. This version knows how to
- find a matching repository on the server even if the URLs differ.
- """
- def __init__(self, path, base_path, uuid, supports_parent_diffs=False):
- RepositoryInfo.__init__(self, path, base_path,
- supports_parent_diffs=supports_parent_diffs)
- self.uuid = uuid
-
- def find_server_repository_info(self, server):
- """
- The point of this function is to find a repository on the server that
- matches self, even if the paths aren't the same. (For example, if self
- uses an 'http' path, but the server uses a 'file' path for the same
- repository.) It does this by comparing repository UUIDs. If the
- repositories use the same path, you'll get back self, otherwise you'll
- get a different SvnRepositoryInfo object (with a different path).
- """
- repositories = server.get_repositories()
-
- for repository in repositories:
- if repository['tool'] != 'Subversion':
- continue
-
- info = self._get_repository_info(server, repository)
-
- if not info or self.uuid != info['uuid']:
- continue
-
- repos_base_path = info['url'][len(info['root_url']):]
- relpath = self._get_relative_path(self.base_path, repos_base_path)
- if relpath:
- return SvnRepositoryInfo(info['url'], relpath, self.uuid)
-
- # We didn't find a matching repository on the server. We'll just return
- # self and hope for the best.
- return self
-
- def _get_repository_info(self, server, repository):
- try:
- return server.get_repository_info(repository['id'])
- except APIError, e:
- # If the server couldn't fetch the repository info, it will return
- # code 210. Ignore those.
- # Other more serious errors should still be raised, though.
- rsp = e.args[0]
- if rsp['err']['code'] == 210:
- return None
-
- raise e
-
- def _get_relative_path(self, path, root):
- pathdirs = self._split_on_slash(path)
- rootdirs = self._split_on_slash(root)
-
- # root is empty, so anything relative to that is itself
- if len(rootdirs) == 0:
- return path
-
- # If one of the directories doesn't match, then path is not relative
- # to root.
-        if rootdirs != pathdirs[:len(rootdirs)]:
- return None
-
- # All the directories matched, so the relative path is whatever
- # directories are left over. The base_path can't be empty, though, so
- # if the paths are the same, return '/'
- if len(pathdirs) == len(rootdirs):
- return '/'
- else:
- return '/'.join(pathdirs[len(rootdirs):])
-
- def _split_on_slash(self, path):
- # Split on slashes, but ignore multiple slashes and throw away any
- # trailing slashes.
- split = re.split('/*', path)
- if split[-1] == '':
- split = split[0:-1]
- return split
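-
-    # For illustration (hypothetical paths): with a local base_path of
-    # '/trunk/proj' and a server-side base path of '/trunk',
-    # _get_relative_path() returns the leftover components joined with '/'
-    # ('proj' here); identical paths return '/'; and paths where the server
-    # base path is not a prefix return None, so find_server_repository_info()
-    # moves on to the next candidate repository.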
-
-
-class ReviewBoardHTTPPasswordMgr(urllib2.HTTPPasswordMgr):
- """
- Adds HTTP authentication support for URLs.
-
- Python 2.4's password manager has a bug in http authentication when the
- target server uses a non-standard port. This works around that bug on
- Python 2.4 installs. This also allows post-review to prompt for passwords
- in a consistent way.
-
- See: http://bugs.python.org/issue974757
- """
- def __init__(self, reviewboard_url):
- self.passwd = {}
- self.rb_url = reviewboard_url
- self.rb_user = None
- self.rb_pass = None
-
- def find_user_password(self, realm, uri):
- if uri.startswith(self.rb_url):
- if self.rb_user is None or self.rb_pass is None:
- print "==> HTTP Authentication Required"
- print 'Enter username and password for "%s" at %s' % \
- (realm, urlparse(uri)[1])
- self.rb_user = raw_input('Username: ')
- self.rb_pass = getpass.getpass('Password: ')
-
- return self.rb_user, self.rb_pass
- else:
- # If this is an auth request for some other domain (since HTTP
- # handlers are global), fall back to standard password management.
- return urllib2.HTTPPasswordMgr.find_user_password(self, realm, uri)
-
-
-class ReviewBoardServer(object):
- """
- An instance of a Review Board server.
- """
- def __init__(self, url, info, cookie_file):
- self.url = url
- if self.url[-1] != '/':
- self.url += '/'
- self._info = info
- self._server_info = None
- self.cookie_file = cookie_file
- self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
-
- # Set up the HTTP libraries to support all of the features we need.
- cookie_handler = urllib2.HTTPCookieProcessor(self.cookie_jar)
- password_mgr = ReviewBoardHTTPPasswordMgr(self.url)
- auth_handler = urllib2.HTTPBasicAuthHandler(password_mgr)
-
- opener = urllib2.build_opener(cookie_handler, auth_handler)
- opener.addheaders = [('User-agent', 'post-review/' + VERSION)]
- urllib2.install_opener(opener)
-
- def login(self, force=False):
- """
- Logs in to a Review Board server, prompting the user for login
- information if needed.
- """
- if not force and self.has_valid_cookie():
- return
-
- print "==> Review Board Login Required"
- print "Enter username and password for Review Board at %s" % self.url
- if options.username:
- username = options.username
- elif options.submit_as:
- username = options.submit_as
- else:
- username = raw_input('Username: ')
-
- if not options.password:
- password = getpass.getpass('Password: ')
- else:
- password = options.password
-
- debug('Logging in with username "%s"' % username)
- try:
- self.api_post('api/json/accounts/login/', {
- 'username': username,
- 'password': password,
- })
- except APIError, e:
- rsp, = e.args
-
- die("Unable to log in: %s (%s)" % (rsp["err"]["msg"],
- rsp["err"]["code"]))
-
- debug("Logged in.")
-
- def has_valid_cookie(self):
- """
- Load the user's cookie file and see if they have a valid
- 'rbsessionid' cookie for the current Review Board server. Returns
- true if so and false otherwise.
- """
- try:
- parsed_url = urlparse(self.url)
- host = parsed_url[1]
- path = parsed_url[2] or '/'
-
- # Cookie files don't store port numbers, unfortunately, so
- # get rid of the port number if it's present.
- host = host.split(":")[0]
-
- debug("Looking for '%s %s' cookie in %s" % \
- (host, path, self.cookie_file))
- self.cookie_jar.load(self.cookie_file, ignore_expires=True)
-
- try:
- cookie = self.cookie_jar._cookies[host][path]['rbsessionid']
-
- if not cookie.is_expired():
- debug("Loaded valid cookie -- no login required")
- return True
-
- debug("Cookie file loaded, but cookie has expired")
- except KeyError:
- debug("Cookie file loaded, but no cookie for this server")
- except IOError, error:
- debug("Couldn't load cookie file: %s" % error)
-
- return False
-
- def new_review_request(self, changenum, submit_as=None):
- """
- Creates a review request on a Review Board server, updating an
- existing one if the changeset number already exists.
-
- If submit_as is provided, the specified user name will be recorded as
- the submitter of the review request (given that the logged in user has
- the appropriate permissions).
- """
- try:
- debug("Attempting to create review request for %s" % changenum)
- data = { 'repository_path': self.info.path }
-
- if changenum:
- data['changenum'] = changenum
-
- if submit_as:
- debug("Submitting the review request as %s" % submit_as)
- data['submit_as'] = submit_as
-
- rsp = self.api_post('api/json/reviewrequests/new/', data)
- except APIError, e:
- rsp, = e.args
-
- if not options.diff_only:
- if rsp['err']['code'] == 204: # Change number in use
- debug("Review request already exists. Updating it...")
- rsp = self.api_post(
- 'api/json/reviewrequests/%s/update_from_changenum/' %
- rsp['review_request']['id'])
- else:
- raise e
-
- debug("Review request created")
- return rsp['review_request']
-
- def set_review_request_field(self, review_request, field, value):
- """
- Sets a field in a review request to the specified value.
- """
- rid = review_request['id']
-
- debug("Attempting to set field '%s' to '%s' for review request '%s'" %
- (field, value, rid))
-
- self.api_post('api/json/reviewrequests/%s/draft/set/' % rid, {
- field: value,
- })
-
- def get_review_request(self, rid):
- """
- Returns the review request with the specified ID.
- """
- rsp = self.api_get('api/json/reviewrequests/%s/' % rid)
- return rsp['review_request']
-
- def get_repositories(self):
- """
- Returns the list of repositories on this server.
- """
- rsp = self.api_get('/api/json/repositories/')
- return rsp['repositories']
-
- def get_repository_info(self, rid):
- """
- Returns detailed information about a specific repository.
- """
- rsp = self.api_get('/api/json/repositories/%s/info/' % rid)
- return rsp['info']
-
- def save_draft(self, review_request):
- """
- Saves a draft of a review request.
- """
- self.api_post("api/json/reviewrequests/%s/draft/save/" %
- review_request['id'])
- debug("Review request draft saved")
-
- def upload_diff(self, review_request, diff_content, parent_diff_content):
- """
- Uploads a diff to a Review Board server.
- """
- debug("Uploading diff, size: %d" % len(diff_content))
-
- if parent_diff_content:
- debug("Uploading parent diff, size: %d" % len(parent_diff_content))
-
- fields = {}
- files = {}
-
- if self.info.base_path:
- fields['basedir'] = self.info.base_path
-
- files['path'] = {
- 'filename': 'diff',
- 'content': diff_content
- }
-
- if parent_diff_content:
- files['parent_diff_path'] = {
- 'filename': 'parent_diff',
- 'content': parent_diff_content
- }
-
- self.api_post('api/json/reviewrequests/%s/diff/new/' %
- review_request['id'], fields, files)
-
- def publish(self, review_request):
- """
- Publishes a review request.
- """
- debug("Publishing")
- self.api_post('api/json/reviewrequests/%s/publish/' %
- review_request['id'])
-
- def _get_server_info(self):
- if not self._server_info:
- self._server_info = self._info.find_server_repository_info(self)
-
- return self._server_info
-
- info = property(_get_server_info)
-
- def process_json(self, data):
- """
-        Loads a JSON payload and returns the data if successful. On failure,
- APIError is raised.
- """
- rsp = json.loads(data)
-
- if rsp['stat'] == 'fail':
- raise APIError, rsp
-
- return rsp
-
- def http_get(self, path):
- """
- Performs an HTTP GET on the specified path, storing any cookies that
- were set.
- """
- debug('HTTP GETting %s' % path)
-
- url = self._make_url(path)
-
- try:
- rsp = urllib2.urlopen(url).read()
- self.cookie_jar.save(self.cookie_file)
- return rsp
- except urllib2.HTTPError, e:
- print "Unable to access %s (%s). The host path may be invalid" % \
- (url, e.code)
- try:
- debug(e.read())
- except AttributeError:
- pass
- die()
-
- def _make_url(self, path):
- """Given a path on the server returns a full http:// style url"""
- app = urlparse(self.url)[2]
- if path[0] == '/':
- url = urljoin(self.url, app[:-1] + path)
- else:
- url = urljoin(self.url, app + path)
-
- if not url.startswith('http'):
- url = 'http://%s' % url
- return url
-
- def api_get(self, path):
- """
- Performs an API call using HTTP GET at the specified path.
- """
- return self.process_json(self.http_get(path))
-
- def http_post(self, path, fields, files=None):
- """
- Performs an HTTP POST on the specified path, storing any cookies that
- were set.
- """
- if fields:
- debug_fields = fields.copy()
- else:
- debug_fields = {}
-
- if 'password' in debug_fields:
- debug_fields["password"] = "**************"
- url = self._make_url(path)
- debug('HTTP POSTing to %s: %s' % (url, debug_fields))
-
- content_type, body = self._encode_multipart_formdata(fields, files)
- headers = {
- 'Content-Type': content_type,
- 'Content-Length': str(len(body))
- }
-
- try:
- r = urllib2.Request(url, body, headers)
- data = urllib2.urlopen(r).read()
- self.cookie_jar.save(self.cookie_file)
- return data
- except urllib2.URLError, e:
- try:
- debug(e.read())
- except AttributeError:
- pass
-
- die("Unable to access %s. The host path may be invalid\n%s" % \
- (url, e))
- except urllib2.HTTPError, e:
- die("Unable to access %s (%s). The host path may be invalid\n%s" % \
- (url, e.code, e.read()))
-
- def api_post(self, path, fields=None, files=None):
- """
- Performs an API call using HTTP POST at the specified path.
- """
- return self.process_json(self.http_post(path, fields, files))
-
- def _encode_multipart_formdata(self, fields, files):
- """
- Encodes data for use in an HTTP POST.
- """
- BOUNDARY = mimetools.choose_boundary()
- content = ""
-
- fields = fields or {}
- files = files or {}
-
- for key in fields:
- content += "--" + BOUNDARY + "\r\n"
- content += "Content-Disposition: form-data; name=\"%s\"\r\n" % key
- content += "\r\n"
- content += fields[key] + "\r\n"
-
- for key in files:
- filename = files[key]['filename']
- value = files[key]['content']
- content += "--" + BOUNDARY + "\r\n"
- content += "Content-Disposition: form-data; name=\"%s\"; " % key
- content += "filename=\"%s\"\r\n" % filename
- content += "\r\n"
- content += value + "\r\n"
-
- content += "--" + BOUNDARY + "--\r\n"
- content += "\r\n"
-
- content_type = "multipart/form-data; boundary=%s" % BOUNDARY
-
- return content_type, content
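-
-    # For illustration (hypothetical field): with fields={'summary': 'Fix bug'}
-    # and no files, the generated body consists of CRLF-terminated lines
-    #
-    #   --<boundary>
-    #   Content-Disposition: form-data; name="summary"
-    #
-    #   Fix bug
-    #   --<boundary>--
-    #
-    # and the returned content type is
-    # 'multipart/form-data; boundary=<boundary>', where <boundary> is chosen
-    # by mimetools.choose_boundary().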
-
-
-class SCMClient(object):
- """
- A base representation of an SCM tool for fetching repository information
- and generating diffs.
- """
- def get_repository_info(self):
- return None
-
- def scan_for_server(self, repository_info):
- """
-        Scans from the current directory upward to find a .reviewboardrc file
-        containing the server path.
- """
- server_url = self._get_server_from_config(user_config, repository_info)
- if server_url:
- return server_url
-
- for path in walk_parents(os.getcwd()):
- filename = os.path.join(path, ".reviewboardrc")
- if os.path.exists(filename):
- config = load_config_file(filename)
- server_url = self._get_server_from_config(config,
- repository_info)
- if server_url:
- return server_url
-
- return None
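-
-    # For illustration: the lookup order above is (1) the REVIEWBOARD_URL or
-    # TREES entries from the already-loaded user config, then (2) the first
-    # .reviewboardrc found while walking from the current directory up toward
-    # the filesystem root. SCM-specific fallbacks (SVN properties, Perforce
-    # counters, git config) are layered on top of this by the subclasses.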
-
- def diff(self, args):
- """
- Returns the generated diff and optional parent diff for this
- repository.
-
- The returned tuple is (diff_string, parent_diff_string)
- """
- return (None, None)
-
- def diff_between_revisions(self, revision_range, args, repository_info):
- """
- Returns the generated diff between revisions in the repository.
- """
- return None
-
- def _get_server_from_config(self, config, repository_info):
- if 'REVIEWBOARD_URL' in config:
- return config['REVIEWBOARD_URL']
- elif 'TREES' in config:
- trees = config['TREES']
- if not isinstance(trees, dict):
- die("Warning: 'TREES' in config file is not a dict!")
-
- if repository_info.path in trees and \
- 'REVIEWBOARD_URL' in trees[repository_info.path]:
- return trees[repository_info.path]['REVIEWBOARD_URL']
-
- return None
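-
-    # For illustration (hypothetical values): a .reviewboardrc can also map
-    # repository paths to per-tree settings via TREES, e.g.
-    #
-    #   TREES = {
-    #       'http://svn.example.com/repo': {
-    #           'REVIEWBOARD_URL': 'http://reviewboard.example.com',
-    #       },
-    #   }
-    #
-    # in which case _get_server_from_config() returns the REVIEWBOARD_URL of
-    # the entry whose key matches repository_info.path.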
-
-
-class CVSClient(SCMClient):
- """
- A wrapper around the cvs tool that fetches repository
- information and generates compatible diffs.
- """
- def get_repository_info(self):
- if not check_install("cvs"):
- return None
-
- cvsroot_path = os.path.join("CVS", "Root")
-
- if not os.path.exists(cvsroot_path):
- return None
-
- fp = open(cvsroot_path, "r")
- repository_path = fp.read().strip()
- fp.close()
-
- i = repository_path.find("@")
- if i != -1:
- repository_path = repository_path[i + 1:]
-
- i = repository_path.find(":")
- if i != -1:
- host = repository_path[:i]
- try:
- canon = socket.getfqdn(host)
- repository_path = repository_path.replace('%s:' % host,
- '%s:' % canon)
- except socket.error, msg:
- debug("failed to get fqdn for %s, msg=%s" % (host, msg))
-
- return RepositoryInfo(path=repository_path)
-
- def diff(self, files):
- """
- Performs a diff across all modified files in a CVS repository.
-
- CVS repositories do not support branches of branches in a way that
- makes parent diffs possible, so we never return a parent diff
- (the second value in the tuple).
- """
- return (self.do_diff(files), None)
-
- def diff_between_revisions(self, revision_range, args, repository_info):
- """
- Performs a diff between 2 revisions of a CVS repository.
- """
- revs = []
-
- for rev in revision_range.split(":"):
- revs += ["-r", rev]
-
- return self.do_diff(revs)
-
- def do_diff(self, params):
- """
- Performs the actual diff operation through cvs diff, handling
- fake errors generated by CVS.
- """
- # Diff returns "1" if differences were found.
- return execute(["cvs", "diff", "-uN"] + params,
- extra_ignore_errors=(1,))
-
-
-class ClearCaseClient(SCMClient):
- """
- A wrapper around the clearcase tool that fetches repository
- information and generates compatible diffs.
-    This client assumes that Cygwin is installed on Windows.
- """
- ccroot_path = "/view/reviewboard.diffview/vobs/"
- viewinfo = ""
- viewtype = "snapshot"
-
- def get_filename_hash(self, fname):
-        # Hash the filename string so it's easy to find the file later on.
- return md5(fname).hexdigest()
-
- def get_repository_info(self):
- if not check_install('cleartool help'):
- return None
-
- # We must be running this from inside a view.
- # Otherwise it doesn't make sense.
- self.viewinfo = execute(["cleartool", "pwv", "-short"])
- if self.viewinfo.startswith('\*\* NONE'):
- return None
-
-        # Returning the hardcoded clearcase root path to match the server
-        # repository path.
-        # There is no reason to have a dynamic path unless you have
-        # multiple clearcase repositories; support for that should be
-        # implemented.
- return RepositoryInfo(path=self.ccroot_path,
- base_path=self.ccroot_path,
- supports_parent_diffs=False)
-
- def get_previous_version(self, files):
- file = []
- curdir = os.getcwd()
-
-        # The Cygwin case must transform a Linux-like path into a Windows-like
-        # path, including the drive letter.
- if 'cygdrive' in curdir:
- where = curdir.index('cygdrive') + 9
- drive_letter = curdir[where:where+1]
- curdir = drive_letter + ":\\" + curdir[where+2:len(curdir)]
-
- for key in files:
- # Sometimes there is a quote in the filename. It must be removed.
- key = key.replace('\'', '')
- elem_path = cpath.normpath(os.path.join(curdir, key))
-
- # Removing anything before the last /vobs
- # because it may be repeated.
- elem_path_idx = elem_path.rfind("/vobs")
- if elem_path_idx != -1:
- elem_path = elem_path[elem_path_idx:len(elem_path)].strip("\"")
-
- # Call cleartool to get this version and the previous version
- # of the element.
- curr_version, pre_version = execute(
- ["cleartool", "desc", "-pre", elem_path])
- curr_version = cpath.normpath(curr_version)
- pre_version = pre_version.split(':')[1].strip()
-
- # If a specific version was given, remove it from the path
- # to avoid version duplication
- if "@@" in elem_path:
- elem_path = elem_path[:elem_path.rfind("@@")]
- file.append(elem_path + "@@" + pre_version)
- file.append(curr_version)
-
-        # Determine if the view type is snapshot or dynamic.
- if os.path.exists(file[0]):
- self.viewtype = "dynamic"
-
- return file
-
- def get_extended_namespace(self, files):
- """
- Parses the file path to get the extended namespace
- """
- versions = self.get_previous_version(files)
-
- evfiles = []
- hlist = []
-
- for vkey in versions:
- # Verify if it is a checkedout file.
- if "CHECKEDOUT" in vkey:
- # For checkedout files just add it to the file list
- # since it cannot be accessed outside the view.
- splversions = vkey[:vkey.rfind("@@")]
- evfiles.append(splversions)
- else:
- # For checkedin files.
- ext_path = []
- ver = []
- fname = "" # fname holds the file name without the version.
- (bpath, fpath) = cpath.splitdrive(vkey)
- if bpath :
- # Windows.
- # The version (if specified like file.c@@/main/1)
- # should be kept as a single string
- # so split the path and concat the file name
- # and version in the last position of the list.
- ver = fpath.split("@@")
- splversions = fpath[:vkey.rfind("@@")].split("\\")
- fname = splversions.pop()
- splversions.append(fname + ver[1])
- else :
- # Linux.
- bpath = vkey[:vkey.rfind("vobs")+4]
- fpath = vkey[vkey.rfind("vobs")+5:]
- ver = fpath.split("@@")
- splversions = ver[0][:vkey.rfind("@@")].split("/")
- fname = splversions.pop()
- splversions.append(fname + ver[1])
-
- filename = splversions.pop()
- bpath = cpath.normpath(bpath + "/")
- elem_path = bpath
-
- for key in splversions:
- # For each element (directory) in the path,
- # get its version from clearcase.
- elem_path = cpath.join(elem_path, key)
-
- # This is the version to be appended to the extended
- # path list.
- this_version = execute(
- ["cleartool", "desc", "-fmt", "%Vn",
- cpath.normpath(elem_path)])
- if this_version:
- ext_path.append(key + "/@@" + this_version + "/")
- else:
- ext_path.append(key + "/")
-
- # This must be done in case we haven't specified
- # the version on the command line.
- ext_path.append(cpath.normpath(fname + "/@@" +
- vkey[vkey.rfind("@@")+2:len(vkey)]))
- epstr = cpath.join(bpath, cpath.normpath(''.join(ext_path)))
- evfiles.append(epstr)
-
- """
- In windows, there is a problem with long names(> 254).
- In this case, we hash the string and copy the unextended
- filename to a temp file whose name is the hash.
- This way we can get the file later on for diff.
- The same problem applies to snapshot views where the
- extended name isn't available.
- The previous file must be copied from the CC server
- to a local dir.
- """
- if cpath.exists(epstr) :
- pass
- else:
- if len(epstr) > 254 or self.viewtype == "snapshot":
- name = self.get_filename_hash(epstr)
- # Check if this hash is already in the list
- try:
- i = hlist.index(name)
- die("ERROR: duplicate value %s : %s" %
- (name, epstr))
- except ValueError:
- hlist.append(name)
-
- normkey = cpath.normpath(vkey)
- td = tempfile.gettempdir()
-                        # The Cygwin case must transform a Linux-like path
-                        # into a Windows-like path, including the drive letter.
- if 'cygdrive' in td:
- where = td.index('cygdrive') + 9
- drive_letter = td[where:where+1] + ":"
- td = cpath.join(drive_letter, td[where+1:])
- tf = cpath.normpath(cpath.join(td, name))
- if cpath.exists(tf):
- debug("WARNING: FILE EXISTS")
- os.unlink(tf)
- execute(["cleartool", "get", "-to", tf, normkey])
- else:
- die("ERROR: FILE NOT FOUND : %s" % epstr)
-
- return evfiles
-
- def get_files_from_label(self, label):
- voblist=[]
- # Get the list of vobs for the current view
- allvoblist = execute(["cleartool", "lsvob", "-short"]).split()
- # For each vob, find if the label is present
- for vob in allvoblist:
- try:
- execute(["cleartool", "describe", "-local",
- "lbtype:%s@%s" % (label, vob)]).split()
- voblist.append(vob)
- except:
- pass
-
- filelist=[]
- # For each vob containing the label, get the file list
- for vob in voblist:
- try:
- res = execute(["cleartool", "find", vob, "-all", "-version",
- "lbtype(%s)" % label, "-print"])
- filelist.extend(res.split())
- except :
- pass
-
-        # Return only the unique items.
- return set(filelist)
-
- def diff(self, files):
- """
- Performs a diff of the specified file and its previous version.
- """
- # We must be running this from inside a view.
- # Otherwise it doesn't make sense.
- return self.do_diff(self.get_extended_namespace(files))
-
- def diff_label(self, label):
- """
- Get the files that are attached to a label and diff them
- TODO
- """
- return self.diff(self.get_files_from_label(label))
-
- def diff_between_revisions(self, revision_range, args, repository_info):
- """
- Performs a diff between 2 revisions of a CC repository.
- """
- rev_str = ''
-
- for rev in revision_range.split(":"):
- rev_str += "-r %s " % rev
-
- return self.do_diff(rev_str)
-
- def do_diff(self, params):
- # Diff returns "1" if differences were found.
- # Add the view name and view type to the description
- if options.description:
- options.description = ("VIEW: " + self.viewinfo +
- "VIEWTYPE: " + self.viewtype + "\n" + options.description)
- else:
- options.description = (self.viewinfo +
- "VIEWTYPE: " + self.viewtype + "\n")
-
- o = []
- Feol = False
- while len(params) > 0:
- # Read both original and modified files.
- onam = params.pop(0)
- mnam = params.pop(0)
- file_data = []
- do_rem = False
-            # If the filename length is greater than 254 characters on
-            # Windows, we copied the file to a temp file, because open will
-            # not work for paths longer than 254 characters. This applies to
-            # both the original and modified files if the name size is > 254.
- for filenam in (onam, mnam) :
- if cpath.exists(filenam) and self.viewtype == "dynamic":
- do_rem = False
- fn = filenam
- elif len(filenam) > 254 or self.viewtype == "snapshot":
- fn = self.get_filename_hash(filenam)
- fn = cpath.join(tempfile.gettempdir(), fn)
- do_rem = True
- fd = open(cpath.normpath(fn))
- fdata = fd.readlines()
- fd.close()
- file_data.append(fdata)
- # If the file was temp, it should be removed.
- if do_rem:
- os.remove(filenam)
-
- modi = file_data.pop()
- orig = file_data.pop()
-
-            # For snapshot views, the local directories must be removed because
-            # they will break the diff on the server. Just replacing
-            # everything before the view name (including the view name) with
-            # vobs does the work.
- if (self.viewtype == "snapshot"
- and (sys.platform.startswith('win')
- or sys.platform.startswith('cygwin'))):
- vinfo = self.viewinfo.rstrip("\r\n")
- mnam = "c:\\\\vobs" + mnam[mnam.rfind(vinfo) + len(vinfo):]
- onam = "c:\\\\vobs" + onam[onam.rfind(vinfo) + len(vinfo):]
-            # Call the diff lib to generate a diff.
-            # The dates are bogus, since they don't matter anyway.
-            # The only requirement is the two leading spaces, which the
-            # server needs to identify the headers correctly.
- diff = difflib.unified_diff(orig, modi, onam, mnam,
- ' 2002-02-21 23:30:39.942229878 -0800',
- ' 2002-02-21 23:30:50.442260588 -0800', lineterm=' \n')
-            # Transform the generator output into a string.
-            # Use a list comprehension instead of a generator expression,
-            # so Python 2.3.x doesn't fail to interpret it.
- diffstr = ''.join([str(l) for l in diff])
- # Workaround for the difflib no new line at end of file
- # problem.
- if not diffstr.endswith('\n'):
- diffstr = diffstr + ("\n\\ No newline at end of file\n")
- o.append(diffstr)
-
- ostr = ''.join(o)
- return (ostr, None) # diff, parent_diff (not supported)
-
-
-class SVNClient(SCMClient):
- """
- A wrapper around the svn Subversion tool that fetches repository
- information and generates compatible diffs.
- """
- def get_repository_info(self):
- if not check_install('svn help'):
- return None
-
- # Get the SVN repository path (either via a working copy or
- # a supplied URI)
- svn_info_params = ["svn", "info"]
- if options.repository_url:
- svn_info_params.append(options.repository_url)
- data = execute(svn_info_params,
- ignore_errors=True)
- m = re.search(r'^Repository Root: (.+)$', data, re.M)
- if not m:
- return None
-
- path = m.group(1)
-
- m = re.search(r'^URL: (.+)$', data, re.M)
- if not m:
- return None
-
- base_path = m.group(1)[len(path):] or "/"
-
- m = re.search(r'^Repository UUID: (.+)$', data, re.M)
- if not m:
- return None
-
- return SvnRepositoryInfo(path, base_path, m.group(1))
-
- def scan_for_server(self, repository_info):
- # Scan first for dot files, since it's faster and will cover the
- # user's $HOME/.reviewboardrc
- server_url = super(SVNClient, self).scan_for_server(repository_info)
- if server_url:
- return server_url
-
- return self.scan_for_server_property(repository_info)
-
- def scan_for_server_property(self, repository_info):
- def get_url_prop(path):
- url = execute(["svn", "propget", "reviewboard:url", path]).strip()
- return url or None
-
- for path in walk_parents(os.getcwd()):
- if not os.path.exists(os.path.join(path, ".svn")):
- break
-
- prop = get_url_prop(path)
- if prop:
- return prop
-
- return get_url_prop(repository_info.path)
-
- def diff(self, files):
- """
- Performs a diff across all modified files in a Subversion repository.
-
- SVN repositories do not support branches of branches in a way that
- makes parent diffs possible, so we never return a parent diff
- (the second value in the tuple).
- """
- return (self.do_diff(["svn", "diff", "--diff-cmd=diff"] + files),
- None)
-
- def diff_between_revisions(self, revision_range, args, repository_info):
- """
- Performs a diff between 2 revisions of a Subversion repository.
- """
- if options.repository_url:
- revisions = revision_range.split(':')
- if len(revisions) < 1:
- return None
- elif len(revisions) == 1:
- revisions.append('HEAD')
-
- # if a new path was supplied at the command line, set it
- if len(args):
- repository_info.set_base_path(args[0])
-
- url = repository_info.path + repository_info.base_path
-
- old_url = url + '@' + revisions[0]
- new_url = url + '@' + revisions[1]
-
- return self.do_diff(["svn", "diff", "--diff-cmd=diff", old_url,
- new_url],
- repository_info)
- # Otherwise, perform the revision range diff using a working copy
- else:
- return self.do_diff(["svn", "diff", "--diff-cmd=diff", "-r",
- revision_range],
- repository_info)
-
- def do_diff(self, cmd, repository_info=None):
- """
- Performs the actual diff operation, handling renames and converting
- paths to absolute.
- """
- diff = execute(cmd, split_lines=True)
- diff = self.handle_renames(diff)
- diff = self.convert_to_absolute_paths(diff, repository_info)
-
- return ''.join(diff)
-
- def handle_renames(self, diff_content):
- """
- The output of svn diff is incorrect when the file in question came
-        into being via svn mv/cp. Although the patch for such a file is
-        relative to its parent, the diff header doesn't reflect this.
- This function fixes the relevant section headers of the patch to
- portray this relationship.
- """
-
- # svn diff against a repository URL on two revisions appears to
- # handle moved files properly, so only adjust the diff file names
- # if they were created using a working copy.
- if options.repository_url:
- return diff_content
-
- result = []
-
- from_line = ""
- for line in diff_content:
- if line.startswith('--- '):
- from_line = line
- continue
-
-            # This is where we decide how to mangle the previous '--- '
- if line.startswith('+++ '):
- to_file, _ = self.parse_filename_header(line[4:])
- info = self.svn_info(to_file)
- if info.has_key("Copied From URL"):
- url = info["Copied From URL"]
- root = info["Repository Root"]
- from_file = urllib.unquote(url[len(root):])
- result.append(from_line.replace(to_file, from_file))
- else:
-                    result.append(from_line)  # as is, no copy performed
-
- # We only mangle '---' lines. All others get added straight to
- # the output.
- result.append(line)
-
- return result
-
-
- def convert_to_absolute_paths(self, diff_content, repository_info):
- """
- Converts relative paths in a diff output to absolute paths.
- This handles paths that have been svn switched to other parts of the
- repository.
- """
-
- result = []
-
- for line in diff_content:
- front = None
- if line.startswith('+++ ') or line.startswith('--- ') or line.startswith('Index: '):
- front, line = line.split(" ", 1)
-
- if front:
- if line.startswith('/'): #already absolute
- line = front + " " + line
- else:
- # filename and rest of line (usually the revision
- # component)
- file, rest = self.parse_filename_header(line)
-
- # If working with a diff generated outside of a working
- # copy, then file paths are already absolute, so just
- # add initial slash.
- if options.repository_url:
- path = urllib.unquote(
- "%s/%s" % (repository_info.base_path, file))
- else:
- info = self.svn_info(file)
- url = info["URL"]
- root = info["Repository Root"]
- path = urllib.unquote(url[len(root):])
-
- line = front + " " + path + rest
-
- result.append(line)
-
- return result
-
- def svn_info(self, path):
- """Return a dict which is the result of 'svn info' at a given path."""
- svninfo = {}
- for info in execute(["svn", "info", path],
- split_lines=True):
- parts = info.strip().split(": ", 1)
- if len(parts) == 2:
- key, value = parts
- svninfo[key] = value
-
- return svninfo
-
- # Adapted from server code parser.py
- def parse_filename_header(self, s):
- parts = None
- if "\t" in s:
- # There's a \t separating the filename and info. This is the
- # best case scenario, since it allows for filenames with spaces
- # without much work.
- parts = s.split("\t")
-
-        # There are spaces being used to separate the filename and info.
-        # This is technically wrong, so all we can do is assume that
-        # 1) the filename won't have multiple consecutive spaces, and
-        # 2) there are at least 2 spaces separating the filename and info.
-        if "  " in s:
-            parts = re.split(r"  +", s)
-
- if parts:
- parts[1] = '\t' + parts[1]
- return parts
-
- # strip off ending newline, and return it as the second component
- return [s.split('\n')[0], '\n']
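-
-    # For illustration (hypothetical header): 'foo/bar.c\t(revision 4)' splits
-    # on the tab into ['foo/bar.c', '\t(revision 4)'];
-    # 'foo/bar.c      (revision 4)' splits on the run of spaces into the same
-    # two parts; and a header with neither separator comes back as
-    # [filename, '\n'].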
-
-
-class PerforceClient(SCMClient):
- """
- A wrapper around the p4 Perforce tool that fetches repository information
- and generates compatible diffs.
- """
- def get_repository_info(self):
- if not check_install('p4 help'):
- return None
-
- data = execute(["p4", "info"], ignore_errors=True)
-
- m = re.search(r'^Server address: (.+)$', data, re.M)
- if not m:
- return None
-
- repository_path = m.group(1).strip()
-
- try:
- hostname, port = repository_path.split(":")
- info = socket.gethostbyaddr(hostname)
- repository_path = "%s:%s" % (info[0], port)
- except (socket.gaierror, socket.herror):
- pass
-
- return RepositoryInfo(path=repository_path, supports_changesets=True)
-
- def scan_for_server(self, repository_info):
- # Scan first for dot files, since it's faster and will cover the
- # user's $HOME/.reviewboardrc
- server_url = \
- super(PerforceClient, self).scan_for_server(repository_info)
-
- if server_url:
- return server_url
-
- return self.scan_for_server_counter(repository_info)
-
- def scan_for_server_counter(self, repository_info):
- """
- Checks the Perforce counters to see if the Review Board server's url
- is specified. Since Perforce only started supporting non-numeric
- counter values in server version 2008.1, we support both a normal
- counter 'reviewboard.url' with a string value and embedding the url in
- a counter name like 'reviewboard.url.http:||reviewboard.example.com'.
- Note that forward slashes aren't allowed in counter names, so
- pipe ('|') characters should be used. These should be safe because they
- should not be used unencoded in urls.
- """
-
- counters_text = execute(["p4", "counters"])
-
- # Try for a "reviewboard.url" counter first.
- m = re.search(r'^reviewboard.url = (\S+)', counters_text, re.M)
-
- if m:
- return m.group(1)
-
-        # Next try for a counter of the form:
-        # reviewboard.url.http:||reviewboard.example.com
- m2 = re.search(r'^reviewboard.url\.(\S+)', counters_text, re.M)
-
- if m2:
- return m2.group(1).replace('|', '/')
-
- return None
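-
-    # For illustration (hypothetical counters): on a 2008.1+ server, a counter
-    # set with
-    #   p4 counter reviewboard.url http://reviewboard.example.com
-    # is matched by the first pattern; on older servers, a counter *named*
-    #   reviewboard.url.http:||reviewboard.example.com
-    # is matched by the second pattern, and the pipes are turned back into
-    # slashes to yield http://reviewboard.example.com.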
-
- def get_changenum(self, args):
- if len(args) == 1:
- try:
- return str(int(args[0]))
- except ValueError:
- pass
- return None
-
- def diff(self, args):
- """
- Goes through the hard work of generating a diff on Perforce in order
- to take into account adds/deletes and to provide the necessary
- revision information.
- """
-        # Set the P4 environment:
- if options.p4_client:
- os.environ['P4CLIENT'] = options.p4_client
-
- if options.p4_port:
- os.environ['P4PORT'] = options.p4_port
-
- changenum = self.get_changenum(args)
- if changenum is None:
- return self._path_diff(args)
- else:
- return self._changenum_diff(changenum)
-
-
- def _path_diff(self, args):
- """
- Process a path-style diff. See _changenum_diff for the alternate
- version that handles specific change numbers.
-
- Multiple paths may be specified in `args`. The path styles supported
- are:
-
- //path/to/file
- Upload file as a "new" file.
-
- //path/to/dir/...
- Upload all files as "new" files.
-
- //path/to/file[@#]rev
- Upload file from that rev as a "new" file.
-
- //path/to/file[@#]rev,[@#]rev
- Upload a diff between revs.
-
- //path/to/dir/...[@#]rev,[@#]rev
- Upload a diff of all files between revs in that directory.
- """
- r_revision_range = re.compile(r'^(?P<path>//[^@#]+)' +
- r'(?P<revision1>[#@][^,]+)?' +
- r'(?P<revision2>,[#@][^,]+)?$')
-
- empty_filename = make_tempfile()
- tmp_diff_from_filename = make_tempfile()
- tmp_diff_to_filename = make_tempfile()
-
- diff_lines = []
-
- for path in args:
- m = r_revision_range.match(path)
-
- if not m:
- die('Path %r does not match a valid Perforce path.' % (path,))
- revision1 = m.group('revision1')
- revision2 = m.group('revision2')
- first_rev_path = m.group('path')
-
- if revision1:
- first_rev_path += revision1
- records = self._run_p4(['files', first_rev_path])
-
- # Make a map for convenience.
- files = {}
-
- # Records are:
- # 'rev': '1'
- # 'func': '...'
- # 'time': '1214418871'
- # 'action': 'edit'
- # 'type': 'ktext'
- # 'depotFile': '...'
- # 'change': '123456'
- for record in records:
- if record['action'] != 'delete':
- if revision2:
- files[record['depotFile']] = [record, None]
- else:
- files[record['depotFile']] = [None, record]
-
- if revision2:
- # [1:] to skip the comma.
- second_rev_path = m.group('path') + revision2[1:]
- records = self._run_p4(['files', second_rev_path])
- for record in records:
- if record['action'] != 'delete':
- try:
- m = files[record['depotFile']]
- m[1] = record
- except KeyError:
- files[record['depotFile']] = [None, record]
-
- old_file = new_file = empty_filename
- changetype_short = None
-
- for depot_path, (first_record, second_record) in files.items():
- old_file = new_file = empty_filename
- if first_record is None:
- self._write_file(depot_path + '#' + second_record['rev'],
- tmp_diff_to_filename)
- new_file = tmp_diff_to_filename
- changetype_short = 'A'
- base_revision = 0
- elif second_record is None:
- self._write_file(depot_path + '#' + first_record['rev'],
- tmp_diff_from_filename)
- old_file = tmp_diff_from_filename
- changetype_short = 'D'
- base_revision = int(first_record['rev'])
- else:
- self._write_file(depot_path + '#' + first_record['rev'],
- tmp_diff_from_filename)
- self._write_file(depot_path + '#' + second_record['rev'],
- tmp_diff_to_filename)
- new_file = tmp_diff_to_filename
- old_file = tmp_diff_from_filename
- changetype_short = 'M'
- base_revision = int(first_record['rev'])
-
- dl = self._do_diff(old_file, new_file, depot_path,
- base_revision, changetype_short,
- ignore_unmodified=True)
- diff_lines += dl
-
- os.unlink(empty_filename)
- os.unlink(tmp_diff_from_filename)
- os.unlink(tmp_diff_to_filename)
- return (''.join(diff_lines), None)
-
- def _run_p4(self, command):
- """Execute a perforce command using the python marshal API.
-
- - command: A list of strings of the command to execute.
-
- The return type depends on the command being run.
- """
- command = ['p4', '-G'] + command
- p = subprocess.Popen(command, stdout=subprocess.PIPE)
- result = []
- has_error = False
-
- while 1:
- try:
- data = marshal.load(p.stdout)
- except EOFError:
- break
- else:
- result.append(data)
- if data.get('code', None) == 'error':
- has_error = True
-
- rc = p.wait()
-
- if rc or has_error:
- for record in result:
- if 'data' in record:
- print record['data']
- die('Failed to execute command: %s\n' % (command,))
-
- return result
-
- def _changenum_diff(self, changenum):
- """
- Process a diff for a particular change number. This handles both
- pending and submitted changelists.
-
- See _path_diff for the alternate version that does diffs of depot
- paths.
- """
- # TODO: It might be a good idea to enhance PerforceDiffParser to
- # understand that newFile could include a revision tag for post-submit
- # reviewing.
- cl_is_pending = False
-
- debug("Generating diff for changenum %s" % changenum)
-
- description = execute(["p4", "describe", "-s", changenum],
- split_lines=True)
-
- if '*pending*' in description[0]:
- cl_is_pending = True
-
- # Get the file list
- for line_num, line in enumerate(description):
- if 'Affected files ...' in line:
- break
- else:
- # Got to the end of all the description lines and didn't find
- # what we were looking for.
- die("Couldn't find any affected files for this change.")
-
- description = description[line_num+2:]
-
- diff_lines = []
-
- empty_filename = make_tempfile()
- tmp_diff_from_filename = make_tempfile()
- tmp_diff_to_filename = make_tempfile()
-
- for line in description:
- line = line.strip()
- if not line:
- continue
-
- m = re.search(r'\.\.\. ([^#]+)#(\d+) (add|edit|delete|integrate|branch)', line)
- if not m:
- die("Unsupported line from p4 opened: %s" % line)
-
- depot_path = m.group(1)
- base_revision = int(m.group(2))
- if not cl_is_pending:
-                # If the changelist is pending, our base revision is the one
-                # that's currently in the depot. If it's not pending, the base
-                # revision is actually the revision prior to this one.
- base_revision -= 1
-
- changetype = m.group(3)
-
- debug('Processing %s of %s' % (changetype, depot_path))
-
- old_file = new_file = empty_filename
- old_depot_path = new_depot_path = None
- changetype_short = None
-
- if changetype == 'edit' or changetype == 'integrate':
-                # A big assumption: the new revision is one more than the base.
- new_revision = base_revision + 1
-
- # We have an old file, get p4 to take this old version from the
- # depot and put it into a plain old temp file for us
- old_depot_path = "%s#%s" % (depot_path, base_revision)
- self._write_file(old_depot_path, tmp_diff_from_filename)
- old_file = tmp_diff_from_filename
-
- # Also print out the new file into a tmpfile
- if cl_is_pending:
- new_file = self._depot_to_local(depot_path)
- else:
- new_depot_path = "%s#%s" %(depot_path, new_revision)
- self._write_file(new_depot_path, tmp_diff_to_filename)
- new_file = tmp_diff_to_filename
-
- changetype_short = "M"
-
- elif changetype == 'add' or changetype == 'branch':
- # We have a new file, get p4 to put this new file into a pretty
- # temp file for us. No old file to worry about here.
- if cl_is_pending:
- new_file = self._depot_to_local(depot_path)
- else:
- self._write_file(depot_path, tmp_diff_to_filename)
- new_file = tmp_diff_to_filename
- changetype_short = "A"
-
- elif changetype == 'delete':
- # We've deleted a file, get p4 to put the deleted file into a temp
- # file for us. The new file remains the empty file.
- old_depot_path = "%s#%s" % (depot_path, base_revision)
- self._write_file(old_depot_path, tmp_diff_from_filename)
- old_file = tmp_diff_from_filename
- changetype_short = "D"
- else:
- die("Unknown change type '%s' for %s" % (changetype, depot_path))
-
- dl = self._do_diff(old_file, new_file, depot_path, base_revision, changetype_short)
- diff_lines += dl
-
- os.unlink(empty_filename)
- os.unlink(tmp_diff_from_filename)
- os.unlink(tmp_diff_to_filename)
- return (''.join(diff_lines), None)
-
- def _do_diff(self, old_file, new_file, depot_path, base_revision,
- changetype_short, ignore_unmodified=False):
- """
- Do the work of producing a diff for Perforce.
-
- old_file - The absolute path to the "old" file.
- new_file - The absolute path to the "new" file.
- depot_path - The depot path in Perforce for this file.
- base_revision - The base perforce revision number of the old file as
- an integer.
- changetype_short - The change type as a single character string.
- ignore_unmodified - If True, will return an empty list if the file
- is not changed.
-
- Returns a list of strings of diff lines.
- """
- if hasattr(os, 'uname') and os.uname()[0] == 'SunOS':
- diff_cmd = ["gdiff", "-urNp", old_file, new_file]
- else:
- diff_cmd = ["diff", "-urNp", old_file, new_file]
- # Diff returns "1" if differences were found.
- dl = execute(diff_cmd, extra_ignore_errors=(1,2),
- translate_newlines=False)
-
-        # If the input file has ^M characters at end of line, let's ignore them.
- dl = dl.replace('\r\r\n', '\r\n')
- dl = dl.splitlines(True)
-
- cwd = os.getcwd()
- if depot_path.startswith(cwd):
- local_path = depot_path[len(cwd) + 1:]
- else:
- local_path = depot_path
-
- # Special handling for the output of the diff tool on binary files:
- # diff outputs "Files a and b differ"
- # and the code below expects the output to start with
- # "Binary files "
- if len(dl) == 1 and \
- dl[0] == ('Files %s and %s differ'% (old_file, new_file)):
- dl = ['Binary files %s and %s differ'% (old_file, new_file)]
-
- if dl == [] or dl[0].startswith("Binary files "):
- if dl == []:
- if ignore_unmodified:
- return []
- else:
- print "Warning: %s in your changeset is unmodified" % \
- local_path
-
- dl.insert(0, "==== %s#%s ==%s== %s ====\n" % \
- (depot_path, base_revision, changetype_short, local_path))
- dl.append('\n')
- else:
- m = re.search(r'(\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d)', dl[1])
- if m:
- timestamp = m.group(1)
- else:
- # Thu Sep 3 11:24:48 2007
- m = re.search(r'(\w+)\s+(\w+)\s+(\d+)\s+(\d\d:\d\d:\d\d)\s+(\d\d\d\d)', dl[1])
- if not m:
- die("Unable to parse diff header: %s" % dl[1])
-
- month_map = {
- "Jan": "01",
- "Feb": "02",
- "Mar": "03",
- "Apr": "04",
- "May": "05",
- "Jun": "06",
- "Jul": "07",
- "Aug": "08",
- "Sep": "09",
- "Oct": "10",
- "Nov": "11",
- "Dec": "12",
- }
- month = month_map[m.group(2)]
- day = m.group(3)
- timestamp = m.group(4)
- year = m.group(5)
-
- timestamp = "%s-%s-%s %s" % (year, month, day, timestamp)
-
- dl[0] = "--- %s\t%s#%s\n" % (local_path, depot_path, base_revision)
- dl[1] = "+++ %s\t%s\n" % (local_path, timestamp)
-
- return dl
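-
-    # For illustration (hypothetical paths): for a modified file the two
-    # header lines rewritten above end up looking like
-    #   --- src/foo.c<TAB>//depot/proj/src/foo.c#3
-    #   +++ src/foo.c<TAB>2007-09-03 11:24:48
-    # while an unmodified or binary file instead gets a single marker line of
-    # the form
-    #   ==== //depot/proj/src/foo.c#3 ==M== src/foo.c ====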
-
- def _write_file(self, depot_path, tmpfile):
- """
-        Grabs a file from Perforce and writes it to a temp file. p4 print sets
-        the file read-only, and that causes a later call to unlink to fail, so
-        we make the file read/write.
- """
- debug('Writing "%s" to "%s"' % (depot_path, tmpfile))
- execute(["p4", "print", "-o", tmpfile, "-q", depot_path])
- os.chmod(tmpfile, stat.S_IREAD | stat.S_IWRITE)
-
- def _depot_to_local(self, depot_path):
- """
- Given a path in the depot return the path on the local filesystem to
- the same file. If there are multiple results, take only the last
- result from the where command.
- """
- where_output = self._run_p4(['where', depot_path])
- return where_output[-1]['path']
-
-
-class MercurialClient(SCMClient):
- """
- A wrapper around the hg Mercurial tool that fetches repository
- information and generates compatible diffs.
- """
- def get_repository_info(self):
- if not check_install('hg --help'):
- return None
-
- data = execute(["hg", "root"], ignore_errors=True)
- if data.startswith('abort:'):
- # hg aborted => no mercurial repository here.
- return None
-
-        # Otherwise, the hg root output gives us the repository path.
-
- # We save data here to use it as a fallback. See below
- local_data = data.strip()
-
- svn = execute(["hg", "svn", "info", ], ignore_errors=True)
-
- if (not svn.startswith('abort:') and
- not svn.startswith("hg: unknown command")):
- self.type = 'svn'
- m = re.search(r'^Repository Root: (.+)$', svn, re.M)
-
- if not m:
- return None
-
- path = m.group(1)
- m2 = re.match(r'^(svn\+ssh|http|https)://([-a-zA-Z0-9.]*@)(.*)$',
- path)
- if m2:
- path = '%s://%s' % (m2.group(1), m2.group(3))
-
- m = re.search(r'^URL: (.+)$', svn, re.M)
-
- if not m:
- return None
-
- base_path = m.group(1)[len(path):] or "/"
- return RepositoryInfo(path=path,
- base_path=base_path,
- supports_parent_diffs=True)
-
- self.type = 'hg'
-
- # We are going to search .hg/hgrc for the default path.
- file_name = os.path.join(local_data,'.hg', 'hgrc')
-
- if not os.path.exists(file_name):
- return RepositoryInfo(path=local_data, base_path='/',
- supports_parent_diffs=True)
-
- f = open(file_name)
- data = f.read()
- f.close()
-
- m = re.search(r'^default\s+=\s+(.+)$', data, re.M)
-
- if not m:
- # Return the local path, if no default value is found.
- return RepositoryInfo(path=local_data, base_path='/',
- supports_parent_diffs=True)
-
- path = m.group(1).strip()
-
- return RepositoryInfo(path=path, base_path='',
- supports_parent_diffs=True)
-
- def diff(self, files):
- """
- Performs a diff across all modified files in a Mercurial repository.
- """
- # We don't support parent diffs with Mercurial yet, so we always
- # return None for the parent diff.
- if self.type == 'svn':
- parent = execute(['hg', 'parent', '--svn', '--template',
- '{node}\n']).strip()
-
- if options.parent_branch:
- parent = options.parent_branch
-
- if options.guess_summary and not options.summary:
- options.summary = execute(['hg', 'log', '-r.', '--template',
- r'{desc|firstline}\n'])
-
- if options.guess_description and not options.description:
- numrevs = len(execute(['hg', 'log', '-r.:%s' % parent,
- '--follow', '--template',
- r'{rev}\n']).strip().split('\n'))
- options.description = execute(['hg', 'log', '-r.:%s' % parent,
- '--follow', '--template',
- r'{desc}\n\n', '--limit',
- str(numrevs-1)]).strip()
-
- return (execute(["hg", "diff", "--svn", '-r%s:.' % parent]), None)
-
- return (execute(["hg", "diff"] + files), None)
-
- def diff_between_revisions(self, revision_range, args, repository_info):
- """
- Performs a diff between 2 revisions of a Mercurial repository.
- """
- if self.type != 'hg':
- raise NotImplementedError
-
- r1, r2 = revision_range.split(':')
- return execute(["hg", "diff", "-r", r1, "-r", r2])
-
-
-class GitClient(SCMClient):
- """
- A wrapper around git that fetches repository information and generates
- compatible diffs. This will attempt to generate a diff suitable for the
- remote repository, whether git, SVN or Perforce.
- """
- def get_repository_info(self):
- if not check_install('git --help'):
- return None
-
- git_dir = execute(["git", "rev-parse", "--git-dir"],
- ignore_errors=True).strip()
-
- if git_dir.startswith("fatal:") or not os.path.isdir(git_dir):
- return None
-
-        # Running post-review in directories other than the top level of
-        # a work-tree would result in broken diffs on the server.
- os.chdir(os.path.dirname(os.path.abspath(git_dir)))
-
- # We know we have something we can work with. Let's find out
- # what it is. We'll try SVN first.
- data = execute(["git", "svn", "info"], ignore_errors=True)
-
- m = re.search(r'^Repository Root: (.+)$', data, re.M)
- if m:
- path = m.group(1)
- m = re.search(r'^URL: (.+)$', data, re.M)
-
- if m:
- base_path = m.group(1)[len(path):] or "/"
- m = re.search(r'^Repository UUID: (.+)$', data, re.M)
-
- if m:
- uuid = m.group(1)
- self.type = "svn"
-
- return SvnRepositoryInfo(path=path, base_path=base_path,
- uuid=uuid,
- supports_parent_diffs=True)
- else:
-            # Versions of git-svn before 1.5.4 don't (appear to) support
-            # 'git svn info'. If we fail here because of an older git
-            # install, figure out what version of git is installed and give
-            # the user a hint about what to do next.
- version = execute(["git", "svn", "--version"], ignore_errors=True)
- version_parts = re.search('version (\d+)\.(\d+)\.(\d+)',
- version)
- svn_remote = execute(["git", "config", "--get",
- "svn-remote.svn.url"], ignore_errors=True)
-
- if (version_parts and
- not self.is_valid_version((int(version_parts.group(1)),
- int(version_parts.group(2)),
- int(version_parts.group(3))),
- (1, 5, 4)) and
- svn_remote):
- die("Your installation of git-svn must be upgraded to " + \
- "version 1.5.4 or later")
-
- # Okay, maybe Perforce.
- # TODO
-
- # Nope, it's git then.
- origin = execute(["git", "remote", "show", "origin"])
- m = re.search(r'URL: (.+)', origin)
- if m:
- url = m.group(1).rstrip('/')
- if url:
- self.type = "git"
- return RepositoryInfo(path=url, base_path='',
- supports_parent_diffs=True)
-
- return None
-
- def is_valid_version(self, actual, expected):
- """
- Takes two tuples, both in the form:
- (major_version, minor_version, micro_version)
- Returns true if the actual version is greater than or equal to
- the expected version, and false otherwise.
- """
- return (actual[0] > expected[0]) or \
- (actual[0] == expected[0] and actual[1] > expected[1]) or \
- (actual[0] == expected[0] and actual[1] == expected[1] and \
- actual[2] >= expected[2])
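-
-    # For illustration: is_valid_version((1, 6, 0), (1, 5, 4)) and
-    # is_valid_version((1, 5, 4), (1, 5, 4)) are True, while
-    # is_valid_version((1, 5, 3), (1, 5, 4)) is False.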
-
- def scan_for_server(self, repository_info):
- # Scan first for dot files, since it's faster and will cover the
- # user's $HOME/.reviewboardrc
- server_url = super(GitClient, self).scan_for_server(repository_info)
-
- if server_url:
- return server_url
-
- # TODO: Maybe support a server per remote later? Is that useful?
- url = execute(["git", "config", "--get", "reviewboard.url"],
- ignore_errors=True).strip()
- if url:
- return url
-
- if self.type == "svn":
- # Try using the reviewboard:url property on the SVN repo, if it
- # exists.
- prop = SVNClient().scan_for_server_property(repository_info)
-
- if prop:
- return prop
-
- return None
-
- def diff(self, args):
- """
- Performs a diff across all modified files in the branch, taking into
- account a parent branch.
- """
- parent_branch = options.parent_branch or "master"
-
- diff_lines = self.make_diff(parent_branch)
-
- if parent_branch != "master":
- parent_diff_lines = self.make_diff("master", parent_branch)
- else:
- parent_diff_lines = None
-
- if options.guess_summary and not options.summary:
- options.summary = execute(["git", "log", "--pretty=format:%s",
- "HEAD^.."], ignore_errors=True).strip()
-
- if options.guess_description and not options.description:
- options.description = execute(
- ["git", "log", "--pretty=format:%s%n%n%b", parent_branch + ".."],
- ignore_errors=True).strip()
-
- return (diff_lines, parent_diff_lines)
-
- def make_diff(self, parent_branch, source_branch=""):
- """
- Performs a diff on a particular branch range.
- """
- if self.type == "svn":
- diff_lines = execute(["git", "diff", "--no-color", "--no-prefix",
- "-r", "-u", "%s..%s" % (parent_branch,
- source_branch)],
- split_lines=True)
- return self.make_svn_diff(parent_branch, diff_lines)
- elif self.type == "git":
- return execute(["git", "diff", "--no-color", "--full-index",
- parent_branch])
-
- return None
-
- def make_svn_diff(self, parent_branch, diff_lines):
- """
- Formats the output of git diff such that it's in a form that
- svn diff would generate. This is needed so the SVNTool in Review
- Board can properly parse this diff.
- """
- rev = execute(["git", "svn", "find-rev", "master"]).strip()
-
- if not rev:
- return None
-
- diff_data = ""
- filename = ""
- revision = ""
- newfile = False
-
- for line in diff_lines:
- if line.startswith("diff "):
- # Grab the filename and then filter this out.
- # This will be in the format of:
- #
- # diff --git a/path/to/file b/path/to/file
- info = line.split(" ")
- diff_data += "Index: %s\n" % info[2]
- diff_data += "=" * 67
- diff_data += "\n"
- elif line.startswith("index "):
- # Filter this out.
- pass
- elif line.strip() == "--- /dev/null":
- # New file
- newfile = True
- elif line.startswith("--- "):
- newfile = False
- diff_data += "--- %s\t(revision %s)\n" % \
- (line[4:].strip(), rev)
- elif line.startswith("+++ "):
- filename = line[4:].strip()
- if newfile:
- diff_data += "--- %s\t(revision 0)\n" % filename
- diff_data += "+++ %s\t(revision 0)\n" % filename
- else:
- # We already printed the "--- " line.
- diff_data += "+++ %s\t(working copy)\n" % filename
- else:
- diff_data += line
-
- return diff_data
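-
-    # For illustration (hypothetical file): for an existing file, the git
-    # header pair '--- src/Foo.scala' / '+++ src/Foo.scala' is rewritten
-    # above into
-    #   Index: src/Foo.scala
-    #   <a separator line of 67 '=' characters>
-    #   --- src/Foo.scala<TAB>(revision <rev from 'git svn find-rev master'>)
-    #   +++ src/Foo.scala<TAB>(working copy)
-    # while a file added against /dev/null has both its '---' and '+++' lines
-    # emitted with '(revision 0)'.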
-
- def diff_between_revisions(self, revision_range, args, repository_info):
- pass
-
-
-SCMCLIENTS = (
- SVNClient(),
- CVSClient(),
- GitClient(),
- MercurialClient(),
- PerforceClient(),
- ClearCaseClient(),
-)
-
-def debug(s):
- """
- Prints debugging information if post-review was run with --debug
- """
- if DEBUG or options and options.debug:
- print ">>> %s" % s
-
-
-def make_tempfile():
- """
- Creates a temporary file and returns the path. The path is stored
- in an array for later cleanup.
- """
- fd, tmpfile = mkstemp()
- os.close(fd)
- tempfiles.append(tmpfile)
- return tmpfile
-
-
-def check_install(command):
- """
- Try executing an external command and return a boolean indicating whether
- that command is installed or not. The 'command' argument should be
- something that executes quickly, without hitting the network (for
- instance, 'svn help' or 'git --version').
- """
- try:
- p = subprocess.Popen(command.split(' '),
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- return True
- except OSError:
- return False
-
-
-def execute(command, env=None, split_lines=False, ignore_errors=False,
- extra_ignore_errors=(), translate_newlines=True):
- """
- Utility function to execute a command and return the output.
- """
- if isinstance(command, list):
- debug(subprocess.list2cmdline(command))
- else:
- debug(command)
-
- if env:
- env.update(os.environ)
- else:
- env = os.environ.copy()
-
- env['LC_ALL'] = 'en_US.UTF-8'
- env['LANGUAGE'] = 'en_US.UTF-8'
-
- if sys.platform.startswith('win'):
- p = subprocess.Popen(command,
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- shell=False,
- universal_newlines=translate_newlines,
- env=env)
- else:
- p = subprocess.Popen(command,
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- shell=False,
- close_fds=True,
- universal_newlines=translate_newlines,
- env=env)
- if split_lines:
- data = p.stdout.readlines()
- else:
- data = p.stdout.read()
- rc = p.wait()
- if rc and not ignore_errors and rc not in extra_ignore_errors:
- die('Failed to execute command: %s\n%s' % (command, data))
-
- return data
-
-
-def die(msg=None):
- """
- Cleanly exits the program with an error message. Erases all remaining
- temporary files.
- """
- for tmpfile in tempfiles:
- try:
- os.unlink(tmpfile)
- except:
- pass
-
- if msg:
- print msg
-
- sys.exit(1)
-
-
-def walk_parents(path):
- """
- Walks up the tree to the root directory.
- """
- while os.path.splitdrive(path)[1] != os.sep:
- yield path
- path = os.path.dirname(path)
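-
-# For illustration (hypothetical path): walk_parents('/home/user/src/project')
-# yields '/home/user/src/project', '/home/user/src', '/home/user' and '/home',
-# stopping before the root directory itself.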
-
-
-def load_config_file(filename):
- """
- Loads data from a config file.
- """
- config = {
- 'TREES': {},
- }
-
- if os.path.exists(filename):
- try:
- execfile(filename, config)
- except:
- pass
-
- return config
-
-
-def tempt_fate(server, tool, changenum, diff_content=None,
- parent_diff_content=None, submit_as=None, retries=3):
- """
- Attempts to create a review request on a Review Board server and upload
- a diff. On success, the review request path is displayed.
- """
- try:
- save_draft = False
-
- if options.rid:
- review_request = server.get_review_request(options.rid)
- else:
- review_request = server.new_review_request(changenum, submit_as)
-
- if options.target_groups:
- server.set_review_request_field(review_request, 'target_groups',
- options.target_groups)
- save_draft = True
-
- if options.target_people:
- server.set_review_request_field(review_request, 'target_people',
- options.target_people)
- save_draft = True
-
- if options.summary:
- server.set_review_request_field(review_request, 'summary',
- options.summary)
- save_draft = True
-
- if options.branch:
- server.set_review_request_field(review_request, 'branch',
- options.branch)
- save_draft = True
-
- if options.bugs_closed:
- server.set_review_request_field(review_request, 'bugs_closed',
- options.bugs_closed)
- save_draft = True
-
- if options.description:
- server.set_review_request_field(review_request, 'description',
- options.description)
- save_draft = True
-
- if options.testing_done:
- server.set_review_request_field(review_request, 'testing_done',
- options.testing_done)
- save_draft = True
-
- if save_draft:
- server.save_draft(review_request)
- except APIError, e:
- rsp, = e.args
- if rsp['err']['code'] == 103: # Not logged in
- retries = retries - 1
-
- # We had an odd issue where the server ended up a couple of
- # years in the future. Login succeeds but the cookie date was
- # "odd" so use of the cookie appeared to fail and eventually
- # ended up at max recursion depth :-(. Check for a maximum
- # number of retries.
- if retries >= 0:
- server.login(force=True)
- tempt_fate(server, tool, changenum, diff_content,
- parent_diff_content, submit_as, retries=retries)
- return
-
- if options.rid:
- die("Error getting review request %s: %s (code %s)" % \
- (options.rid, rsp['err']['msg'], rsp['err']['code']))
- else:
- die("Error creating review request: %s (code %s)" % \
- (rsp['err']['msg'], rsp['err']['code']))
-
-
- if not server.info.supports_changesets or not options.change_only:
- try:
- server.upload_diff(review_request, diff_content,
- parent_diff_content)
- except APIError, e:
- rsp, = e.args
- print "Error uploading diff: %s (%s)" % (rsp['err']['msg'],
- rsp['err']['code'])
- debug(rsp)
- die("Your review request still exists, but the diff is not " +
- "attached.")
-
- if options.publish:
- server.publish(review_request)
-
- request_url = 'r/' + str(review_request['id'])
- review_url = urljoin(server.url, request_url)
-
- if not review_url.startswith('http'):
- review_url = 'http://%s' % review_url
-
- print "Review request #%s posted." % (review_request['id'],)
- print
- print review_url
-
- return review_url
-
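The "not logged in" branch above is a bounded retry with re-authentication. A minimal generic sketch of that shape (AuthError, with_relogin and the usage comment are hypothetical names, not Review Board API):

    class AuthError(Exception):
        """Stand-in for the APIError(code=103) case handled above."""

    def with_relogin(operation, login, retries=3):
        # Run operation(); on an auth failure, log in again and retry,
        # but only a bounded number of times so a stale cookie cannot
        # recurse forever (the failure mode described in the comment above).
        try:
            return operation()
        except AuthError:
            if retries <= 0:
                raise
            login()
            return with_relogin(operation, login, retries - 1)

    # Hypothetical usage:
    #   with_relogin(lambda: server.save_draft(review_request),
    #                lambda: server.login(force=True))
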
-
-def parse_options(args):
- parser = OptionParser(usage="%prog [-pond] [-r review_id] [changenum]",
- version="%prog " + VERSION)
-
- parser.add_option("-p", "--publish",
- dest="publish", action="store_true", default=PUBLISH,
- help="publish the review request immediately after "
- "submitting")
- parser.add_option("-r", "--review-request-id",
- dest="rid", metavar="ID", default=None,
- help="existing review request ID to update")
- parser.add_option("-o", "--open",
- dest="open_browser", action="store_true",
- default=OPEN_BROWSER,
- help="open a web browser to the review request page")
- parser.add_option("-n", "--output-diff",
- dest="output_diff_only", action="store_true",
- default=False,
- help="outputs a diff to the console and exits. "
- "Does not post")
- parser.add_option("--server",
- dest="server", default=REVIEWBOARD_URL,
- metavar="SERVER",
- help="specify a different Review Board server "
- "to use")
- parser.add_option("--diff-only",
- dest="diff_only", action="store_true", default=False,
- help="uploads a new diff, but does not update "
- "info from changelist")
- parser.add_option("--target-groups",
- dest="target_groups", default=TARGET_GROUPS,
- help="names of the groups who will perform "
- "the review")
- parser.add_option("--target-people",
- dest="target_people", default=TARGET_PEOPLE,
- help="names of the people who will perform "
- "the review")
- parser.add_option("--summary",
- dest="summary", default=None,
- help="summary of the review ")
- parser.add_option("--description",
- dest="description", default=None,
- help="description of the review ")
- parser.add_option("--description-file",
- dest="description_file", default=None,
- help="text file containing a description of the review")
- parser.add_option("--guess-summary",
- dest="guess_summary", action="store_true",
- default=False,
- help="guess summary from the latest commit (git/"
- "hgsubversion only)")
- parser.add_option("--guess-description",
- dest="guess_description", action="store_true",
- default=False,
- help="guess description based on commits on this branch "
- "(git/hgsubversion only)")
- parser.add_option("--testing-done",
- dest="testing_done", default=None,
- help="details of testing done ")
- parser.add_option("--testing-done-file",
- dest="testing_file", default=None,
- help="text file containing details of testing done ")
- parser.add_option("--branch",
- dest="branch", default=None,
- help="affected branch ")
- parser.add_option("--bugs-closed",
- dest="bugs_closed", default=None,
- help="list of bugs closed ")
- parser.add_option("--revision-range",
- dest="revision_range", default=None,
- help="generate the diff for review based on given "
- "revision range")
- parser.add_option("--label",
- dest="label", default=None,
- help="label (ClearCase Only) ")
- parser.add_option("--submit-as",
- dest="submit_as", default=SUBMIT_AS, metavar="USERNAME",
- help="user name to be recorded as the author of the "
- "review request, instead of the logged in user")
- parser.add_option("--username",
- dest="username", default=None, metavar="USERNAME",
- help="user name to be supplied to the reviewboard server")
- parser.add_option("--password",
- dest="password", default=None, metavar="PASSWORD",
- help="password to be supplied to the reviewboard server")
- parser.add_option("--change-only",
- dest="change_only", action="store_true",
- default=False,
- help="updates info from changelist, but does "
- "not upload a new diff (only available if your "
- "repository supports changesets)")
- parser.add_option("--parent",
- dest="parent_branch", default=None,
- metavar="PARENT_BRANCH",
- help="the parent branch this diff should be against "
- "(only available if your repository supports "
- "parent diffs)")
- parser.add_option("--p4-client",
- dest="p4_client", default=None,
- help="the Perforce client name that the review is in")
- parser.add_option("--p4-port",
- dest="p4_port", default=None,
- help="the Perforce servers IP address that the review is on")
- parser.add_option("--repository-url",
- dest="repository_url", default=None,
- help="the url for a repository for creating a diff "
- "outside of a working copy (currently only supported "
- "by Subversion). Requires --revision-range")
- parser.add_option("-d", "--debug",
- action="store_true", dest="debug", default=DEBUG,
- help="display debug output")
-
- (globals()["options"], args) = parser.parse_args(args)
-
- if options.description and options.description_file:
- sys.stderr.write("The --description and --description-file options "
- "are mutually exclusive.\n")
- sys.exit(1)
-
- if options.description_file:
- if os.path.exists(options.description_file):
- fp = open(options.description_file, "r")
- options.description = fp.read()
- fp.close()
- else:
- sys.stderr.write("The description file %s does not exist.\n" %
- options.description_file)
- sys.exit(1)
-
- if options.testing_done and options.testing_file:
- sys.stderr.write("The --testing-done and --testing-done-file options "
- "are mutually exclusive.\n")
- sys.exit(1)
-
- if options.testing_file:
- if os.path.exists(options.testing_file):
- fp = open(options.testing_file, "r")
- options.testing_done = fp.read()
- fp.close()
- else:
- sys.stderr.write("The testing file %s does not exist.\n" %
- options.testing_file)
- sys.exit(1)
-
- if options.repository_url and not options.revision_range:
- sys.stderr.write("The --repository-url option requires the "
- "--revision-range option.\n")
- sys.exit(1)
-
- return args
-
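For reference, the mutual-exclusion check used above for --description vs. --description-file, reduced to a self-contained sketch (only the two option names are kept from the original):

    import sys
    from optparse import OptionParser

    def parse(args):
        parser = OptionParser(usage="%prog [options]")
        parser.add_option("--description", dest="description", default=None)
        parser.add_option("--description-file", dest="description_file",
                          default=None)
        options, rest = parser.parse_args(args)

        # optparse has no built-in exclusivity, so it is checked by hand.
        if options.description and options.description_file:
            sys.stderr.write("The --description and --description-file "
                             "options are mutually exclusive.\n")
            sys.exit(1)
        return options, rest

    # e.g.  parse(["--description", "Fix the frobnicator"])
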
-def determine_client():
-
- repository_info = None
- tool = None
-
- # Try to find the SCM Client we're going to be working with.
- for tool in SCMCLIENTS:
- repository_info = tool.get_repository_info()
-
- if repository_info:
- break
-
- if not repository_info:
- if options.repository_url:
- print "No supported repository could be access at the supplied url."
- else:
- print "The current directory does not contain a checkout from a"
- print "supported source code repository."
- sys.exit(1)
-
- # Verify that options specific to an SCM Client have not been mis-used.
- if options.change_only and not repository_info.supports_changesets:
- sys.stderr.write("The --change-only option is not valid for the "
- "current SCM client.\n")
- sys.exit(1)
-
- if options.parent_branch and not repository_info.supports_parent_diffs:
- sys.stderr.write("The --parent option is not valid for the "
- "current SCM client.\n")
- sys.exit(1)
-
- if ((options.p4_client or options.p4_port) and \
- not isinstance(tool, PerforceClient)):
- sys.stderr.write("The --p4-client and --p4-port options are not valid "
- "for the current SCM client.\n")
- sys.exit(1)
-
- return (repository_info, tool)
-
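determine_client() is a first-match scan over SCMCLIENTS; the same shape as a tiny standalone sketch with dummy clients (FakeClient is invented for illustration):

    class FakeClient(object):
        # Stand-in for SVNClient, GitClient, etc.
        def __init__(self, name, info):
            self.name, self.info = name, info
        def get_repository_info(self):
            return self.info

    clients = [FakeClient('svn', None),              # not an svn checkout
               FakeClient('git', {'path': '.git'})]  # first client that answers wins

    repository_info = None
    tool = None
    for tool in clients:
        repository_info = tool.get_repository_info()
        if repository_info:
            break

    if repository_info:
        print "using %s client" % tool.name
    else:
        print "no supported repository found"
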
-def main():
- if 'USERPROFILE' in os.environ:
- homepath = os.path.join(os.environ["USERPROFILE"], "Local Settings",
- "Application Data")
- elif 'HOME' in os.environ:
- homepath = os.environ["HOME"]
- else:
- homepath = ''
-
- # Load the config and cookie files
- globals()['user_config'] = \
- load_config_file(os.path.join(homepath, ".reviewboardrc"))
- cookie_file = os.path.join(homepath, ".post-review-cookies.txt")
-
- args = parse_options(sys.argv[1:])
-
- repository_info, tool = determine_client()
-
- # Try to find a valid Review Board server to use.
- if options.server:
- server_url = options.server
- else:
- server_url = tool.scan_for_server(repository_info)
-
- if not server_url:
- print "Unable to find a Review Board server for this source code tree."
- sys.exit(1)
-
- server = ReviewBoardServer(server_url, repository_info, cookie_file)
-
- if repository_info.supports_changesets:
- changenum = tool.get_changenum(args)
- else:
- changenum = None
-
- if options.revision_range:
- diff = tool.diff_between_revisions(options.revision_range, args,
- repository_info)
- parent_diff = None
- elif options.label and isinstance(tool, ClearCaseClient):
- diff, parent_diff = tool.diff_label(options.label)
- else:
- diff, parent_diff = tool.diff(args)
-
- if options.output_diff_only:
- print diff
- sys.exit(0)
-
- # Let's begin.
- server.login()
-
- review_url = tempt_fate(server, tool, changenum, diff_content=diff,
- parent_diff_content=parent_diff,
- submit_as=options.submit_as)
-
- # Load the review up in the browser if requested to:
- if options.open_browser:
- try:
- import webbrowser
- if 'open_new_tab' in dir(webbrowser):
- # open_new_tab is only in python 2.5+
- webbrowser.open_new_tab(review_url)
- elif 'open_new' in dir(webbrowser):
- webbrowser.open_new(review_url)
- else:
- os.system( 'start %s' % review_url )
- except:
- print 'Error opening review URL: %s' % review_url
-
-
-if __name__ == "__main__":
- main()
diff --git a/test/review b/test/review
deleted file mode 100755
index e1ccb9c0af..0000000000
--- a/test/review
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/bin/sh
-
-if [ -z $1 ] || [ "$1" = "-h" ] || [ "$1" = "--help" ] || [ "$1" = "-help" ] || [ "$1" = "-?" ]; then
- echo "Usage: `basename $0` [rev] [args]\n"
- echo " [rev] : either the revision number without leading 'r' (post-commit),"
- echo " or '-loc' to create a review from current local changes (pre-commit)\n"
- echo " [args] : optional arguments:"
- echo " -r ID existing review request ID to update\n"
- exit 1
-fi
-
-POSTREVIEW=`dirname $0`/postreview.py
-
-if [ "$1" = "-loc" ]; then
- echo "creating review request from local changes..."
- REVARG=""
- LOG=""
- SUMMARY="local changes"
- REPO=""
-else
- REV=$1
- PREV=`expr $REV - 1`
- if [ $? -ne 0 ]; then
- echo "argument revision not a number: $REV"
- exit 1
- fi
-
- echo "creating review request for changeset $REV..."
-
- LOG="`svn log http://lampsvn.epfl.ch/svn-repos/scala -c $REV`"
- if [ $? -ne 0 ]; then
- echo "could not get svn log for revision $REV"
- exit 1
- fi
-
- REVARG="--revision-range=$PREV:$REV"
- SUMMARY="r$REV"
- REPO="--repository-url=http://lampsvn.epfl.ch/svn-repos/scala"
-fi
-
-
-shift # remove parameter $1 (revision)
-
-python $POSTREVIEW --server="https://chara2.epfl.ch" $REVARG --summary="$SUMMARY" --description="$LOG" $REPO -o $@
diff --git a/test/scaladoc/resources/SI_4715.scala b/test/scaladoc/resources/SI_4715.scala
index 29daf43717..de286956bc 100644
--- a/test/scaladoc/resources/SI_4715.scala
+++ b/test/scaladoc/resources/SI_4715.scala
@@ -1,7 +1,7 @@
class SI_4715 {
type :+:[X,Y] = Map[X,Y]
- val withType: Int :+: Double = error("")
+ val withType: Int :+: Double = sys.error("")
trait :-:[X,Y]
- val withTrait: Int :-: Double = error("")
+ val withTrait: Int :-: Double = sys.error("")
}
diff --git a/test/scaladoc/resources/Trac4325.scala b/test/scaladoc/resources/Trac4325.scala
index ffb968d571..ccc2f1900a 100644
--- a/test/scaladoc/resources/Trac4325.scala
+++ b/test/scaladoc/resources/Trac4325.scala
@@ -1,5 +1,5 @@
-case class WithSynthetic
+case class WithSynthetic()
-case class WithObject
+case class WithObject()
object WithObject
diff --git a/test/scaladoc/resources/doc-root/Any.scala b/test/scaladoc/resources/doc-root/Any.scala
index 031b7d9d8c..fd4c287b4f 100644
--- a/test/scaladoc/resources/doc-root/Any.scala
+++ b/test/scaladoc/resources/doc-root/Any.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/test/scaladoc/resources/doc-root/AnyRef.scala b/test/scaladoc/resources/doc-root/AnyRef.scala
index 7d8b9f9e76..362fbcf0f5 100644
--- a/test/scaladoc/resources/doc-root/AnyRef.scala
+++ b/test/scaladoc/resources/doc-root/AnyRef.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/test/scaladoc/resources/doc-root/Nothing.scala b/test/scaladoc/resources/doc-root/Nothing.scala
index eed6066039..57f6fac3f9 100644
--- a/test/scaladoc/resources/doc-root/Nothing.scala
+++ b/test/scaladoc/resources/doc-root/Nothing.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/test/scaladoc/resources/doc-root/Null.scala b/test/scaladoc/resources/doc-root/Null.scala
index 7455e78ae7..931beb2d1a 100644
--- a/test/scaladoc/resources/doc-root/Null.scala
+++ b/test/scaladoc/resources/doc-root/Null.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/test/scaladoc/resources/implicits-base-res.scala b/test/scaladoc/resources/implicits-base-res.scala
index d6c0332c10..1d17e9a6d3 100644
--- a/test/scaladoc/resources/implicits-base-res.scala
+++ b/test/scaladoc/resources/implicits-base-res.scala
@@ -11,21 +11,21 @@ trait MyNumeric[R]
* - tests the complete type inference
* - the following inherited methods should appear:
* {{{
- * def convToGtColonDoubleA(x: Double) // pimpA3: with a constraint that T <: Double
- * def convToIntA(x: Int) // pimpA2: with a constraint that T = Int
- * def convToManifestA(x: T) // pimpA7: with 2 constraints: T: Manifest and T <: Double
- * def convToMyNumericA(x: T) // pimpA6: with a constraint that there is x: MyNumeric[T] implicit in scope
- * def convToNumericA(x: T) // pimpA1: with a constraint that there is x: Numeric[T] implicit in scope
- * def convToPimpedA(x: Bar[Foo[T]]) // pimpA5: no constraints, SHADOWED
- * def convToPimpedA(x: S) // pimpA4: with 3 constraints: T = Foo[Bar[S]], S: Foo and S: Bar, SHADOWED
- * def convToPimpedA(x: T) // pimpA0: with no constraints, SHADOWED
- * def convToTraversableOps(x: T) // pimpA7: with 2 constraints: T: Manifest and T <: Double
+ * def convToGtColonDoubleA(x: Double) // enrichA3: with a constraint that T <: Double
+ * def convToIntA(x: Int) // enrichA2: with a constraint that T = Int
+ * def convToManifestA(x: T) // enrichA7: with 2 constraints: T: Manifest and T <: Double
+ * def convToMyNumericA(x: T) // enrichA6: with a constraint that there is x: MyNumeric[T] implicit in scope
+ * def convToNumericA(x: T) // enrichA1: with a constraint that there is x: Numeric[T] implicit in scope
+ * def convToEnrichedA(x: Bar[Foo[T]]) // enrichA5: no constraints, SHADOWED
+ * def convToEnrichedA(x: S) // enrichA4: with 3 constraints: T = Foo[Bar[S]], S: Foo and S: Bar, SHADOWED
+ * def convToEnrichedA(x: T) // enrichA0: with no constraints, SHADOWED
+ * def convToTraversableOps(x: T) // enrichA7: with 2 constraints: T: Manifest and T <: Double
* // should not be abstract!
* }}}
*/
class A[T] {
- /** This should prevent the implicitly inherited `def convToPimpedA: T` from `pimpA0` from showing up */
- def convToPimpedA(x: T): T = sys.error("Let's check it out!")
+ /** This should prevent the implicitly inherited `def convToEnrichedA: T` from `enrichA0` from showing up */
+ def convToEnrichedA(x: T): T = sys.error("Let's check it out!")
/** This should check implicit member elimination in the case of subtyping */
def foo(a: T, b: AnyRef): T
}
@@ -33,15 +33,15 @@ class A[T] {
object A {
import language.implicitConversions // according to SIP18
- implicit def pimpA0[V](a: A[V]) = new PimpedA(a)
- implicit def pimpA1[ZBUR: Numeric](a: A[ZBUR]) = new NumericA[ZBUR](a)
- implicit def pimpA2(a: A[Int]) = new IntA(a)
- implicit def pimpA3(a: A[T] forSome { type T <: Double }) = new GtColonDoubleA(a)
- implicit def pimpA4[S](a: A[Foo[Bar[S]]])(implicit foo: Foo[S], bar: Bar[S]): PimpedA[S] = sys.error("not implemented")
- implicit def pimpA5[Z](a: A[Z]): PimpedA[Bar[Foo[Z]]] = sys.error("not implemented")
- implicit def pimpA6[Z: MyNumeric](a: A[Z]) = new MyNumericA[Z](a)
+ implicit def enrichA0[V](a: A[V]) = new EnrichedA(a)
+ implicit def enrichA1[ZBUR: Numeric](a: A[ZBUR]) = new NumericA[ZBUR](a)
+ implicit def enrichA2(a: A[Int]) = new IntA(a)
+ implicit def enrichA3(a: A[T] forSome { type T <: Double }) = new GtColonDoubleA(a)
+ implicit def enrichA4[S](a: A[Foo[Bar[S]]])(implicit foo: Foo[S], bar: Bar[S]): EnrichedA[S] = sys.error("not implemented")
+ implicit def enrichA5[Z](a: A[Z]): EnrichedA[Bar[Foo[Z]]] = sys.error("not implemented")
+ implicit def enrichA6[Z: MyNumeric](a: A[Z]) = new MyNumericA[Z](a)
// TODO: Add H <: Double and see why it crashes for C and D -- context bounds, need to check!
- implicit def pimpA7[H <: Double : Manifest](a: A[H]) = new ManifestA[H](a) with MyTraversableOps[H] { def convToTraversableOps(x: H): H = sys.error("no") }
+ implicit def enrichA7[H <: Double : Manifest](a: A[H]) = new ManifestA[H](a) with MyTraversableOps[H] { def convToTraversableOps(x: H): H = sys.error("no") }
}
@@ -49,14 +49,14 @@ object A {
* - tests the existential type solving
* - the following inherited methods should appear:
* {{{
- * def convToGtColonDoubleA(x: Double) // pimpA3: no constraints
- * def convToManifestA(x: Double) // pimpA7: no constraints
- * def convToMyNumericA(x: Double) // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope
- * def convToNumericA(x: Double) // pimpA1: no constraintsd
- * def convToPimpedA(x: Bar[Foo[Double]]) // pimpA5: no constraints, SHADOWED
- * def convToPimpedA(x: Double) // pimpA0: no constraints, SHADOWED
- * def convToTraversableOps(x: Double) // pimpA7: no constraints
- * // should not be abstract!
+ * def convToGtColonDoubleA(x: Double) // enrichA3: no constraints
+ * def convToManifestA(x: Double) // enrichA7: no constraints
+ * def convToMyNumericA(x: Double) // enrichA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope
+ * def convToNumericA(x: Double) // enrichA1: no constraintsd
+ * def convToEnrichedA(x: Bar[Foo[Double]]) // enrichA5: no constraints, SHADOWED
+ * def convToEnrichedA(x: Double) // enrichA0: no constraints, SHADOWED
+ * def convToTraversableOps(x: Double) // enrichA7: no constraints
+ * // should not be abstract!
* }}}
*/
class B extends A[Double]
@@ -67,11 +67,11 @@ object B extends A
* - tests asSeenFrom
* - the following inherited methods should appear:
* {{{
- * def convToIntA(x: Int) // pimpA2: no constraints
- * def convToMyNumericA(x: Int) // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Int] implicit in scope
- * def convToNumericA(x: Int) // pimpA1: no constraints
- * def convToPimpedA(x: Int) // pimpA0: no constraints, SHADOWED
- * def convToPimpedA(x: Bar[Foo[Int]]) // pimpA5: no constraints, SHADOWED
+ * def convToIntA(x: Int) // enrichA2: no constraints
+ * def convToMyNumericA(x: Int) // enrichA6: (if showAll is set) with a constraint that there is x: MyNumeric[Int] implicit in scope
+ * def convToNumericA(x: Int) // enrichA1: no constraints
+ * def convToEnrichedA(x: Int) // enrichA0: no constraints, SHADOWED
+ * def convToEnrichedA(x: Bar[Foo[Int]]) // enrichA5: no constraints, SHADOWED
* }}}
*/
class C extends A[Int]
@@ -82,10 +82,10 @@ object C extends A
* - tests implicit elimination
* - the following inherited methods should appear:
* {{{
- * def convToMyNumericA(x: String) // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[String] implicit in scope
- * def convToNumericA(x: String) // pimpA1: (if showAll is set) with a constraint that there is x: Numeric[String] implicit in scope
- * def convToPimpedA(x: Bar[Foo[String]]) // pimpA5: no constraints, SHADOWED
- * def convToPimpedA(x: String) // pimpA0: no constraints, SHADOWED
+ * def convToMyNumericA(x: String) // enrichA6: (if showAll is set) with a constraint that there is x: MyNumeric[String] implicit in scope
+ * def convToNumericA(x: String) // enrichA1: (if showAll is set) with a constraint that there is x: Numeric[String] implicit in scope
+ * def convToEnrichedA(x: Bar[Foo[String]]) // enrichA5: no constraints, SHADOWED
+ * def convToEnrichedA(x: String) // enrichA0: no constraints, SHADOWED
* }}}
*/
class D extends A[String]
@@ -93,12 +93,12 @@ class D extends A[String]
object D extends A
-/** PimpedA class <br/>
+/** EnrichedA class <br/>
* - tests simple inheritance and asSeenFrom
* - A, B and C should be implicitly converted to this */
-class PimpedA[V](a: A[V]) {
- /** The convToPimpedA: V documentation... */
- def convToPimpedA(x: V): V = sys.error("Not implemented")
+class EnrichedA[V](a: A[V]) {
+ /** The convToEnrichedA: V documentation... */
+ def convToEnrichedA(x: V): V = sys.error("Not implemented")
}
/** NumericA class <br/>
diff --git a/test/scaladoc/resources/links.scala b/test/scaladoc/resources/links.scala
index bd69665357..ecac9c63cf 100644
--- a/test/scaladoc/resources/links.scala
+++ b/test/scaladoc/resources/links.scala
@@ -25,6 +25,7 @@ package scala.test.scaladoc.links {
object Target {
type T = Int => Int
type S = Int
+ type ::[X] = scala.collection.immutable.::[X]
class C
def foo(i: Int) = 2
def foo(z: String) = 3
@@ -46,6 +47,7 @@ package scala.test.scaladoc.links {
* - [[[[Target!.foo[A[_[_]]]* trait Target -> def foo with 3 nested tparams]]]] (should exercise nested parens)
* - [[Target$.T object Target -> type T]]
* - [[Target$.S object Target -> type S]]
+ * - [[Target$.:: object Target -> type ::]]
* - [[Target$.foo(z:Str* object Target -> def foo]]
* - [[Target$.bar object Target -> def bar]]
* - [[[[Target$.foo[A[_[_]]]* trait Target -> def foo with 3 nested tparams]]]] (should exercise nested parens)
diff --git a/test/scaladoc/run/SI-6812.scala b/test/scaladoc/run/SI-6812.scala
deleted file mode 100644
index fbd9588ede..0000000000
--- a/test/scaladoc/run/SI-6812.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-import scala.tools.nsc.doc.model._
-import scala.tools.partest.ScaladocModelTest
-import language._
-
-object Test extends ScaladocModelTest {
-
- override def code = """
- import scala.reflect.macros.Context
- import language.experimental.macros
-
- object Macros {
- def impl(c: Context) = c.literalUnit
- def foo = macro impl
- }
-
- class C {
- def bar = Macros.foo
- }
- """
-
- def scaladocSettings = ""
- override def extraSettings = super.extraSettings + " -Ymacro-no-expand"
- def testModel(root: Package) = ()
-}
diff --git a/test/scaladoc/run/implicits-base.scala b/test/scaladoc/run/implicits-base.scala
index 3d57306f5d..8f8652cdb3 100644
--- a/test/scaladoc/run/implicits-base.scala
+++ b/test/scaladoc/run/implicits-base.scala
@@ -25,54 +25,54 @@ object Test extends ScaladocModelTest {
val A = base._class("A")
- // def convToPimpedA(x: T) // pimpA0: with no constraints, SHADOWED
- conv = A._conversion(A.qualifiedName + ".pimpA0")
+ // def convToEnrichedA(x: T) // enrichA0: with no constraints, SHADOWED
+ conv = A._conversion(A.qualifiedName + ".enrichA0")
assert(conv.members.length == 1)
assert(conv.constraints.length == 0)
- assert(isShadowed(conv._member("convToPimpedA")))
- assert(conv._member("convToPimpedA").resultType.name == "T")
+ assert(isShadowed(conv._member("convToEnrichedA")))
+ assert(conv._member("convToEnrichedA").resultType.name == "T")
- // def convToNumericA: T // pimpA1: with a constraint that there is x: Numeric[T] implicit in scope
- conv = A._conversion(A.qualifiedName + ".pimpA1")
+ // def convToNumericA: T // enrichA1: with a constraint that there is x: Numeric[T] implicit in scope
+ conv = A._conversion(A.qualifiedName + ".enrichA1")
assert(conv.members.length == 1)
assert(conv.constraints.length == 1)
assert(conv._member("convToNumericA").resultType.name == "T")
- // def convToIntA: Int // pimpA2: with a constraint that T = Int
- conv = A._conversion(A.qualifiedName + ".pimpA2")
+ // def convToIntA: Int // enrichA2: with a constraint that T = Int
+ conv = A._conversion(A.qualifiedName + ".enrichA2")
assert(conv.members.length == 1)
assert(conv.constraints.length == 1)
assert(conv._member("convToIntA").resultType.name == "Int")
- // def convToGtColonDoubleA: Double // pimpA3: with a constraint that T <: Double
- conv = A._conversion(A.qualifiedName + ".pimpA3")
+ // def convToGtColonDoubleA: Double // enrichA3: with a constraint that T <: Double
+ conv = A._conversion(A.qualifiedName + ".enrichA3")
assert(conv.members.length == 1)
assert(conv.constraints.length == 1)
assert(conv._member("convToGtColonDoubleA").resultType.name == "Double")
- // def convToPimpedA: S // pimpA4: with 3 constraints: T = Foo[Bar[S]], S: Foo and S: Bar
- conv = A._conversion(A.qualifiedName + ".pimpA4")
+ // def convToEnrichedA: S // enrichA4: with 3 constraints: T = Foo[Bar[S]], S: Foo and S: Bar
+ conv = A._conversion(A.qualifiedName + ".enrichA4")
assert(conv.members.length == 1)
assert(conv.constraints.length == 3)
- assert(conv._member("convToPimpedA").resultType.name == "S")
+ assert(conv._member("convToEnrichedA").resultType.name == "S")
- // def convToPimpedA: Bar[Foo[T]] // pimpA5: no constraints
- conv = A._conversion(A.qualifiedName + ".pimpA5")
+ // def convToEnrichedA: Bar[Foo[T]] // enrichA5: no constraints
+ conv = A._conversion(A.qualifiedName + ".enrichA5")
assert(conv.members.length == 1)
assert(conv.constraints.length == 0)
- assert(isShadowed(conv._member("convToPimpedA")))
- assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[T]]")
+ assert(isShadowed(conv._member("convToEnrichedA")))
+ assert(conv._member("convToEnrichedA").resultType.name == "Bar[Foo[T]]")
- // def convToMyNumericA: T // pimpA6: with a constraint that there is x: MyNumeric[T] implicit in scope
- conv = A._conversion(A.qualifiedName + ".pimpA6")
+ // def convToMyNumericA: T // enrichA6: with a constraint that there is x: MyNumeric[T] implicit in scope
+ conv = A._conversion(A.qualifiedName + ".enrichA6")
assert(conv.members.length == 1)
assert(conv.constraints.length == 1)
assert(conv._member("convToMyNumericA").resultType.name == "T")
- // def convToManifestA: T // pimpA7: with 2 constraints: T: Manifest and T <: Double
- // def convToTraversableOps: T // pimpA7: with 2 constraints: T: Manifest and T <: Double
+ // def convToManifestA: T // enrichA7: with 2 constraints: T: Manifest and T <: Double
+ // def convToTraversableOps: T // enrichA7: with 2 constraints: T: Manifest and T <: Double
// should not be abstract!
- conv = A._conversion(A.qualifiedName + ".pimpA7")
+ conv = A._conversion(A.qualifiedName + ".enrichA7")
assert(conv.members.length == 2)
assert(conv.constraints.length == 2)
assert(conv._member("convToManifestA").resultType.name == "T")
@@ -84,45 +84,45 @@ object Test extends ScaladocModelTest {
val B = base._class("B")
// these conversions should not affect B
- assert(B._conversions(A.qualifiedName + ".pimpA2").isEmpty)
- assert(B._conversions(A.qualifiedName + ".pimpA4").isEmpty)
+ assert(B._conversions(A.qualifiedName + ".enrichA2").isEmpty)
+ assert(B._conversions(A.qualifiedName + ".enrichA4").isEmpty)
- // def convToPimpedA(x: Double) // pimpA0: no constraints, SHADOWED
- conv = B._conversion(A.qualifiedName + ".pimpA0")
+ // def convToEnrichedA(x: Double) // enrichA0: no constraints, SHADOWED
+ conv = B._conversion(A.qualifiedName + ".enrichA0")
assert(conv.members.length == 1)
assert(conv.constraints.length == 0)
- assert(isShadowed(conv._member("convToPimpedA")))
- assert(conv._member("convToPimpedA").resultType.name == "Double")
+ assert(isShadowed(conv._member("convToEnrichedA")))
+ assert(conv._member("convToEnrichedA").resultType.name == "Double")
- // def convToNumericA: Double // pimpA1: no constraintsd
- conv = B._conversion(A.qualifiedName + ".pimpA1")
+ // def convToNumericA: Double // enrichA1: no constraintsd
+ conv = B._conversion(A.qualifiedName + ".enrichA1")
assert(conv.members.length == 1)
assert(conv.constraints.length == 0)
assert(conv._member("convToNumericA").resultType.name == "Double")
- // def convToGtColonDoubleA: Double // pimpA3: no constraints
- conv = B._conversion(A.qualifiedName + ".pimpA3")
+ // def convToGtColonDoubleA: Double // enrichA3: no constraints
+ conv = B._conversion(A.qualifiedName + ".enrichA3")
assert(conv.members.length == 1)
assert(conv.constraints.length == 0)
assert(conv._member("convToGtColonDoubleA").resultType.name == "Double")
- // def convToPimpedA: Bar[Foo[Double]] // pimpA5: no constraints
- conv = B._conversion(A.qualifiedName + ".pimpA5")
+ // def convToEnrichedA: Bar[Foo[Double]] // enrichA5: no constraints
+ conv = B._conversion(A.qualifiedName + ".enrichA5")
assert(conv.members.length == 1)
assert(conv.constraints.length == 0)
- assert(isShadowed(conv._member("convToPimpedA")))
- assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[Double]]")
+ assert(isShadowed(conv._member("convToEnrichedA")))
+ assert(conv._member("convToEnrichedA").resultType.name == "Bar[Foo[Double]]")
- // def convToMyNumericA: Double // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope
- conv = B._conversion(A.qualifiedName + ".pimpA6")
+ // def convToMyNumericA: Double // enrichA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope
+ conv = B._conversion(A.qualifiedName + ".enrichA6")
assert(conv.members.length == 1)
assert(conv.constraints.length == 1)
assert(conv._member("convToMyNumericA").resultType.name == "Double")
- // def convToManifestA: Double // pimpA7: no constraints
- // def convToTraversableOps: Double // pimpA7: no constraints
+ // def convToManifestA: Double // enrichA7: no constraints
+ // def convToTraversableOps: Double // enrichA7: no constraints
// // should not be abstract!
- conv = B._conversion(A.qualifiedName + ".pimpA7")
+ conv = B._conversion(A.qualifiedName + ".enrichA7")
assert(conv.members.length == 2)
assert(conv.constraints.length == 0)
assert(conv._member("convToManifestA").resultType.name == "Double")
@@ -134,38 +134,38 @@ object Test extends ScaladocModelTest {
val C = base._class("C")
// these conversions should not affect C
- assert(C._conversions(A.qualifiedName + ".pimpA3").isEmpty)
- assert(C._conversions(A.qualifiedName + ".pimpA4").isEmpty)
- assert(C._conversions(A.qualifiedName + ".pimpA7").isEmpty)
+ assert(C._conversions(A.qualifiedName + ".enrichA3").isEmpty)
+ assert(C._conversions(A.qualifiedName + ".enrichA4").isEmpty)
+ assert(C._conversions(A.qualifiedName + ".enrichA7").isEmpty)
- // def convToPimpedA(x: Int) // pimpA0: no constraints, SHADOWED
- conv = C._conversion(A.qualifiedName + ".pimpA0")
+ // def convToEnrichedA(x: Int) // enrichA0: no constraints, SHADOWED
+ conv = C._conversion(A.qualifiedName + ".enrichA0")
assert(conv.members.length == 1)
assert(conv.constraints.length == 0)
- assert(isShadowed(conv._member("convToPimpedA")))
- assert(conv._member("convToPimpedA").resultType.name == "Int")
+ assert(isShadowed(conv._member("convToEnrichedA")))
+ assert(conv._member("convToEnrichedA").resultType.name == "Int")
- // def convToNumericA: Int // pimpA1: no constraints
- conv = C._conversion(A.qualifiedName + ".pimpA1")
+ // def convToNumericA: Int // enrichA1: no constraints
+ conv = C._conversion(A.qualifiedName + ".enrichA1")
assert(conv.members.length == 1)
assert(conv.constraints.length == 0)
assert(conv._member("convToNumericA").resultType.name == "Int")
- // def convToIntA: Int // pimpA2: no constraints
- conv = C._conversion(A.qualifiedName + ".pimpA2")
+ // def convToIntA: Int // enrichA2: no constraints
+ conv = C._conversion(A.qualifiedName + ".enrichA2")
assert(conv.members.length == 1)
assert(conv.constraints.length == 0)
assert(conv._member("convToIntA").resultType.name == "Int")
- // def convToPimpedA: Bar[Foo[Int]] // pimpA5: no constraints
- conv = C._conversion(A.qualifiedName + ".pimpA5")
+ // def convToEnrichedA: Bar[Foo[Int]] // enrichA5: no constraints
+ conv = C._conversion(A.qualifiedName + ".enrichA5")
assert(conv.members.length == 1)
assert(conv.constraints.length == 0)
- assert(isShadowed(conv._member("convToPimpedA")))
- assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[Int]]")
+ assert(isShadowed(conv._member("convToEnrichedA")))
+ assert(conv._member("convToEnrichedA").resultType.name == "Bar[Foo[Int]]")
- // def convToMyNumericA: Int // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Int] implicit in scope
- conv = C._conversion(A.qualifiedName + ".pimpA6")
+ // def convToMyNumericA: Int // enrichA6: (if showAll is set) with a constraint that there is x: MyNumeric[Int] implicit in scope
+ conv = C._conversion(A.qualifiedName + ".enrichA6")
assert(conv.members.length == 1)
assert(conv.constraints.length == 1)
assert(conv._member("convToMyNumericA").resultType.name == "Int")
@@ -175,33 +175,33 @@ object Test extends ScaladocModelTest {
val D = base._class("D")
// these conversions should not affect D
- assert(D._conversions(A.qualifiedName + ".pimpA2").isEmpty)
- assert(D._conversions(A.qualifiedName + ".pimpA3").isEmpty)
- assert(D._conversions(A.qualifiedName + ".pimpA4").isEmpty)
- assert(D._conversions(A.qualifiedName + ".pimpA7").isEmpty)
+ assert(D._conversions(A.qualifiedName + ".enrichA2").isEmpty)
+ assert(D._conversions(A.qualifiedName + ".enrichA3").isEmpty)
+ assert(D._conversions(A.qualifiedName + ".enrichA4").isEmpty)
+ assert(D._conversions(A.qualifiedName + ".enrichA7").isEmpty)
- // def convToPimpedA(x: String) // pimpA0: no constraints, SHADOWED
- conv = D._conversion(A.qualifiedName + ".pimpA0")
+ // def convToEnrichedA(x: String) // enrichA0: no constraints, SHADOWED
+ conv = D._conversion(A.qualifiedName + ".enrichA0")
assert(conv.members.length == 1)
assert(conv.constraints.length == 0)
- assert(isShadowed(conv._member("convToPimpedA")))
- assert(conv._member("convToPimpedA").resultType.name == "String")
+ assert(isShadowed(conv._member("convToEnrichedA")))
+ assert(conv._member("convToEnrichedA").resultType.name == "String")
- // def convToNumericA: String // pimpA1: (if showAll is set) with a constraint that there is x: Numeric[String] implicit in scope
- conv = D._conversion(A.qualifiedName + ".pimpA1")
+ // def convToNumericA: String // enrichA1: (if showAll is set) with a constraint that there is x: Numeric[String] implicit in scope
+ conv = D._conversion(A.qualifiedName + ".enrichA1")
assert(conv.members.length == 1)
assert(conv.constraints.length == 1)
assert(conv._member("convToNumericA").resultType.name == "String")
- // def convToPimpedA: Bar[Foo[String]] // pimpA5: no constraints
- conv = D._conversion(A.qualifiedName + ".pimpA5")
+ // def convToEnrichedA: Bar[Foo[String]] // enrichA5: no constraints
+ conv = D._conversion(A.qualifiedName + ".enrichA5")
assert(conv.members.length == 1)
assert(conv.constraints.length == 0)
- assert(isShadowed(conv._member("convToPimpedA")))
- assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[String]]")
+ assert(isShadowed(conv._member("convToEnrichedA")))
+ assert(conv._member("convToEnrichedA").resultType.name == "Bar[Foo[String]]")
- // def convToMyNumericA: String // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[String] implicit in scope
- conv = D._conversion(A.qualifiedName + ".pimpA6")
+ // def convToMyNumericA: String // enrichA6: (if showAll is set) with a constraint that there is x: MyNumeric[String] implicit in scope
+ conv = D._conversion(A.qualifiedName + ".enrichA6")
assert(conv.members.length == 1)
assert(conv.constraints.length == 1)
assert(conv._member("convToMyNumericA").resultType.name == "String")
diff --git a/test/scaladoc/run/links.scala b/test/scaladoc/run/links.scala
index fde24edb2a..64441c2d95 100644
--- a/test/scaladoc/run/links.scala
+++ b/test/scaladoc/run/links.scala
@@ -26,7 +26,7 @@ object Test extends ScaladocModelTest {
val memberLinks = countLinks(TEST.comment.get, _.link.isInstanceOf[LinkToMember[_, _]])
val templateLinks = countLinks(TEST.comment.get, _.link.isInstanceOf[LinkToTpl[_]])
- assert(memberLinks == 17, memberLinks + " == 17 (the member links in object TEST)")
+ assert(memberLinks == 18, memberLinks + " == 18 (the member links in object TEST)")
assert(templateLinks == 6, templateLinks + " == 6 (the template links in object TEST)")
}
}
diff --git a/test/files/run/t5527.check b/test/scaladoc/run/t5527.check
index 1518168c51..ab2aeb2d67 100644
--- a/test/files/run/t5527.check
+++ b/test/scaladoc/run/t5527.check
@@ -1,3 +1,12 @@
+newSource1:17: warning: discarding unmoored doc comment
+ /** Testing 123 */
+ ^
+newSource1:27: warning: discarding unmoored doc comment
+ /** Calculate this result. */
+ ^
+newSource1:34: warning: discarding unmoored doc comment
+ /** Another digit is a giveaway. */
+ ^
[[syntax trees at end of parser]] // newSource1
package <empty> {
object UselessComments extends scala.AnyRef {
diff --git a/test/files/run/t5527.scala b/test/scaladoc/run/t5527.scala
index 2449ff60c3..2449ff60c3 100644
--- a/test/files/run/t5527.scala
+++ b/test/scaladoc/run/t5527.scala
diff --git a/test/scaladoc/scalacheck/HtmlFactoryTest.scala b/test/scaladoc/scalacheck/HtmlFactoryTest.scala
index 13eacf79a5..d7b5e48288 100644
--- a/test/scaladoc/scalacheck/HtmlFactoryTest.scala
+++ b/test/scaladoc/scalacheck/HtmlFactoryTest.scala
@@ -680,7 +680,7 @@ object Test extends Properties("HtmlFactory") {
property("package object") = files("com/example/p1/package.html") match {
case node: scala.xml.Node =>
- node.toString contains "com.example.p1.package#packageObjectMethod"
+ node.toString contains "com.example.p1#packageObjectMethod"
case _ => false
}
diff --git a/test/scaladoc/scalacheck/IndexScriptTest.scala b/test/scaladoc/scalacheck/IndexScriptTest.scala
index 5aef38e00a..37f6947aaa 100644
--- a/test/scaladoc/scalacheck/IndexScriptTest.scala
+++ b/test/scaladoc/scalacheck/IndexScriptTest.scala
@@ -35,7 +35,7 @@ object Test extends Properties("IndexScript") {
}
property("allPackages") = {
- createIndexScript("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
+ createIndexScript("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match {
case Some(index) =>
index.allPackages.map(_.toString) == List(
"scala",
diff --git a/test/scaladoc/scalacheck/IndexTest.scala b/test/scaladoc/scalacheck/IndexTest.scala
index bf385898fc..dc4ab126d4 100644
--- a/test/scaladoc/scalacheck/IndexTest.scala
+++ b/test/scaladoc/scalacheck/IndexTest.scala
@@ -56,7 +56,7 @@ object Test extends Properties("Index") {
}
property("path") = {
- createIndex("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
+ createIndex("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match {
case Some(index) =>
index.path == List("index.html")
case None => false
@@ -64,7 +64,7 @@ object Test extends Properties("Index") {
}
property("title") = {
- createIndex("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
+ createIndex("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match {
case Some(index) =>
index.title == ""
@@ -72,7 +72,7 @@ object Test extends Properties("Index") {
}
}
property("browser contants a script element") = {
- createIndex("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
+ createIndex("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match {
case Some(index) =>
(index.browser \ "script").size == 1
diff --git a/test/script-tests/jar-manifest/run-test.check b/test/script-tests/jar-manifest/run-test.check
index ef59a6cbac..546bf6be9d 100644
--- a/test/script-tests/jar-manifest/run-test.check
+++ b/test/script-tests/jar-manifest/run-test.check
@@ -1,4 +1,4 @@
-Scala code runner version 2.10.0.r26038-b20111121102734 -- Copyright 2002-2011, LAMP/EPFL
+Scala code runner version 2.10.0.r26038-b20111121102734 -- Copyright 2002-2013, LAMP/EPFL
% pwd
/scala/trunk/test/script-tests/jar-manifest/target
diff --git a/tools/binary-repo-lib.sh b/tools/binary-repo-lib.sh
index 4fe6dd67a0..704bf4944d 100755
--- a/tools/binary-repo-lib.sh
+++ b/tools/binary-repo-lib.sh
@@ -208,7 +208,7 @@ pullJarFile() {
local sha1=$(cat ${jar}${desired_ext})
local jar_dir=$(dirname $jar)
local jar_name=${jar#$jar_dir/}
- local version=${sha1% ?$jar_name}
+ local version=${sha1%% *}
local remote_uri=${version}/${jar#$basedir/}
echo "Resolving [${remote_uri}]"
pullJarFileToCache $remote_uri $version
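The one-line change above is terse shell: the old ${sha1% ?$jar_name} stripped a suffix built from the jar's own name, while the new ${sha1%% *} keeps everything before the first space of the sha1sum-style line. Roughly, in Python terms (the line content below is invented):

    # A .desired.sha1 file holds "<checksum> <name>" (sha1sum output).
    sha1_line = "0123456789abcdef0123456789abcdef01234567 *scala-library.jar"

    # ${sha1%% *} -- drop the longest suffix starting at the first space,
    # i.e. keep only the checksum column, independent of the jar's name.
    version = sha1_line.split(' ', 1)[0]
    print version   # -> 0123456789abcdef0123456789abcdef01234567
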
diff --git a/tools/buildcp b/tools/buildcp
index 766ab81f90..3ae70e10a3 100755
--- a/tools/buildcp
+++ b/tools/buildcp
@@ -8,4 +8,4 @@ lib=$($dir/abspath $dir/../lib)
build=$($dir/abspath $dir/../build)
cp=$($dir/cpof $build/$1/classes):$build/asm/classes
-echo $cp:$lib/fjbg.jar:$lib/msil.jar:$lib/forkjoin.jar:$lib/jline.jar:$lib/extra/'*'
+echo $cp:$lib/forkjoin.jar:$lib/jline.jar:$lib/extra/'*'
diff --git a/tools/make-release-notes b/tools/make-release-notes
deleted file mode 100755
index dcd206f7fc..0000000000
--- a/tools/make-release-notes
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env bash
-
-# This tool is used to build a *scaffold* of a release note that you can fill in details with before posting to the list.
-# It aims to provide *all* the information you need, and probably need to prune it before releasing.
-# Author: jsuereth
-
-fixMessages() {
- local tag1="$1"
- local tag2="$2"
- git log $tag1..$tag2 "--format=format: * %h - %s" --no-merges --grep "SI-"
-}
-
-allcommitMessages() {
- local tag1="$1"
- local tag2="$2"
- git log $tag1..$tag2 "--format=format: * %h - %s" --no-merges
-}
-
-authors() {
- local tag1="$1"
- local tag2="$2"
- git log $tag1..$tag2 --format=format:%an --no-merges | sort | uniq -c | sort -rh
-}
-
-
-message() {
- local tag1="$1"
- local tag2="$2"
-
- echo "A new release of Scala is available! Please point your build tools at ${tag2#v}"
- echo
- echo "Here's a list of the issues that have been fixed since ${tag1#v}: "
- fixMessages "$tag1" "$tag2"
- echo
- echo
- echo "Special thanks to all the contributions!"
- echo "------- --------------------------------"
- authors "$tag1" "$tag2"
- echo "------- --------------------------------"
- echo
- echo
- echo "Here's a complete list of changes:"
- allcommitMessages "$tag1" "$tag2"
-}
-
-
-message "$1" "$2"
-
-
diff --git a/tools/make-release-notes.scala b/tools/make-release-notes.scala
new file mode 100644
index 0000000000..3e5b60d223
--- /dev/null
+++ b/tools/make-release-notes.scala
@@ -0,0 +1,129 @@
+#!/bin/sh
+exec scala -feature $0 $@
+!#
+
+import sys.process._
+
+val tag1 = "v2.10.0-M4"
+val tag2 = "v2.10.0-M5"
+
+// Git commit parsing magikz
+
+case class Commit(sha: String, author: String, header: String, body: String) {
+ override def toString = " * " + sha + " (" + author + ") " + header + " - " + body.take(5) + " ..."
+}
+
+val gitFormat = "--format=format:*-*%h``%aN``%s``%b"
+
+def processGitCommits(input: String): IndexedSeq[Commit] =
+ ((input split "[\\r\\n]*\\*\\-\\*").view map (_ split "``") collect {
+ case Array(sha, author, hdr, msg) => Commit(sha, author, hdr, msg)
+ }).toVector
+
+val commits =
+ processGitCommits(Process(Seq("git", "log", tag1+".."+tag2,"--format=format:*-*%h``%aN``%s``%b","--no-merges")).!!)
+
+val authors: Seq[(String, Int)] = {
+ val grouped: Vector[(String,Int)] = (commits groupBy (_.author)).map { case (a,c) => a -> c.length }{collection.breakOut}
+ (grouped sortBy (_._2)).reverse
+}
+
+def hasFixins(msg: String): Boolean = (
+ (msg contains "SI-") /*&& ((msg.toLowerCase contains "fix") || (msg.toLowerCase contains "close"))*/
+)
+
+val fixCommits =
+ for {
+ commit <- commits
+ searchString = commit.body + commit.header
+ if hasFixins(searchString)
+ } yield commit
+
+
+val siPattern = java.util.regex.Pattern.compile("(SI-[0-9]+)")
+
+def fixLinks(commit: Commit): String = {
+ val searchString = commit.body + commit.header
+ val m = siPattern matcher searchString
+ val issues = new collection.mutable.ArrayBuffer[String]
+ while(m.find()) {
+ issues += (m group 1)
+ }
+ issues map (si => """<a href="https://issues.scala-lang.org/browse/%s">%s</a>""" format (si, si)) mkString ", "
+}
+
+
+// HTML Generation for Toni
+
+def commitShaLink(sha: String) =
+ """<a href="https://github.com/scala/scala/commit/%s">%s</a>""" format (sha,sha)
+
+def printBlankLine(): Unit = println("<p>&nbsp</p>")
+def printHeader4(msg: String): Unit = println("<h4>%s</h4>" format (msg))
+
+def printCommiterList(): Unit = {
+ printBlankLine()
+ printHeader4("Special thanks to all the contribtuors!")
+ println("""<table border="0" cellspacing="0" cellpadding="1">
+ <thead><tr><th>#</th><th align="left">Author</th></tr></thead>
+ <tbody>""")
+ for((author, count) <- authors)
+ println("""<tr><td align="right">%d &nbsp;</td><td>%s</td></tr>""" format (count, author))
+ println("""</tbody>
+</table>""")
+}
+
+def printCommitList(): Unit = {
+ printBlankLine()
+ printHeader4("Complete commit list!")
+ println("""<table border="0" cellspacing="0" cellpadding="1">
+ <thead><tr><th>sha</th><th align="left">Title</th></tr></thead>
+ <tbody>""")
+ for(commit <- commits) {
+ println("<tr>")
+ println("""<td align="right">%s&nbsp;</td><td>%s</td>""" format (commitShaLink(commit.sha), commit.header))
+ /*print("<td>")
+ (commit.body split "[\\r\\n]") foreach { line =>
+ print(line)
+ print("<br/>")
+ }
+ print("</td>")*/
+ println("""</tr>""")
+ }
+ println("""</tbody>
+</table>""")
+}
+
+def issueFixPrinter(): Unit = {
+ printBlankLine()
+ printHeader4("Here's a list of isssues that have been fixed since %s" format (tag1))
+ println("""<table border="0" cellspacing="0" cellpading="1">
+ <thead><tr><th>Issue(s)</th><th>Commit</th><th>Message</th></tr></thead>
+ <tbody>""")
+ for(commit <- fixCommits) {
+ println("""<tr><td>%s&nbsp;</td><td>%s&nbsp;</td><td>%s</td></tr>""" format(fixLinks(commit), commitShaLink(commit.sha), commit.header))
+ }
+ println("""</tbody>
+</table>""")
+ printBlankLine()
+}
+
+def printHTML(): Unit = {
+ println("""<html>
+ <head>
+ <title>%s - Release notes</title>
+ </head>
+ <body>
+ <h3>A new release of Scala is available! Please point your build tools at %s</h3>
+ <p>:: INSERT HAND GENERATED NOTES HERE ::</p>
+""" format(tag2, tag2 drop 1))
+ issueFixPrinter()
+ printCommiterList()
+ printCommitList()
+ println("""</body></html>""")
+}
+
+printHTML()
+
+
+
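To decode the custom --format string used above: each commit is rendered as *-*<sha>``<author>``<subject>``<body>, so records are split on *-* and fields on the double backtick. A small Python illustration on canned input (the commits are invented):

    import re

    # Two fake commits in the exact shape produced by
    # --format=format:*-*%h``%aN``%s``%b
    log = ("*-*aaaaaaa``Alice``SI-1234 fix foo``longer body\n"
           "*-*bbbbbbb``Bob``Cleanup``")

    commits = []
    for record in re.split(r'[\r\n]*\*-\*', log):
        fields = record.split('``')
        if len(fields) == 4:
            commits.append(dict(zip(('sha', 'author', 'header', 'body'), fields)))

    print [c['sha'] for c in commits]   # -> ['aaaaaaa', 'bbbbbbb']
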
diff --git a/tools/partest-ack b/tools/partest-ack
new file mode 100755
index 0000000000..f7d5063292
--- /dev/null
+++ b/tools/partest-ack
@@ -0,0 +1,141 @@
+#!/usr/bin/env bash
+#
+# wrapper around partest for fine-grained test selection via ack
+
+declare quiet failed update partest_debug file_regex partest_args ack_args cotouched
+
+[[ $# -gt 0 ]] || {
+ cat <<EOM
+Usage: $0 <regex> [-dfquvp] [ack options]
+
+ -d pass --debug to partest
+ -f pass --failed to partest
+ -q DON'T pass --show-log and --show-diff to partest
+ -u pass --update-check to partest
+ -v pass --verbose to partest
+ -p <path> select tests appearing in commits where <path> was also modified
+
+Given a regular expression (and optionally, any arguments accepted by ack)
+runs all the tests for which any associated file matches the regex. Associated
+files include .check and .flags files. Tests in directories will match if any
+file matches. A file can match the regex by its contents or by its name.
+
+You must have ack installed: http://betterthangrep.com/ack-standalone
+
+Examples:
+
+ > tools/partest-ack monad
+ % tests-with-matching-paths ... 2
+ % tests-with-matching-code ... 2
+ # 4 tests to run.
+
+ > tools/partest-ack -p src/library/scala/Enumeration.scala
+ % tests-modified-in-same-commit ... 84
+ # 84 tests to run.
+
+ > tools/partest-ack -f
+ % tests-which-failed ... 42
+ # 42 tests to run.
+EOM
+
+ exit 0
+}
+
+# The leading : in :achs suppresses some errors. Each letter is a valid
+# option. If an option takes an argument, a colon follows it, e.g.
+# it would be :ach:s if -h took an argument.
+while getopts :fuvdp: opt; do
+ case $opt in
+ d) partest_debug=true && partest_args="$partest_args --debug" ;;
+ f) failed=true && partest_args="$partest_args --failed" ;;
+ p) cotouched="$cotouched $OPTARG" ;;
+ q) quiet=true ;;
+ u) partest_args="$partest_args --update-check" ;;
+ v) partest_args="$partest_args --verbose" ;;
+ :) echo "Option -$OPTARG requires an argument." >&2 ;; # this case is called for a missing option argument
+ *) echo "Unrecognized argument $OPTARG" ;; # this is the catch-all implying an unknown option
+ esac
+done
+
+shift $((OPTIND-1))
+file_regex="$1"
+ack_args="$*"
+
+tests () {
+ find test/files -mindepth 2 -maxdepth 2 -name '*.scala' -o -type d
+}
+
+pathsToTests () {
+ for path in $(perl -pe 's#^(test/files/[^/]+/[^/.]+).*$#$1#'); do
+ if [[ -d "$path" ]]; then
+ echo "$path"
+ elif [[ -f "$path.scala" ]]; then
+ echo "$path.scala"
+ fi
+ done | sort -u
+}
+
+tests-with-matching-paths() {
+ local re="$1"
+ for p in $(find test/files -type f); do
+ [[ $p =~ $re ]] && echo "$p"
+ done
+}
+
+tests-which-failed () {
+ for f in $(find test/files -name '*.log'); do
+ echo ${f%-*}
+ done
+}
+
+tests-modified-in-same-commit() {
+ [[ $# -gt 0 ]] && \
+ for rev in $(git rev-list HEAD -- "$@"); do
+ git --no-pager show --pretty="format:" --name-only "$rev" -- test/files
+ done
+}
+
+tests-with-matching-code() {
+ ack --noenv --text --files-with-matches "$@" -- test/files
+}
+
+countStdout () {
+ local -i count=0
+ while read line; do
+ printf "$line\n"
+ count+=1
+ done
+
+ printf >&2 " $count\n"
+}
+
+testRun () {
+ printf >&2 "%% %-30s ... " "$1"
+ "$@" | pathsToTests | countStdout
+}
+
+allMatches() {
+ [[ -n $file_regex ]] && testRun tests-with-matching-paths $file_regex
+ [[ -n $cotouched ]] && testRun tests-modified-in-same-commit $cotouched
+ [[ -n $ack_args ]] && testRun tests-with-matching-code $ack_args
+ [[ -n $failed ]] && testRun tests-which-failed
+}
+
+paths=$(allMatches | sort -u)
+[[ -n $quiet ]] || partest_args="--show-diff --show-log $partest_args"
+
+if [[ -z $paths ]] && [[ -z $failed ]]; then
+ echo >&2 "No matching tests."
+else
+ count=$(echo $(echo "$paths" | wc -w))
+
+ # Output a command line which will re-run these same tests.
+ echo "# $count tests to run."
+ printf "%-52s %s\n" "test/partest $partest_args" "\\"
+ for path in $paths; do
+ printf " %-50s %s\n" "$path" "\\"
+ done
+ echo ' ""'
+
+ test/partest $partest_args $paths
+fi
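pathsToTests above is the heart of the selection: any matched file under test/files/<category>/<test> is collapsed to the test's directory or its standalone .scala file, then de-duplicated. A rough Python equivalent (the paths in the example call are invented):

    import os
    import re

    _test_re = re.compile(r'^(test/files/[^/]+/[^/.]+)')

    def paths_to_tests(paths):
        # Collapse matched files to their enclosing test, drop duplicates.
        tests = set()
        for p in paths:
            m = _test_re.match(p)
            if not m:
                continue
            base = m.group(1)
            if os.path.isdir(base):
                tests.add(base)
            elif os.path.isfile(base + '.scala'):
                tests.add(base + '.scala')
        return sorted(tests)

    # e.g.  paths_to_tests(['test/files/run/t1234.check',
    #                       'test/files/pos/t9999/Foo.scala'])
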
diff --git a/tools/stability-test.sh b/tools/stability-test.sh
new file mode 100755
index 0000000000..f017ac0842
--- /dev/null
+++ b/tools/stability-test.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+#
+
+declare failed
+
+echo "Comparing build/quick/classes and build/strap/classes"
+for dir in library reflect compiler; do
+ # feel free to replace by a more elegant approach -- don't know how
+ if diff -rw -x '*.css' \
+ -x '*.custom' \
+ -x '*.gif' \
+ -x '*.js' \
+ -x '*.layout' \
+ -x '*.png' \
+ -x '*.properties' \
+ -x '*.tmpl' \
+ -x '*.tooltip' \
+ -x '*.txt' \
+ -x '*.xml' \
+ build/{quick,strap}/classes/$dir
+ then
+ classes=$(find build/quick/classes/$dir -name '*.class' | wc -l)
+ printf "%8s: %5d classfiles verified identical\n" $dir $classes
+ else
+ failed=true
+ fi
+done
+
+[[ -z $failed ]] || exit 127
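The stability check above amounts to: quick and strap must contain byte-identical .class files, with resources excluded. A rough, one-directional Python sketch of the same comparison, for illustration only:

    import filecmp
    import os

    def identical_classfiles(quick, strap):
        # Compare every .class file under `quick` against its counterpart
        # under `strap`; resources (.css, .png, ...) are simply skipped.
        ok = True
        for root, _, files in os.walk(quick):
            for name in files:
                if not name.endswith('.class'):
                    continue
                a = os.path.join(root, name)
                b = os.path.join(strap, os.path.relpath(a, quick))
                if not (os.path.isfile(b) and filecmp.cmp(a, b, shallow=False)):
                    print 'differs or missing: %s' % b
                    ok = False
        return ok

    # e.g.  identical_classfiles('build/quick/classes/library',
    #                            'build/strap/classes/library')
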
diff --git a/tools/strapcp b/tools/strapcp
index 6a46b4e1c8..6a4044ae24 100755
--- a/tools/strapcp
+++ b/tools/strapcp
@@ -6,7 +6,6 @@ strap="$dir/../build/strap/classes"
[[ -d $strap ]] || { echo "Error: no directory at $strap"; exit 1; }
cp=$($dir/cpof $strap)
-fjbg=$($dir/abspath $dir/../lib/fjbg.jar)
asm=$($dir/abspath $dir/../build/asm/classes)
-echo $cp:$fjbg:$asm
+echo $cp:$asm